Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -48,4 +48,7 @@ dist

artifacts

# uv lock file
uv.lock

bin
12 changes: 6 additions & 6 deletions example/examples.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,8 +37,8 @@ def deviation_risk_parity(w, cov_matrix):

# Black-Litterman
spy_prices = pd.read_csv(
"tests/resources/spy_prices.csv", parse_dates=True, index_col=0, squeeze=True
)
"tests/resources/spy_prices.csv", parse_dates=True, index_col=0
).squeeze()
delta = black_litterman.market_implied_risk_aversion(spy_prices)

mcaps = {
Expand Down Expand Up @@ -116,7 +116,7 @@ def deviation_risk_parity(w, cov_matrix):
weights = hrp.optimize()
hrp.portfolio_performance(verbose=True)
print(weights)
plotting.plot_dendrogram(hrp) # to plot dendrogram
plotting.plot_dendrogram(hrp, showfig=False) # Use showfig=True to display plot

"""
Expected annual return: 10.8%
Expand Down Expand Up @@ -146,11 +146,11 @@ def deviation_risk_parity(w, cov_matrix):
"""


# Critical Line Algorithm
cla = CLA(mu, S)
# Critical Line Algorithm (CLA)
cla = CLA(mu, S, use_cvxcla=False) # Use use_cvxcla=True for faster performance
print(cla.max_sharpe())
cla.portfolio_performance(verbose=True)
plotting.plot_efficient_frontier(cla) # to plot
plotting.plot_efficient_frontier(cla, interactive=True, showfig=False) # Use showfig=True to open browser for interactive plot

"""
{'GOOG': 0.020889868669945022,
Expand Down
135 changes: 128 additions & 7 deletions pypfopt/cla.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ class CLA(base_optimizer.BaseOptimizer):
- ``save_weights_to_file()`` saves the weights to csv, json, or txt.
"""

def __init__(self, expected_returns, cov_matrix, weight_bounds=(0, 1)):
def __init__(self, expected_returns, cov_matrix, weight_bounds=(0, 1), use_cvxcla=False):
"""
:param expected_returns: expected returns for each asset. Set to None if
optimising for volatility only.
Expand All @@ -57,15 +57,78 @@ def __init__(self, expected_returns, cov_matrix, weight_bounds=(0, 1)):
:param weight_bounds: minimum and maximum weight of an asset, defaults to (0, 1).
Must be changed to (-1, 1) for portfolios with shorting.
:type weight_bounds: tuple (float, float) or (list/ndarray, list/ndarray) or list(tuple(float, float))
:param use_cvxcla: if True, use cvxcla backend for faster performance. Defaults to False.
:type use_cvxcla: bool
:raises TypeError: if ``expected_returns`` is not a series, list or array
:raises TypeError: if ``cov_matrix`` is not a dataframe or array
"""
# Initialize the class
# Store backend choice
self.use_cvxcla = use_cvxcla

# Setup cvxcla backend if requested
if use_cvxcla:
try:
from cvxcla import CLA as CVXCLAEngine
# Convert to cvxcla format
self.mean = np.asarray(expected_returns).flatten()
self.expected_returns = self.mean # For backward compatibility
n_assets = len(self.mean)

# Handle weight bounds
if len(weight_bounds) == len(self.mean) and not isinstance(weight_bounds[0], (float, int)):
self.lower_bounds = np.array([b[0] for b in weight_bounds])
self.upper_bounds = np.array([b[1] for b in weight_bounds])
else:
self.lower_bounds = np.full(n_assets, weight_bounds[0])
self.upper_bounds = np.full(n_assets, weight_bounds[1])

# Store cvxcla initialization parameters BEFORE setting cov_matrix property
self._cvxcla_mean = self.mean
self._cvxcla_bounds = (self.lower_bounds, self.upper_bounds)
self._cvxcla_cov_matrix = np.asarray(cov_matrix) # Direct assignment to avoid property setter during init
self.n_assets = n_assets # Set n_assets before creating engine

# Create cvxcla engine
self._cvxcla_engine = CVXCLAEngine(
mean=self.mean,
covariance=self._cvxcla_cov_matrix,
lower_bounds=self.lower_bounds,
upper_bounds=self.upper_bounds,
a=np.ones((1, n_assets)), # Fully invested constraint
b=np.ones(1)
)

# Store ticker mapping for backward compatibility
if hasattr(expected_returns, 'index'):
self.tickers = list(expected_returns.index)
else:
self.tickers = list(range(n_assets))

# Set n_assets for backward compatibility
self.n_assets = n_assets

# Add frontier_values for plotting compatibility
self.frontier_values = None

# Initialize parent class
super().__init__(n_assets, self.tickers)
return # Skip the original initialization

except ImportError:
import warnings
warnings.warn(
"cvxcla not available, falling back to standard implementation. "
"Install with: pip install cvxcla",
RuntimeWarning
)
self.use_cvxcla = False

# Original initialization code
self.mean = np.array(expected_returns).reshape((len(expected_returns), 1))
# if (self.mean == np.ones(self.mean.shape) * self.mean.mean()).all():
# self.mean[-1, 0] += 1e-5
self.expected_returns = self.mean.reshape((len(self.mean),))
self.cov_matrix = np.asarray(cov_matrix)
self._cov_matrix = np.asarray(cov_matrix) # Use _cov_matrix for original implementation

# Bounds
if len(weight_bounds) == len(self.mean) and not isinstance(
Expand Down Expand Up @@ -96,6 +159,33 @@ def __init__(self, expected_returns, cov_matrix, weight_bounds=(0, 1)):
tickers = list(range(len(self.mean)))
super().__init__(len(tickers), tickers)

def _recreate_cvxcla_engine(self):
    """Rebuild the cvxcla backend engine after an input changes.

    Invoked when a cached parameter (e.g. the covariance matrix) is
    replaced so that ``self._cvxcla_engine`` reflects the new values.
    Does nothing unless the cvxcla backend is active and its cached
    mean/bounds have already been stored by ``__init__``.
    """
    # Guard: only rebuild when the cvxcla backend was fully initialised.
    if not self.use_cvxcla:
        return
    if not (hasattr(self, "_cvxcla_mean") and hasattr(self, "_cvxcla_bounds")):
        return

    from cvxcla import CLA as CVXCLAEngine

    lower, upper = self._cvxcla_bounds
    self._cvxcla_engine = CVXCLAEngine(
        mean=self._cvxcla_mean,
        covariance=self._cvxcla_cov_matrix,
        lower_bounds=lower,
        upper_bounds=upper,
        a=np.ones((1, self.n_assets)),  # fully-invested constraint: sum(w) == 1
        b=np.ones(1),
    )

@property
def cov_matrix(self):
    """Covariance matrix currently in use.

    :return: the covariance matrix backing either the cvxcla engine or
        the original implementation, depending on ``use_cvxcla``.
    :rtype: np.ndarray
    """
    return self._cvxcla_cov_matrix if self.use_cvxcla else self._cov_matrix

@cov_matrix.setter
def cov_matrix(self, new_cov_matrix):
    """Replace the covariance matrix, rebuilding the cvxcla engine if needed.

    :param new_cov_matrix: new covariance matrix (array-like).
    """
    if self.use_cvxcla:
        self._cvxcla_cov_matrix = np.asarray(new_cov_matrix)
        # The engine caches the covariance internally, so it must be rebuilt.
        self._recreate_cvxcla_engine()
    else:
        # Coerce to ndarray for consistency with __init__, which stores
        # np.asarray(cov_matrix); previously a raw list/DataFrame could leak in.
        self._cov_matrix = np.asarray(new_cov_matrix)

@staticmethod
def _infnone(x):
"""
Expand Down Expand Up @@ -138,14 +228,16 @@ def _compute_w(self, covarF_inv, covarFB, meanF, wB):
g1 = np.dot(np.dot(onesF.T, covarF_inv), meanF)
g2 = np.dot(np.dot(onesF.T, covarF_inv), onesF)
if wB is None:
g, w1 = float(-self.ls[-1] * g1 / g2 + 1 / g2), 0
g_result = -self.ls[-1] * g1 / g2 + 1 / g2
g, w1 = float(g_result.item() if hasattr(g_result, 'item') else g_result), 0
else:
onesB = np.ones(wB.shape)
g3 = np.dot(onesB.T, wB)
g4 = np.dot(covarF_inv, covarFB)
w1 = np.dot(g4, wB)
g4 = np.dot(onesF.T, w1)
g = float(-self.ls[-1] * g1 / g2 + (1 - g3 + g4) / g2)
g_result = -self.ls[-1] * g1 / g2 + (1 - g3 + g4) / g2
g = float(g_result.item() if hasattr(g_result, 'item') else g_result)
# 2) compute weights
w2 = np.dot(covarF_inv, onesF)
w3 = np.dot(covarF_inv, meanF)
Expand All @@ -167,14 +259,16 @@ def _compute_lambda(self, covarF_inv, covarFB, meanF, wB, i, bi):
# 3) Lambda
if wB is None:
# All free assets
return float((c4[i] - c1 * bi) / c), bi
result = (c4[i] - c1 * bi) / c
return float(result.item() if hasattr(result, 'item') else result), bi
else:
onesB = np.ones(wB.shape)
l1 = np.dot(onesB.T, wB)
l2 = np.dot(covarF_inv, covarFB)
l3 = np.dot(l2, wB)
l2 = np.dot(onesF.T, l3)
return float(((1 - l1 + l2) * c4[i] - c1 * (bi + l3[i])) / c), bi
result = ((1 - l1 + l2) * c4[i] - c1 * (bi + l3[i])) / c
return float(result.item() if hasattr(result, 'item') else result), bi

def _get_matrices(self, f):
# Slice covarF,covarFB,covarB,meanF,meanB,wF,wB
Expand Down Expand Up @@ -376,6 +470,14 @@ def max_sharpe(self):
:return: asset weights for the max-sharpe portfolio
:rtype: OrderedDict
"""
# Use cvxcla backend if enabled
if self.use_cvxcla:
_, weights = self._cvxcla_engine.frontier.max_sharpe
self.weights = weights
# Map weights to their tickers (plain dict; insertion-ordered in Python 3.7+)
return dict(zip(self.tickers, weights))

# Original implementation
if not self.w:
self._solve()
# 1) Compute the local max SR portfolio between any two neighbor turning points
Expand All @@ -398,6 +500,15 @@ def min_volatility(self):
:return: asset weights for the volatility-minimising portfolio
:rtype: OrderedDict
"""
# Use cvxcla backend if enabled
if self.use_cvxcla:
# Last point on efficient frontier = minimum variance portfolio
weights = self._cvxcla_engine.frontier.weights[-1]
self.weights = weights
# Map weights to their tickers (plain dict; insertion-ordered in Python 3.7+)
return dict(zip(self.tickers, weights))

# Original implementation
if not self.w:
self._solve()
var = []
Expand All @@ -418,6 +529,16 @@ def efficient_frontier(self, points=100):
:return: return list, std list, weight list
:rtype: (float list, float list, np.ndarray list)
"""
# Use cvxcla backend if enabled
if self.use_cvxcla:
frontier = self._cvxcla_engine.frontier.interpolate(points)
mu = frontier.returns.tolist()
sigma = frontier.volatility.tolist()
weights = [w for w in frontier.weights]
self.frontier_values = (mu, sigma, weights)
return mu, sigma, weights

# Original implementation
if not self.w:
self._solve()

Expand Down
4 changes: 4 additions & 0 deletions pypfopt/plotting.py
Original file line number Diff line number Diff line change
Expand Up @@ -386,6 +386,10 @@ def plot_efficient_frontier(
xaxis_title="Volatility",
yaxis_title="Return",
)
# Handle showfig for interactive plotly plots
showfig = kwargs.get("showfig", False)
if showfig:
ax.show()
else:
ax.legend()
ax.set_xlabel("Volatility")
Expand Down
9 changes: 9 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -32,9 +32,12 @@ classifiers=[
# core dependencies of pyportfolioopt
# this set should be kept minimal!
dependencies = [
"cvxcla>=1.5.1",
"cvxpy>=1.1.19",
"numpy>=1.26.0",
"packaging>=26.0",
"pandas>=0.19",
"plotly>=6.5.2",
"scikit-base<0.14.0",
"scikit-learn>=0.24.1",
"scipy>=1.3.0",
Expand Down Expand Up @@ -108,6 +111,12 @@ indent-style = "space"
line-ending = "auto"
skip-magic-trailing-comma = false

[dependency-groups]
dev = [
"pytest>=9.0.2",
"pytest-cov>=7.0.0",
]

[tool.ruff.lint.isort]
known-first-party = ["pypfopt"]
combine-as-imports = true
Expand Down
Loading