
Commit a6e6865

Author: 88d52bdba0366127fffca9dfa93895 (committed)
enhance tests
1 parent 8c3c1bd commit a6e6865

17 files changed: +236, -372 lines

.github/workflows/main.yml
Lines changed: 1 addition & 1 deletion

@@ -31,4 +31,4 @@ jobs:
       - name: Test with pytest
         run: |
           pip install pytest pytest-cov
-          pytest ./tests --doctest-modules --cov-report=html
+          pytest -s ./tests --doctest-modules --cov-report=html
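
Note: pytest's -s flag is shorthand for --capture=no, so anything the tests print now shows up live in the CI log instead of being swallowed by output capturing. A rough local equivalent of the step above, as a minimal sketch (paths and coverage flags mirror the workflow and assume pytest plus pytest-cov are installed):

# Minimal sketch of running the same command programmatically; not part of the commit.
import pytest

# "-s" disables output capturing, so print() output from tests appears in the log.
exit_code = pytest.main(["-s", "./tests", "--doctest-modules", "--cov-report=html"])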

pypfopt/base_optimizer.py
Lines changed: 1 addition & 1 deletion

@@ -164,7 +164,7 @@ def __init__(
                               for portfolios with shorting.
         :type weight_bounds: tuple OR tuple list, optional
         :param solver: name of solver. list available solvers with: ``cvxpy.installed_solvers()``
-        :type solver: str, optional. Defaults to "ECOS"
+        :type solver: str, optional.
         :param verbose: whether performance and debugging info should be printed, defaults to False
         :type verbose: bool, optional
         :param solver_options: parameters for the given solver
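
Note: with the 'Defaults to "ECOS"' claim removed, the docstring no longer promises a particular default solver; cvxpy picks one unless a solver is passed explicitly. A minimal usage sketch of the parameters documented above (the toy mu/S values and the choice of "ECOS" are assumptions, not repo code; check cp.installed_solvers() first):

# Minimal sketch, not part of the commit. "ECOS" is assumed to be installed.
import cvxpy as cp
import pandas as pd
from pypfopt import EfficientFrontier

print(cp.installed_solvers())  # list the solvers available to cvxpy

mu = pd.Series({"A": 0.10, "B": 0.08})  # toy expected returns
S = pd.DataFrame([[0.04, 0.006], [0.006, 0.09]], index=["A", "B"], columns=["A", "B"])  # toy covariance

ef = EfficientFrontier(mu, S, solver="ECOS", verbose=False, solver_options={"max_iters": 200})
w = ef.min_volatility()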

tests/test_base_optimizer.py
Lines changed: 22 additions & 54 deletions

@@ -2,14 +2,13 @@
 import os
 import tempfile

+import cvxpy as cp
 import numpy as np
 import pandas as pd
 import pytest
-import cvxpy as cp

-from pypfopt import EfficientFrontier, objective_functions
-from pypfopt import exceptions
-from pypfopt.base_optimizer import portfolio_performance, BaseOptimizer
+from pypfopt import EfficientFrontier, exceptions, objective_functions
+from pypfopt.base_optimizer import BaseOptimizer, portfolio_performance
 from tests.utilities_for_tests import get_data, setup_efficient_frontier


@@ -23,9 +22,7 @@ def test_base_optimizer():


 def test_custom_bounds():
-    ef = EfficientFrontier(
-        *setup_efficient_frontier(data_only=True), weight_bounds=(0.02, 0.13)
-    )
+    ef = setup_efficient_frontier(weight_bounds=(0.02, 0.13))
     ef.min_volatility()
     np.testing.assert_allclose(ef._lower_bounds, np.array([0.02] * ef.n_assets))
     np.testing.assert_allclose(ef._upper_bounds, np.array([0.13] * ef.n_assets))
@@ -37,97 +34,71 @@ def test_custom_bounds():

 def test_custom_bounds_different_values():
     bounds = [(0.01, 0.13), (0.02, 0.11)] * 10
-    ef = EfficientFrontier(
-        *setup_efficient_frontier(data_only=True), weight_bounds=bounds
-    )
+    ef = setup_efficient_frontier(weight_bounds=bounds)
     ef.min_volatility()
     assert (0.01 <= ef.weights[::2]).all() and (ef.weights[::2] <= 0.13).all()
     assert (0.02 <= ef.weights[1::2]).all() and (ef.weights[1::2] <= 0.11).all()
     np.testing.assert_almost_equal(ef.weights.sum(), 1)

     bounds = ((0.01, 0.13), (0.02, 0.11)) * 10
-    assert EfficientFrontier(
-        *setup_efficient_frontier(data_only=True), weight_bounds=bounds
-    )
+    assert setup_efficient_frontier(weight_bounds=bounds)


 def test_weight_bounds_minus_one_to_one():
-    ef = EfficientFrontier(
-        *setup_efficient_frontier(data_only=True), weight_bounds=(-1, 1)
-    )
+    ef = setup_efficient_frontier(weight_bounds=(-1, 1))
     assert ef.max_sharpe()
-    ef2 = EfficientFrontier(
-        *setup_efficient_frontier(data_only=True), weight_bounds=(-1, 1)
-    )
+    ef2 = setup_efficient_frontier(weight_bounds=(-1, 1))
     assert ef2.min_volatility()


 def test_none_bounds():
-    ef = EfficientFrontier(
-        *setup_efficient_frontier(data_only=True), weight_bounds=(None, 0.3)
-    )
+    ef = setup_efficient_frontier(weight_bounds=(None, 0.3))
     ef.min_volatility()
     w1 = ef.weights

-    ef = EfficientFrontier(
-        *setup_efficient_frontier(data_only=True), weight_bounds=(-1, 0.3)
-    )
+    ef = setup_efficient_frontier(weight_bounds=(-1, 0.3))
     ef.min_volatility()
     w2 = ef.weights
+
     np.testing.assert_array_almost_equal(w1, w2)


 def test_bound_input_types():
     bounds = [0.01, 0.13]
-    ef = EfficientFrontier(
-        *setup_efficient_frontier(data_only=True), weight_bounds=bounds
-    )
+    ef = setup_efficient_frontier(weight_bounds=bounds)
     assert ef
     np.testing.assert_allclose(ef._lower_bounds, np.array([0.01] * ef.n_assets))
     np.testing.assert_allclose(ef._upper_bounds, np.array([0.13] * ef.n_assets))

     lb = np.array([0.01, 0.02] * 10)
     ub = np.array([0.07, 0.2] * 10)
-    assert EfficientFrontier(
-        *setup_efficient_frontier(data_only=True), weight_bounds=(lb, ub)
-    )
+    assert setup_efficient_frontier(weight_bounds=(lb, ub))
+
     bounds = ((0.01, 0.13), (0.02, 0.11)) * 10
-    assert EfficientFrontier(
-        *setup_efficient_frontier(data_only=True), weight_bounds=bounds
-    )
+    assert setup_efficient_frontier(weight_bounds=bounds)


 def test_bound_failure():
     # Ensure optimization fails when lower bound is too high or upper bound is too low
-    ef = EfficientFrontier(
-        *setup_efficient_frontier(data_only=True), weight_bounds=(0.06, 0.13)
-    )
+    ef = setup_efficient_frontier(weight_bounds=(0.06, 0.13))
     with pytest.raises(exceptions.OptimizationError):
         ef.min_volatility()

-    ef = EfficientFrontier(
-        *setup_efficient_frontier(data_only=True), weight_bounds=(0, 0.04)
-    )
+    ef = setup_efficient_frontier(weight_bounds=(0, 0.04))
     with pytest.raises(exceptions.OptimizationError):
         ef.min_volatility()


 def test_bounds_errors():
-    assert EfficientFrontier(
-        *setup_efficient_frontier(data_only=True), weight_bounds=(0, 1)
-    )
+    assert setup_efficient_frontier(weight_bounds=(0, 1))

     with pytest.raises(TypeError):
-        EfficientFrontier(
-            *setup_efficient_frontier(data_only=True), weight_bounds=(0.06, 1, 3)
-        )
+        setup_efficient_frontier(weight_bounds=(0.06, 1, 3))

     with pytest.raises(TypeError):
         # Not enough bounds
         bounds = [(0.01, 0.13), (0.02, 0.11)] * 5
-        EfficientFrontier(
-            *setup_efficient_frontier(data_only=True), weight_bounds=bounds
-        )
+        setup_efficient_frontier(weight_bounds=bounds)


 def test_clean_weights():
@@ -138,16 +109,13 @@ def test_clean_weights():
     cleaned_weights = cleaned.values()
     clean_number_tiny_weights = sum(i < 1e-4 for i in cleaned_weights)
     assert clean_number_tiny_weights == number_tiny_weights
-    #  Check rounding
+    # Check rounding
     cleaned_weights_str_length = [len(str(i)) for i in cleaned_weights]
     assert all([length == 7 or length == 3 for length in cleaned_weights_str_length])


 def test_clean_weights_short():
-    ef = setup_efficient_frontier()
-    ef = EfficientFrontier(
-        *setup_efficient_frontier(data_only=True), weight_bounds=(-1, 1)
-    )
+    ef = setup_efficient_frontier(weight_bounds=(-1, 1))
     ef.min_volatility()
     # In practice we would never use such a high cutoff
     number_tiny_weights = sum(np.abs(ef.weights) < 0.05)
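
Note: the tests now pass weight_bounds straight to setup_efficient_frontier instead of unpacking setup_efficient_frontier(data_only=True) into EfficientFrontier themselves. tests/utilities_for_tests.py is not shown in this diff, so the following is only an assumed sketch of what the helper presumably does now (names and defaults are guesses):

# Assumed sketch of the refactored test helper; the real tests/utilities_for_tests.py
# is not part of this diff. get_data() is the existing fixture loader in that module.
from pypfopt import EfficientFrontier
from pypfopt.expected_returns import mean_historical_return
from pypfopt.risk_models import sample_cov


def setup_efficient_frontier(data_only=False, **kwargs):
    df = get_data()  # price DataFrame from the test fixtures
    mu = mean_historical_return(df)
    S = sample_cov(df)
    if data_only:
        return mu, S
    # Extra keyword arguments such as weight_bounds are forwarded to the optimizer.
    return EfficientFrontier(mu, S, **kwargs)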

tests/test_black_litterman.py
Lines changed: 21 additions & 18 deletions

@@ -2,9 +2,12 @@
 import pandas as pd
 import pytest

-from pypfopt import black_litterman
-from pypfopt.black_litterman import BlackLittermanModel
 from pypfopt import risk_models, expected_returns
+from pypfopt.black_litterman import (
+    BlackLittermanModel,
+    market_implied_risk_aversion,
+    market_implied_prior_returns,
+)
 from tests.utilities_for_tests import get_data, get_market_caps, resource


@@ -241,18 +244,18 @@ def test_market_risk_aversion():
     prices = pd.read_csv(
         resource("spy_prices.csv"), parse_dates=True, index_col=0
     ).squeeze("columns")
-    delta = black_litterman.market_implied_risk_aversion(prices)
+    delta = market_implied_risk_aversion(prices)
     assert np.round(delta, 5) == 2.68549

     # check it works for df
     prices = pd.read_csv(resource("spy_prices.csv"), parse_dates=True, index_col=0)
-    delta = black_litterman.market_implied_risk_aversion(prices)
+    delta = market_implied_risk_aversion(prices)
     assert np.round(delta.iloc[0], 5) == 2.68549

     # Check it raises for other types.
     list_invalid = [100.0, 110.0, 120.0, 130.0]
     with pytest.raises(TypeError):
-        delta = black_litterman.market_implied_risk_aversion(list_invalid)
+        delta = market_implied_risk_aversion(list_invalid)


 def test_bl_weights():
@@ -266,7 +269,7 @@ def test_bl_weights():
         resource("spy_prices.csv"), parse_dates=True, index_col=0
     ).squeeze("columns")

-    delta = black_litterman.market_implied_risk_aversion(prices)
+    delta = market_implied_risk_aversion(prices)
     bl.bl_weights(delta)
     w = bl.clean_weights()
     assert abs(sum(w.values()) - 1) < 1e-5
@@ -318,10 +321,10 @@ def test_market_implied_prior():
     prices = pd.read_csv(
         resource("spy_prices.csv"), parse_dates=True, index_col=0
     ).squeeze("columns")
-    delta = black_litterman.market_implied_risk_aversion(prices)
+    delta = market_implied_risk_aversion(prices)

     mcaps = get_market_caps()
-    pi = black_litterman.market_implied_prior_returns(mcaps, delta, S)
+    pi = market_implied_prior_returns(mcaps, delta, S)
     assert isinstance(pi, pd.Series)
     assert list(pi.index) == list(df.columns)
     assert pi.notnull().all()
@@ -355,7 +358,7 @@ def test_market_implied_prior():
     )

     mcaps = pd.Series(mcaps)
-    pi2 = black_litterman.market_implied_prior_returns(mcaps, delta, S)
+    pi2 = market_implied_prior_returns(mcaps, delta, S)
     pd.testing.assert_series_equal(pi, pi2, check_exact=False)

     # Test alternate syntax
@@ -366,7 +369,7 @@ def test_market_implied_prior():
         absolute_views={"AAPL": 0.1},
         risk_aversion=delta,
     )
-    pi = black_litterman.market_implied_prior_returns(mcaps, delta, S, risk_free_rate=0)
+    pi = market_implied_prior_returns(mcaps, delta, S, risk_free_rate=0)
     np.testing.assert_array_almost_equal(bl.pi, pi.values.reshape(-1, 1))


@@ -378,14 +381,14 @@ def test_bl_market_prior():
         resource("spy_prices.csv"), parse_dates=True, index_col=0
     ).squeeze("columns")

-    delta = black_litterman.market_implied_risk_aversion(prices)
+    delta = market_implied_risk_aversion(prices)

     mcaps = get_market_caps()

     with pytest.warns(RuntimeWarning):
-        black_litterman.market_implied_prior_returns(mcaps, delta, S.values)
+        market_implied_prior_returns(mcaps, delta, S.values)

-    prior = black_litterman.market_implied_prior_returns(mcaps, delta, S)
+    prior = market_implied_prior_returns(mcaps, delta, S)

     viewdict = {"GOOG": 0.40, "AAPL": -0.30, "FB": 0.30, "BABA": 0}
     bl = BlackLittermanModel(S, pi=prior, absolute_views=viewdict)
@@ -419,7 +422,7 @@ def test_bl_market_automatic():
     rets = bl.bl_returns()

     # Compare with explicit
-    prior = black_litterman.market_implied_prior_returns(mcaps, 1, S, 0)
+    prior = market_implied_prior_returns(mcaps, 1, S, 0)
     bl2 = BlackLittermanModel(S, pi=prior, absolute_views=viewdict)
     rets2 = bl2.bl_returns()
     pd.testing.assert_series_equal(rets, rets2)
@@ -432,8 +435,8 @@ def test_bl_market_automatic():
     # mcaps2 = {k: v for k, v in list(mcaps.items())[::-1]}
     # # mcaps = pd.Series(mcaps)

-    # market_prior1 = black_litterman.market_implied_prior_returns(mcaps, 2, S.values, 0)
-    # market_prior2 = black_litterman.market_implied_prior_returns(mcaps2, 2, S.values, 0)
+    # market_prior1 = market_implied_prior_returns(mcaps, 2, S.values, 0)
+    # market_prior2 = market_implied_prior_returns(mcaps2, 2, S.values, 0)
     # market_prior1 == market_prior2

     # mcaps = pd.Series(mcaps)
@@ -471,10 +474,10 @@ def test_bl_tau():
         resource("spy_prices.csv"), parse_dates=True, index_col=0
     ).squeeze("columns")

-    delta = black_litterman.market_implied_risk_aversion(prices)
+    delta = market_implied_risk_aversion(prices)

     mcaps = get_market_caps()
-    prior = black_litterman.market_implied_prior_returns(mcaps, delta, S)
+    prior = market_implied_prior_returns(mcaps, delta, S)

     viewdict = {"GOOG": 0.40, "AAPL": -0.30, "FB": 0.30, "BABA": 0}

tests/test_cla.py
Lines changed: 6 additions & 5 deletions

@@ -1,8 +1,9 @@
 import numpy as np
 import pytest
-from tests.utilities_for_tests import get_data, setup_cla
+
 from pypfopt import risk_models
 from pypfopt.cla import CLA
+from tests.utilities_for_tests import get_data, setup_cla


 def test_portfolio_performance():
@@ -33,7 +34,7 @@ def test_cla_max_sharpe_long_only():


 def test_cla_max_sharpe_short():
-    cla = CLA(*setup_cla(data_only=True), weight_bounds=(-1, 1))
+    cla = setup_cla(weight_bounds=(-1, 1))
     w = cla.max_sharpe()
     assert isinstance(w, dict)
     assert set(w.keys()) == set(cla.tickers)
@@ -53,7 +54,7 @@ def test_cla_max_sharpe_short():

 def test_cla_custom_bounds():
     bounds = [(0.01, 0.13), (0.02, 0.11)] * 10
-    cla = CLA(*setup_cla(data_only=True), weight_bounds=bounds)
+    cla = setup_cla(weight_bounds=bounds)
     df = get_data()
     cla.cov_matrix = risk_models.exp_cov(df).values
     w = cla.min_volatility()
@@ -64,7 +65,7 @@ def test_cla_custom_bounds():
     assert (0.02 <= cla.weights[1::2]).all() and (cla.weights[1::2] <= 0.11).all()
     # Test polymorphism of the weight_bounds param.
     bounds2 = ([bounds[0][0], bounds[1][0]] * 10, [bounds[0][1], bounds[1][1]] * 10)
-    cla2 = CLA(*setup_cla(data_only=True), weight_bounds=bounds2)
+    cla2 = setup_cla(weight_bounds=bounds2)
     cla2.cov_matrix = risk_models.exp_cov(df).values
     w2 = cla2.min_volatility()
     assert dict(w2) == dict(w)
@@ -126,7 +127,7 @@ def test_cla_max_sharpe_exp_cov():


 def test_cla_min_volatility_exp_cov_short():
-    cla = CLA(*setup_cla(data_only=True), weight_bounds=(-1, 1))
+    cla = setup_cla(weight_bounds=(-1, 1))
     df = get_data()
     cla.cov_matrix = risk_models.exp_cov(df).values
     w = cla.min_volatility()
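
Note: setup_cla presumably mirrors setup_efficient_frontier and forwards weight_bounds to the CLA constructor (the helper itself is not in this excerpt). For reference, the underlying pattern these tests exercise looks roughly like this on toy data:

# Toy data, not the repo's fixtures; exp_cov and weight_bounds=(-1, 1) mirror
# test_cla_min_volatility_exp_cov_short above.
import numpy as np
import pandas as pd
from pypfopt import risk_models
from pypfopt.cla import CLA
from pypfopt.expected_returns import mean_historical_return

prices = pd.DataFrame(
    np.random.default_rng(1).lognormal(0.0, 0.01, size=(250, 4)).cumprod(axis=0) * 100,
    columns=["A", "B", "C", "D"],
)
mu = mean_historical_return(prices)
S = risk_models.exp_cov(prices)  # exponentially-weighted covariance
cla = CLA(mu, S, weight_bounds=(-1, 1))  # allow shorting, as in the tests
w = cla.min_volatility()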
