Add tests for genetic algorithm optimizer
diogomatoschaves committed Mar 1, 2024
1 parent 5ad6354 commit d26e2ca
Showing 7 changed files with 119 additions and 26 deletions.
13 changes: 8 additions & 5 deletions stratestic/backtesting/_mixin.py
@@ -280,7 +280,7 @@ def optimize(
self,
params,
optimization_metric="Return",
optimizer: Literal["brute", "gen_alg"] = 'brute',
optimizer: Literal["brute_force", "gen_alg"] = 'brute_force',
**kwargs
):
"""Optimizes the trading strategy using brute force.
@@ -321,7 +321,7 @@ def optimize(
opt_params, strategy_params_mapping, optimization_steps = adapt_optimization_input(self.strategy, params)

self.bar = progressbar.ProgressBar(
max_value=optimization_steps if self._optimizer == 'brute' else progressbar.UnknownLength,
max_value=optimization_steps if self._optimizer == 'brute_force' else progressbar.UnknownLength,
redirect_stdout=True
)
self.optimization_steps = 0
@@ -340,8 +340,11 @@
self.bar.finish()
print()

return (get_params_mapping(self.strategy, opt, strategy_params_mapping, params),
-self._update_and_run(opt, True, True, strategy_params_mapping, params))
optimized_params = get_params_mapping(self.strategy, opt, strategy_params_mapping, params)
optimized_result = self._update_and_run(opt, True, True, strategy_params_mapping, params)
optimized_result = optimized_result if self._optimizer == 'gen_alg' else -optimized_result

return optimized_params, optimized_result

def maximum_leverage(self, margin_threshold=None):
"""
@@ -427,7 +430,7 @@ def _check_metric_input(self, optimization_metric):
self.optimization_metric = optimization_options[optimization_metric]

def _check_optimizer_input(self, optimizer):
optimizer_options = ["brute", "gen_alg"]
optimizer_options = ["brute_force", "gen_alg"]
if optimizer not in optimizer_options:
raise ValueError(f"The selected optimizer is not supported. "

Check warning on line 435 in stratestic/backtesting/_mixin.py

View check run for this annotation

Codecov / codecov/patch

stratestic/backtesting/_mixin.py#L435

Added line #L435 was not covered by tests
f"Choose one of: {', '.join(optimizer_options)}")
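A note on the sign handling in `optimize` above: `scipy.optimize.brute` only minimizes, so the strategy runner returns the negated metric and the brute-force branch flips the sign back, while the gen_alg result is taken as-is. A minimal sketch of that convention with a made-up metric (not the library's actual strategy runner):

from scipy.optimize import brute

def objective(params):
    x, y = params
    metric = -((x - 3) ** 2 + (y - 1) ** 2)  # toy metric, peaks at (3, 1)
    return -metric  # brute minimizes, so minimize the negated metric

best = brute(objective, (slice(0, 6, 1), slice(0, 3, 1)), finish=None)
print(best, -objective(best))  # grid optimum [3. 1.] and the metric, negated back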
6 changes: 3 additions & 3 deletions stratestic/backtesting/optimization/_optimization.py
@@ -47,7 +47,7 @@ def strategy_optimizer(
strategy_runner,
opt_params: tuple,
runner_args: tuple,
optimizer: Literal["brute", "gen_alg"] = 'brute',
optimizer: Literal["brute_force", "gen_alg"] = 'brute',
**kwargs
):
"""
@@ -63,7 +63,7 @@
(start2, stop2, step2), ...).
runner_args : tuple
Additional arguments required by the `strategy_runner` function.
optimizer : Literal["brute", "gen_alg"]
optimizer : Literal["brute_force", "gen_alg"]
Choice of algorithm for the optimization.
**kwargs : dict
Additional keyword arguments passed to the underlying optimizer: scipy.optimize's `brute`, or the genetic algorithm solver (e.g. pop_size, max_gen, random_state).
@@ -79,7 +79,7 @@
defined by `opt_params`. The objective function to be optimized is defined by `strategy_runner`.
"""

if optimizer == "brute":
if optimizer == "brute_force":

return brute(
strategy_runner,
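The gen_alg branch is elided above, but the tests added in this commit drive it with pop_size, max_gen, and random_state passed through **kwargs. As a rough, hand-rolled sketch of a continuous genetic-algorithm search over box bounds (illustrative only; this is not the solver the library actually uses):

import numpy as np

def toy_gen_alg(objective, bounds, pop_size=20, max_gen=50, random_state=None):
    """Minimize objective over box bounds with a toy GA (illustration only)."""
    rng = np.random.default_rng(random_state)
    low = np.array([b[0] for b in bounds], dtype=float)
    high = np.array([b[1] for b in bounds], dtype=float)
    pop = rng.uniform(low, high, size=(pop_size, len(bounds)))
    for _ in range(max_gen):
        scores = np.array([objective(ind) for ind in pop])
        keep = pop[np.argsort(scores)[: pop_size // 2]]  # selection: fittest half
        pairs = keep[rng.integers(0, len(keep), size=(pop_size - len(keep), 2))]
        children = pairs.mean(axis=1)                    # crossover: parent midpoint
        children += rng.normal(0, 0.1 * (high - low), children.shape)  # mutation
        pop = np.clip(np.vstack([keep, children]), low, high)
    scores = np.array([objective(ind) for ind in pop])
    return pop[np.argmin(scores)], scores.min()

With random_state fixed, repeated runs are reproducible, which is what the determinism assertions in the tests below rely on.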
@@ -3,7 +3,10 @@
expected_performance = 0.985337
expected_outperformance = -0.030928

expected_optimization_results = ({'ma': 2.0, 'sd': 1.0}, 0)
expected_optimization_results = {
"brute_force": ({'ma': 2.0, 'sd': 1.0}, 0),
"gen_alg": ({'ma': 5.0, 'sd': 5.0}, 0),
}

expected_results = [
{
@@ -1,6 +1,10 @@
import numpy as np
from pandas import Timestamp

expected_optimization_results = ({'nr_lags': 2.0}, 13.175614657220974)
expected_optimization_results = {
"brute_force": ({'nr_lags': 2.0}, 13.175614657220974),
"gen_alg": ({'nr_lags': 5.0}, np.inf),
}

expected_results = [
{
5 changes: 4 additions & 1 deletion tests/backtesting/iterative/out/momentum_no_trading_costs.py
@@ -3,7 +3,10 @@
expected_performance = 1.009399
expected_outperformance = -0.006867

expected_optimization_results = ({'window': 5.0}, -0.2131523135281635)
expected_optimization_results = {
"brute_force": ({'window': 5.0}, -0.2131523135281635),
"gen_alg": ({'window': 5.0}, -0.2131523135281635),
}

expected_results = [
{
@@ -3,7 +3,10 @@
expected_performance = 1.003375
expected_outperformance = -0.003015

expected_optimization_results = ({'sma_l': 6.0, 'sma_s': 1.0}, 100.0)
expected_optimization_results = {
"brute_force": ({'sma_l': 6.0, 'sma_s': 1.0}, 100.0),
"gen_alg": ({'sma_s': 5.0, 'sma_l': 7.0}, 100.0)
}

expected_results = [
{
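Each fixture's expected_optimization_results is now a dict keyed by optimizer name, so a single fixture file serves both parametrized branches. Based on the assertions below, a test consumes it roughly as:

expected = fixture["out"]["expected_optimization_results"][optimizer]  # "brute_force" or "gen_alg"
assert ite.optimize(*optimization_params, optimizer=optimizer, **extra_args) == expected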
105 changes: 91 additions & 14 deletions tests/backtesting/iterative/test_iterative.py
@@ -162,14 +162,29 @@ def test_optimize_parameters_input_validation(
backtester.load_data(data=test_data)
backtester.optimize(optimization_params)

@pytest.mark.parametrize(
"optimizer,extra_args",
[
pytest.param(
"brute_force",
{},
id="BruteForce",
),
pytest.param(
"gen_alg",
{"pop_size": 4, "max_gen": 1, "random_state": 42},
id="GenAlg",
)
]
)
@pytest.mark.parametrize(
"fixture",
[
pytest.param(fixture, id=fixture_name)
for fixture_name, fixture in fixtures.items()
],
)
def test_optimize_parameters(self, fixture, common_fixture):
def test_optimize_parameters(self, fixture, optimizer, extra_args, common_fixture):
strategy = fixture["in"]["strategy"]
params = fixture["in"]["params"]
optimization_params = fixture["in"]["optimization_params"]
@@ -181,11 +196,25 @@ def test_optimize_parameters(self, fixture, common_fixture):

ite = IterativeBacktester(strategy_instance, trading_costs=trading_costs)

optimization_results = ite.optimize(*optimization_params)
optimization_results = ite.optimize(*optimization_params, optimizer=optimizer, **extra_args)

assert optimization_results == fixture["out"]["expected_optimization_results"]
assert optimization_results == fixture["out"]["expected_optimization_results"][optimizer]

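The gen_alg cases run with a tiny population (pop_size=4), a single generation (max_gen=1), and a fixed seed (random_state=42) to keep the suite fast and deterministic. Called outside the test harness, the new keyword arguments look roughly like this (the parameter range is invented for illustration):

optimization_results, perf = ite.optimize(
    [{"window": (2, 6)}],  # hypothetical parameter range
    optimizer="gen_alg",
    pop_size=4, max_gen=1, random_state=42,
)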
@pytest.mark.slow
@pytest.mark.parametrize(
"optimizer,extra_args",
[
pytest.param(
"brute_force",
{},
id="BruteForce",
),
pytest.param(
"gen_alg",
{"pop_size": 4, "max_gen": 1, "random_state": 42},
id="GenAlg",
)
]
)
@pytest.mark.parametrize(
"input_params,optimization_params,expected_results",
[
@@ -195,7 +224,10 @@ def test_optimize_parameters(self, fixture, common_fixture):
"method": "Unanimous"
},
[{"window": (2, 4)}, {"ma": (1, 3)}],
[{'window': 3.0}, {'ma': 2.0}],
{
"brute_force": [{'window': 3.0}, {'ma': 2.0}],
"gen_alg": [{'window': 4.0}, {'ma': 3.0}]
},
id='2_strategies-unanimous'
),
pytest.param(
@@ -204,7 +236,10 @@ def test_optimize_parameters(self, fixture, common_fixture):
"method": "Majority"
},
[{"window": (2, 4)}, {"ma": (1, 3)}],
[{'window': 3.0}, {'ma': 1.0}],
{
"brute_force": [{'window': 3.0}, {'ma': 1.0}],
"gen_alg": [{'window': 4.0}, {'ma': 3.0}]
},
id='2_strategies-majority'
),
pytest.param(
@@ -213,7 +248,10 @@ def test_optimize_parameters(self, fixture, common_fixture):
"method": "Majority"
},
[{"window": (2, 4)}, {"ma": (1, 3)}, {}],
[{'window': 3.0}, {'ma': 2.0}, {}],
{
"brute_force": [{'window': 3.0}, {'ma': 2.0}, {}],
"gen_alg": [{'window': 4.0}, {'ma': 3.0}, {}]
},
id='3_strategies-majority'
),
pytest.param(
@@ -222,7 +260,10 @@ def test_optimize_parameters(self, fixture, common_fixture):
"method": "Unanimous"
},
[{"sma_s": (2, 4), "sma_l": (4, 6)}],
[{'sma_s': 3.0, 'sma_l': 4.0}],
{
"brute_force": [{'sma_s': 3.0, 'sma_l': 4.0}],
"gen_alg": [{'sma_l': 6.0, 'sma_s': 4.0}]
},
id='1_strategies-unanimous'
),
],
@@ -232,6 +273,8 @@ def test_optimize_parameters_combined_strategies(
input_params,
optimization_params,
expected_results,
optimizer,
extra_args,
common_fixture
):
test_data = data.set_index("open_time")
@@ -240,10 +283,25 @@ def test_optimize_parameters_combined_strategies(

ite = IterativeBacktester(strategy_instance)

optimization_results, perf = ite.optimize(optimization_params)
optimization_results, perf = ite.optimize(optimization_params, optimizer=optimizer, **extra_args)

assert optimization_results == expected_results
assert optimization_results == expected_results[optimizer]

@pytest.mark.parametrize(
"optimizer,extra_args",
[
pytest.param(
"brute_force",
{},
id="BruteForce",
),
pytest.param(
"gen_alg",
{"pop_size": 4, "max_gen": 1, "random_state": 42},
id="GenAlg",
)
]
)
@pytest.mark.parametrize(
"input_params,optimization_params",
[
@@ -269,6 +327,8 @@ def test_load_data_optimize_parameters_combined_strategies(
self,
input_params,
optimization_params,
optimizer,
extra_args,
common_fixture
):
test_data = data.set_index("open_time")
@@ -280,12 +340,27 @@ def test_load_data_optimize_parameters_combined_strategies(
ite_2 = IterativeBacktester(strategy_instance_2)
ite_2.load_data(data=test_data)

optimization_results_1, perf_1 = ite_1.optimize(optimization_params)
optimization_results_2, perf_2 = ite_2.optimize(optimization_params)
optimization_results_1, perf_1 = ite_1.optimize(optimization_params, optimizer=optimizer, **extra_args)
optimization_results_2, perf_2 = ite_2.optimize(optimization_params, optimizer=optimizer, **extra_args)

assert optimization_results_1 == optimization_results_2
assert perf_1 == perf_2

@pytest.mark.parametrize(
"optimizer,extra_args",
[
pytest.param(
"brute_force",
{},
id="BruteForce",
),
pytest.param(
"gen_alg",
{"pop_size": 4, "max_gen": 1, "random_state": 42},
id="GenAlg",
)
]
)
@pytest.mark.parametrize(
"input_params,optimization_params",
[
@@ -311,6 +386,8 @@ def test_optimize_parameters_combined_strategies_equal_to_vectorized(
self,
input_params,
optimization_params,
optimizer,
extra_args,
common_fixture
):
test_data = data.set_index("open_time")
@@ -321,8 +398,8 @@ def test_optimize_parameters_combined_strategies_equal_to_vectorized(
ite = IterativeBacktester(strategy_instance_1)
vect = VectorizedBacktester(strategy_instance_2)

optimization_results_1, perf_1 = ite.optimize(optimization_params)
optimization_results_2, perf_2 = vect.optimize(optimization_params)
optimization_results_1, perf_1 = ite.optimize(optimization_params, optimizer=optimizer, **extra_args)
optimization_results_2, perf_2 = vect.optimize(optimization_params, optimizer=optimizer, **extra_args)

assert optimization_results_1 == optimization_results_2
assert perf_1 == perf_2
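Taken together, the new surface can be exercised end to end roughly as follows; the import paths follow the repository's conventions, while the strategy class, its argument, and candles_df are assumed for illustration:

from stratestic.backtesting import IterativeBacktester
from stratestic.strategies import MovingAverage  # assumed strategy class

backtester = IterativeBacktester(MovingAverage(4))  # hypothetical window argument
backtester.load_data(data=candles_df)              # candles_df: assumed OHLCV DataFrame

# Exhaustive grid search (scipy.optimize.brute under the hood)
params_bf, perf_bf = backtester.optimize([{"ma": (2, 10)}], optimizer="brute_force")

# Genetic algorithm with the deterministic settings used in the tests
params_ga, perf_ga = backtester.optimize(
    [{"ma": (2, 10)}], optimizer="gen_alg", pop_size=4, max_gen=1, random_state=42,
)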
