freqtrade / freqtrade / build 15035809744

25 Apr 2025 06:28AM UTC coverage: 94.385% (+0.03% from 94.358%)
Push build via GitHub, committed by xmatthias: "test: update test for calculate_market_change"

22257 of 23581 relevant lines covered (94.39%)
0.94 hits per line

Source File
/freqtrade/optimize/hyperopt/hyperopt_optimizer.py: 98.58% of lines covered
"""
This module contains the hyperopt optimizer class, which needs to be pickled
and will be sent to the hyperopt worker processes.
"""

import logging
import sys
import warnings
from datetime import datetime, timezone
from typing import Any

from joblib import dump, load
from joblib.externals import cloudpickle
from pandas import DataFrame

from freqtrade.constants import DATETIME_PRINT_FORMAT, Config
from freqtrade.data.converter import trim_dataframes
from freqtrade.data.history import get_timerange
from freqtrade.data.metrics import calculate_market_change
from freqtrade.enums import HyperoptState
from freqtrade.exceptions import OperationalException
from freqtrade.ft_types import BacktestContentType
from freqtrade.misc import deep_merge_dicts
from freqtrade.optimize.backtesting import Backtesting

# Import IHyperOptLoss to allow unpickling classes from these modules
from freqtrade.optimize.hyperopt.hyperopt_auto import HyperOptAuto
from freqtrade.optimize.hyperopt_loss.hyperopt_loss_interface import IHyperOptLoss
from freqtrade.optimize.hyperopt_tools import HyperoptStateContainer, HyperoptTools
from freqtrade.optimize.optimize_reports import generate_strategy_stats
from freqtrade.resolvers.hyperopt_resolver import HyperOptLossResolver
from freqtrade.util.dry_run_wallet import get_dry_run_wallet


# Suppress scikit-learn FutureWarnings from skopt
with warnings.catch_warnings():
    warnings.filterwarnings("ignore", category=FutureWarning)
    from skopt import Optimizer
    from skopt.space import Dimension

logger = logging.getLogger(__name__)


MAX_LOSS = 100000  # just a big enough number to be a bad result in loss optimization

class HyperOptimizer:
    """
    HyperOptimizer class.
    This class is sent to the hyperopt worker processes.
    """

    def __init__(self, config: Config) -> None:
        self.buy_space: list[Dimension] = []
        self.sell_space: list[Dimension] = []
        self.protection_space: list[Dimension] = []
        self.roi_space: list[Dimension] = []
        self.stoploss_space: list[Dimension] = []
        self.trailing_space: list[Dimension] = []
        self.max_open_trades_space: list[Dimension] = []
        self.dimensions: list[Dimension] = []

        self.config = config
        self.min_date: datetime
        self.max_date: datetime

        self.backtesting = Backtesting(self.config)
        self.pairlist = self.backtesting.pairlists.whitelist
        self.custom_hyperopt: HyperOptAuto
        self.analyze_per_epoch = self.config.get("analyze_per_epoch", False)

        if not self.config.get("hyperopt"):
            self.custom_hyperopt = HyperOptAuto(self.config)
        else:
            raise OperationalException(
                "Using separate Hyperopt files has been removed in 2021.9. Please convert "
                "your existing Hyperopt file to the new Hyperoptable strategy interface"
            )

        self.backtesting._set_strategy(self.backtesting.strategylist[0])
        self.custom_hyperopt.strategy = self.backtesting.strategy

        self.hyperopt_pickle_magic(self.backtesting.strategy.__class__.__bases__)
        self.custom_hyperoptloss: IHyperOptLoss = HyperOptLossResolver.load_hyperoptloss(
            self.config
        )
        self.calculate_loss = self.custom_hyperoptloss.hyperopt_loss_function

        self.data_pickle_file = (
            self.config["user_data_dir"] / "hyperopt_results" / "hyperopt_tickerdata.pkl"
        )

        self.market_change = 0.0

        if HyperoptTools.has_space(self.config, "sell"):
            # Make sure use_exit_signal is enabled
            self.config["use_exit_signal"] = True

    def prepare_hyperopt(self) -> None:
        # Initialize spaces ...
        self.init_spaces()

        self.prepare_hyperopt_data()

        # We don't need the exchange instance anymore while running hyperopt
        self.backtesting.exchange.close()
        self.backtesting.exchange._api = None
        self.backtesting.exchange._api_async = None
        self.backtesting.exchange.loop = None  # type: ignore
        self.backtesting.exchange._loop_lock = None  # type: ignore
        self.backtesting.exchange._cache_lock = None  # type: ignore
        # self.backtesting.exchange = None  # type: ignore
        self.backtesting.pairlists = None  # type: ignore
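    # The exchange attributes cleared above hold live API handles, an asyncio event loop
    # and thread locks, none of which can be pickled; dropping them is presumably what
    # keeps this object serializable when it is shipped to the hyperopt worker processes.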

    def get_strategy_name(self) -> str:
        return self.backtesting.strategy.get_strategy_name()

    def hyperopt_pickle_magic(self, bases: tuple[type, ...]) -> None:
        """
        Hyperopt magic to allow strategy inheritance across files.
        For this to work properly, we need to register the module of the imported class
        with pickle by value.
        """
        for modules in bases:
            if modules.__name__ != "IStrategy":
                if mod := sys.modules.get(modules.__module__):
                    cloudpickle.register_pickle_by_value(mod)
                self.hyperopt_pickle_magic(modules.__bases__)
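    # Illustration (hypothetical file names, not part of this module): if MyStrategy in
    # user_data/strategies/my_strategy.py inherits from a helper class defined in a second
    # user file, the worker processes cannot import that helper module by name. Registering
    # its module via cloudpickle.register_pickle_by_value() serializes the module contents
    # together with the strategy object, so it can be unpickled in the workers.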

    def _get_params_dict(
        self, dimensions: list[Dimension], raw_params: list[Any]
    ) -> dict[str, Any]:
        # Ensure the number of dimensions matches
        # the number of parameters in the list.
        if len(raw_params) != len(dimensions):
            raise ValueError("Mismatch in number of search-space dimensions.")

        # Return a dict where the keys are the names of the dimensions
        # and the values are taken from the list of parameters.
        return {d.name: v for d, v in zip(dimensions, raw_params, strict=False)}
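    # Example (hypothetical dimension names): dimensions named "buy_rsi" and "stoploss"
    # combined with raw_params [32, -0.05] yield {"buy_rsi": 32, "stoploss": -0.05}.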

    def _get_params_details(self, params: dict) -> dict:
        """
        Return the params for each space
        """
        result: dict = {}

        if HyperoptTools.has_space(self.config, "buy"):
            result["buy"] = {p.name: params.get(p.name) for p in self.buy_space}
        if HyperoptTools.has_space(self.config, "sell"):
            result["sell"] = {p.name: params.get(p.name) for p in self.sell_space}
        if HyperoptTools.has_space(self.config, "protection"):
            result["protection"] = {p.name: params.get(p.name) for p in self.protection_space}
        if HyperoptTools.has_space(self.config, "roi"):
            result["roi"] = {
                str(k): v for k, v in self.custom_hyperopt.generate_roi_table(params).items()
            }
        if HyperoptTools.has_space(self.config, "stoploss"):
            result["stoploss"] = {p.name: params.get(p.name) for p in self.stoploss_space}
        if HyperoptTools.has_space(self.config, "trailing"):
            result["trailing"] = self.custom_hyperopt.generate_trailing_params(params)
        if HyperoptTools.has_space(self.config, "trades"):
            result["max_open_trades"] = {
                "max_open_trades": (
                    self.backtesting.strategy.max_open_trades
                    if self.backtesting.strategy.max_open_trades != float("inf")
                    else -1
                )
            }

        return result

    def _get_no_optimize_details(self) -> dict[str, Any]:
        """
        Get non-optimized parameters
        """
        result: dict[str, Any] = {}
        strategy = self.backtesting.strategy
        if not HyperoptTools.has_space(self.config, "roi"):
            result["roi"] = {str(k): v for k, v in strategy.minimal_roi.items()}
        if not HyperoptTools.has_space(self.config, "stoploss"):
            result["stoploss"] = {"stoploss": strategy.stoploss}
        if not HyperoptTools.has_space(self.config, "trailing"):
            result["trailing"] = {
                "trailing_stop": strategy.trailing_stop,
                "trailing_stop_positive": strategy.trailing_stop_positive,
                "trailing_stop_positive_offset": strategy.trailing_stop_positive_offset,
                "trailing_only_offset_is_reached": strategy.trailing_only_offset_is_reached,
            }
        if not HyperoptTools.has_space(self.config, "trades"):
            result["max_open_trades"] = {"max_open_trades": strategy.max_open_trades}
        return result

    def init_spaces(self):
        """
        Assign the dimensions in the hyperoptimization space.
        """
        if HyperoptTools.has_space(self.config, "protection"):
            # Protections can only be optimized when using the Parameter interface
            logger.debug("Hyperopt has 'protection' space")
            # Enable Protections if protection space is selected.
            self.config["enable_protections"] = True
            self.backtesting.enable_protections = True
            self.protection_space = self.custom_hyperopt.protection_space()

        if HyperoptTools.has_space(self.config, "buy"):
            logger.debug("Hyperopt has 'buy' space")
            self.buy_space = self.custom_hyperopt.buy_indicator_space()

        if HyperoptTools.has_space(self.config, "sell"):
            logger.debug("Hyperopt has 'sell' space")
            self.sell_space = self.custom_hyperopt.sell_indicator_space()

        if HyperoptTools.has_space(self.config, "roi"):
            logger.debug("Hyperopt has 'roi' space")
            self.roi_space = self.custom_hyperopt.roi_space()

        if HyperoptTools.has_space(self.config, "stoploss"):
            logger.debug("Hyperopt has 'stoploss' space")
            self.stoploss_space = self.custom_hyperopt.stoploss_space()

        if HyperoptTools.has_space(self.config, "trailing"):
            logger.debug("Hyperopt has 'trailing' space")
            self.trailing_space = self.custom_hyperopt.trailing_space()

        if HyperoptTools.has_space(self.config, "trades"):
            logger.debug("Hyperopt has 'trades' space")
            self.max_open_trades_space = self.custom_hyperopt.max_open_trades_space()

        self.dimensions = (
            self.buy_space
            + self.sell_space
            + self.protection_space
            + self.roi_space
            + self.stoploss_space
            + self.trailing_space
            + self.max_open_trades_space
        )
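    # Which spaces are active comes from the user's hyperopt configuration (the "--spaces"
    # option, checked through HyperoptTools.has_space()); only the selected spaces
    # contribute dimensions to the search.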

    def assign_params(self, params_dict: dict[str, Any], category: str) -> None:
        """
        Assign hyperoptable parameters
        """
        for attr_name, attr in self.backtesting.strategy.enumerate_parameters(category):
            if attr.optimize:
                # noinspection PyProtectedMember
                attr.value = params_dict[attr_name]

    def generate_optimizer(self, raw_params: list[Any]) -> dict[str, Any]:
        """
        Objective function used by the optimizer.
        Called once per epoch to optimize whatever is configured.
        Keep this function as optimized as possible!
        """
        HyperoptStateContainer.set_state(HyperoptState.OPTIMIZE)
        backtest_start_time = datetime.now(timezone.utc)
        params_dict = self._get_params_dict(self.dimensions, raw_params)

        # Apply parameters
        if HyperoptTools.has_space(self.config, "buy"):
            self.assign_params(params_dict, "buy")

        if HyperoptTools.has_space(self.config, "sell"):
            self.assign_params(params_dict, "sell")

        if HyperoptTools.has_space(self.config, "protection"):
            self.assign_params(params_dict, "protection")

        if HyperoptTools.has_space(self.config, "roi"):
            self.backtesting.strategy.minimal_roi = self.custom_hyperopt.generate_roi_table(
                params_dict
            )

        if HyperoptTools.has_space(self.config, "stoploss"):
            self.backtesting.strategy.stoploss = params_dict["stoploss"]

        if HyperoptTools.has_space(self.config, "trailing"):
            d = self.custom_hyperopt.generate_trailing_params(params_dict)
            self.backtesting.strategy.trailing_stop = d["trailing_stop"]
            self.backtesting.strategy.trailing_stop_positive = d["trailing_stop_positive"]
            self.backtesting.strategy.trailing_stop_positive_offset = d[
                "trailing_stop_positive_offset"
            ]
            self.backtesting.strategy.trailing_only_offset_is_reached = d[
                "trailing_only_offset_is_reached"
            ]

        if HyperoptTools.has_space(self.config, "trades"):
            if self.config["stake_amount"] == "unlimited" and (
                params_dict["max_open_trades"] == -1 or params_dict["max_open_trades"] == 0
            ):
                # Ignore unlimited max open trades if stake amount is unlimited
                params_dict.update({"max_open_trades": self.config["max_open_trades"]})

            updated_max_open_trades = (
                int(params_dict["max_open_trades"])
                if (params_dict["max_open_trades"] != -1 and params_dict["max_open_trades"] != 0)
                else float("inf")
            )

            self.config.update({"max_open_trades": updated_max_open_trades})

            self.backtesting.strategy.max_open_trades = updated_max_open_trades

        with self.data_pickle_file.open("rb") as f:
            processed = load(f, mmap_mode="r")
            if self.analyze_per_epoch:
                # Data is not yet analyzed, rerun populate_indicators.
                processed = self.advise_and_trim(processed)

        bt_results = self.backtesting.backtest(
            processed=processed, start_date=self.min_date, end_date=self.max_date
        )
        backtest_end_time = datetime.now(timezone.utc)
        bt_results.update(
            {
                "backtest_start_time": int(backtest_start_time.timestamp()),
                "backtest_end_time": int(backtest_end_time.timestamp()),
            }
        )

        return self._get_results_dict(
            bt_results, self.min_date, self.max_date, params_dict, processed=processed
        )

    def _get_results_dict(
        self,
        backtesting_results: BacktestContentType,
        min_date: datetime,
        max_date: datetime,
        params_dict: dict[str, Any],
        processed: dict[str, DataFrame],
    ) -> dict[str, Any]:
        params_details = self._get_params_details(params_dict)

        strat_stats = generate_strategy_stats(
            self.pairlist,
            self.backtesting.strategy.get_strategy_name(),
            backtesting_results,
            min_date,
            max_date,
            market_change=self.market_change,
            is_hyperopt=True,
        )
        results_explanation = HyperoptTools.format_results_explanation_string(
            strat_stats, self.config["stake_currency"]
        )

        not_optimized = self.backtesting.strategy.get_no_optimize_params()
        not_optimized = deep_merge_dicts(not_optimized, self._get_no_optimize_details())

        trade_count = strat_stats["total_trades"]
        total_profit = strat_stats["profit_total"]

        # If this evaluation contains too few trades to be interesting, consider it
        # 'bad' (assign the max. loss value) in order to push this hyperspace point
        # away from the optimization path. We do not want to optimize 'hodl' strategies.
        loss: float = MAX_LOSS
        if trade_count >= self.config["hyperopt_min_trades"]:
            loss = self.calculate_loss(
                results=backtesting_results["results"],
                trade_count=trade_count,
                min_date=min_date,
                max_date=max_date,
                config=self.config,
                processed=processed,
                backtest_stats=strat_stats,
                starting_balance=get_dry_run_wallet(self.config),
            )
        return {
            "loss": loss,
            "params_dict": params_dict,
            "params_details": params_details,
            "params_not_optimized": not_optimized,
            "results_metrics": strat_stats,
            "results_explanation": results_explanation,
            "total_profit": total_profit,
        }

    def get_optimizer(
        self,
        cpu_count: int,
        random_state: int,
        initial_points: int,
        model_queue_size: int,
    ) -> Optimizer:
        dimensions = self.dimensions
        estimator = self.custom_hyperopt.generate_estimator(dimensions=dimensions)

        acq_optimizer = "sampling"
        if isinstance(estimator, str):
            if estimator not in ("GP", "RF", "ET", "GBRT"):
                raise OperationalException(f"Estimator {estimator} not supported.")
            else:
                acq_optimizer = "auto"

        logger.info(f"Using estimator {estimator}.")
        return Optimizer(
            dimensions,
            base_estimator=estimator,
            acq_optimizer=acq_optimizer,
            n_initial_points=initial_points,
            acq_optimizer_kwargs={"n_jobs": cpu_count},
            random_state=random_state,
            model_queue_size=model_queue_size,
        )
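    # The accepted estimator strings are skopt's built-in base estimators: "GP"
    # (Gaussian process), "RF" (random forest), "ET" (extra trees) and "GBRT"
    # (gradient boosted regression trees). The isinstance check above implies that
    # generate_estimator() may also return a ready-made regressor instance instead.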

    def advise_and_trim(self, data: dict[str, DataFrame]) -> dict[str, DataFrame]:
        preprocessed = self.backtesting.strategy.advise_all_indicators(data)

        # Trim startup period from analyzed dataframe to get correct dates for output.
        # This is only used to keep track of min/max date after trimming.
        # The result is NOT returned from this method, actual trimming happens in backtesting.
        trimmed = trim_dataframes(preprocessed, self.timerange, self.backtesting.required_startup)
        self.min_date, self.max_date = get_timerange(trimmed)
        if not self.market_change:
            self.market_change = calculate_market_change(trimmed, "close")

        # Real trimming will happen as part of backtesting.
        return preprocessed

    def prepare_hyperopt_data(self) -> None:
        HyperoptStateContainer.set_state(HyperoptState.DATALOAD)
        data, self.timerange = self.backtesting.load_bt_data()
        self.backtesting.load_bt_data_detail()
        logger.info("Dataload complete. Calculating indicators")

        if not self.analyze_per_epoch:
            HyperoptStateContainer.set_state(HyperoptState.INDICATORS)

            preprocessed = self.advise_and_trim(data)

            logger.info(
                f"Hyperopting with data from "
                f"{self.min_date.strftime(DATETIME_PRINT_FORMAT)} "
                f"up to {self.max_date.strftime(DATETIME_PRINT_FORMAT)} "
                f"({(self.max_date - self.min_date).days} days).."
            )
            # Store non-trimmed data - will be trimmed after signal generation.
            dump(preprocessed, self.data_pickle_file)
        else:
            dump(data, self.data_pickle_file)
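
For orientation, the class above is normally driven by freqtrade's hyperopt command rather than used directly. The following is a minimal, hypothetical driver sketch (the function name run_simple_hyperopt, the config argument and the epoch count are assumptions, not part of this file) showing how prepare_hyperopt(), get_optimizer() and generate_optimizer() fit together with skopt's ask/tell loop:

# Hypothetical single-process driver, for illustration only.
from freqtrade.optimize.hyperopt.hyperopt_optimizer import HyperOptimizer

def run_simple_hyperopt(config, epochs: int = 100):
    # config is assumed to be a fully validated freqtrade Config dict.
    opt = HyperOptimizer(config)
    opt.prepare_hyperopt()
    optimizer = opt.get_optimizer(
        cpu_count=1, random_state=42, initial_points=30, model_queue_size=10
    )
    best = None
    for _ in range(epochs):
        asked = optimizer.ask()                 # one candidate point in the search space
        result = opt.generate_optimizer(asked)  # backtest and score that point
        optimizer.tell(asked, result["loss"])   # feed the loss back to skopt
        if best is None or result["loss"] < best["loss"]:
            best = result
    return best

The real hyperopt runner additionally parallelizes generate_optimizer() across worker processes, which is why this class is kept picklable (see the module docstring).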