freqtrade / freqtrade / 15035825052
09 Apr 2025 08:08AM UTC, coverage: 94.373% (+0.007% from 94.366%)
push via github (web-flow): Update dependabot.yml
22172 of 23494 relevant lines covered (94.37%), 0.94 hits per line

Source File: /freqtrade/optimize/hyperopt/hyperopt_optimizer.py (98.58% of lines covered)
The three uncovered statements in this file are the legacy-hyperopt OperationalException raise in __init__, the dimension-mismatch ValueError raise in _get_params_dict, and the per-epoch advise_and_trim call in generate_optimizer.

"""
This module contains the hyperopt optimizer class, which needs to be pickled
and will be sent to the hyperopt worker processes.
"""

import logging
import sys
import warnings
from datetime import datetime, timezone
from typing import Any

from joblib import dump, load
from joblib.externals import cloudpickle
from pandas import DataFrame

from freqtrade.constants import DATETIME_PRINT_FORMAT, Config
from freqtrade.data.converter import trim_dataframes
from freqtrade.data.history import get_timerange
from freqtrade.data.metrics import calculate_market_change
from freqtrade.enums import HyperoptState
from freqtrade.exceptions import OperationalException
from freqtrade.misc import deep_merge_dicts
from freqtrade.optimize.backtesting import Backtesting

# Import IHyperOptLoss to allow unpickling classes from these modules
from freqtrade.optimize.hyperopt.hyperopt_auto import HyperOptAuto
from freqtrade.optimize.hyperopt_loss.hyperopt_loss_interface import IHyperOptLoss
from freqtrade.optimize.hyperopt_tools import HyperoptStateContainer, HyperoptTools
from freqtrade.optimize.optimize_reports import generate_strategy_stats
from freqtrade.resolvers.hyperopt_resolver import HyperOptLossResolver
from freqtrade.util.dry_run_wallet import get_dry_run_wallet


# Suppress scikit-learn FutureWarnings from skopt
with warnings.catch_warnings():
    warnings.filterwarnings("ignore", category=FutureWarning)
    from skopt import Optimizer
    from skopt.space import Dimension

logger = logging.getLogger(__name__)


MAX_LOSS = 100000  # just a big enough number to be bad result in loss optimization


class HyperOptimizer:
    """
    HyperoptOptimizer class
    This class is sent to the hyperopt worker processes.
    """

    def __init__(self, config: Config) -> None:
        self.buy_space: list[Dimension] = []
        self.sell_space: list[Dimension] = []
        self.protection_space: list[Dimension] = []
        self.roi_space: list[Dimension] = []
        self.stoploss_space: list[Dimension] = []
        self.trailing_space: list[Dimension] = []
        self.max_open_trades_space: list[Dimension] = []
        self.dimensions: list[Dimension] = []

        self.config = config
        self.min_date: datetime
        self.max_date: datetime

        self.backtesting = Backtesting(self.config)
        self.pairlist = self.backtesting.pairlists.whitelist
        self.custom_hyperopt: HyperOptAuto
        self.analyze_per_epoch = self.config.get("analyze_per_epoch", False)

        if not self.config.get("hyperopt"):
            self.custom_hyperopt = HyperOptAuto(self.config)
        else:
            raise OperationalException(
                "Using separate Hyperopt files has been removed in 2021.9. Please convert "
                "your existing Hyperopt file to the new Hyperoptable strategy interface"
            )

        self.backtesting._set_strategy(self.backtesting.strategylist[0])
        self.custom_hyperopt.strategy = self.backtesting.strategy

        self.hyperopt_pickle_magic(self.backtesting.strategy.__class__.__bases__)
        self.custom_hyperoptloss: IHyperOptLoss = HyperOptLossResolver.load_hyperoptloss(
            self.config
        )
        self.calculate_loss = self.custom_hyperoptloss.hyperopt_loss_function

        self.data_pickle_file = (
            self.config["user_data_dir"] / "hyperopt_results" / "hyperopt_tickerdata.pkl"
        )

        self.market_change = 0.0

        if HyperoptTools.has_space(self.config, "sell"):
            # Make sure use_exit_signal is enabled
            self.config["use_exit_signal"] = True

    def prepare_hyperopt(self) -> None:
        # Initialize spaces ...
        self.init_spaces()

        self.prepare_hyperopt_data()

        # We don't need exchange instance anymore while running hyperopt
        self.backtesting.exchange.close()
        self.backtesting.exchange._api = None
        self.backtesting.exchange._api_async = None
        self.backtesting.exchange.loop = None  # type: ignore
        self.backtesting.exchange._loop_lock = None  # type: ignore
        self.backtesting.exchange._cache_lock = None  # type: ignore
        # self.backtesting.exchange = None  # type: ignore
        self.backtesting.pairlists = None  # type: ignore

    def get_strategy_name(self) -> str:
        return self.backtesting.strategy.get_strategy_name()

    def hyperopt_pickle_magic(self, bases: tuple[type, ...]) -> None:
        """
        Hyperopt magic to allow strategy inheritance across files.
        For this to properly work, we need to register the module of the imported class
        to pickle as value.
        """
        for modules in bases:
            if modules.__name__ != "IStrategy":
                if mod := sys.modules.get(modules.__module__):
                    cloudpickle.register_pickle_by_value(mod)
                self.hyperopt_pickle_magic(modules.__bases__)

    def _get_params_dict(
        self, dimensions: list[Dimension], raw_params: list[Any]
    ) -> dict[str, Any]:
        # Ensure the number of dimensions match
        # the number of parameters in the list.
        if len(raw_params) != len(dimensions):
            raise ValueError("Mismatch in number of search-space dimensions.")

        # Return a dict where the keys are the names of the dimensions
        # and the values are taken from the list of parameters.
        return {d.name: v for d, v in zip(dimensions, raw_params, strict=False)}

    def _get_params_details(self, params: dict) -> dict:
        """
        Return the params for each space
        """
        result: dict = {}

        if HyperoptTools.has_space(self.config, "buy"):
            result["buy"] = {p.name: params.get(p.name) for p in self.buy_space}
        if HyperoptTools.has_space(self.config, "sell"):
            result["sell"] = {p.name: params.get(p.name) for p in self.sell_space}
        if HyperoptTools.has_space(self.config, "protection"):
            result["protection"] = {p.name: params.get(p.name) for p in self.protection_space}
        if HyperoptTools.has_space(self.config, "roi"):
            result["roi"] = {
                str(k): v for k, v in self.custom_hyperopt.generate_roi_table(params).items()
            }
        if HyperoptTools.has_space(self.config, "stoploss"):
            result["stoploss"] = {p.name: params.get(p.name) for p in self.stoploss_space}
        if HyperoptTools.has_space(self.config, "trailing"):
            result["trailing"] = self.custom_hyperopt.generate_trailing_params(params)
        if HyperoptTools.has_space(self.config, "trades"):
            result["max_open_trades"] = {
                "max_open_trades": (
                    self.backtesting.strategy.max_open_trades
                    if self.backtesting.strategy.max_open_trades != float("inf")
                    else -1
                )
            }

        return result

    def _get_no_optimize_details(self) -> dict[str, Any]:
        """
        Get non-optimized parameters
        """
        result: dict[str, Any] = {}
        strategy = self.backtesting.strategy
        if not HyperoptTools.has_space(self.config, "roi"):
            result["roi"] = {str(k): v for k, v in strategy.minimal_roi.items()}
        if not HyperoptTools.has_space(self.config, "stoploss"):
            result["stoploss"] = {"stoploss": strategy.stoploss}
        if not HyperoptTools.has_space(self.config, "trailing"):
            result["trailing"] = {
                "trailing_stop": strategy.trailing_stop,
                "trailing_stop_positive": strategy.trailing_stop_positive,
                "trailing_stop_positive_offset": strategy.trailing_stop_positive_offset,
                "trailing_only_offset_is_reached": strategy.trailing_only_offset_is_reached,
            }
        if not HyperoptTools.has_space(self.config, "trades"):
            result["max_open_trades"] = {"max_open_trades": strategy.max_open_trades}
        return result

    def init_spaces(self):
        """
        Assign the dimensions in the hyperoptimization space.
        """
        if HyperoptTools.has_space(self.config, "protection"):
            # Protections can only be optimized when using the Parameter interface
            logger.debug("Hyperopt has 'protection' space")
            # Enable Protections if protection space is selected.
            self.config["enable_protections"] = True
            self.backtesting.enable_protections = True
            self.protection_space = self.custom_hyperopt.protection_space()

        if HyperoptTools.has_space(self.config, "buy"):
            logger.debug("Hyperopt has 'buy' space")
            self.buy_space = self.custom_hyperopt.buy_indicator_space()

        if HyperoptTools.has_space(self.config, "sell"):
            logger.debug("Hyperopt has 'sell' space")
            self.sell_space = self.custom_hyperopt.sell_indicator_space()

        if HyperoptTools.has_space(self.config, "roi"):
            logger.debug("Hyperopt has 'roi' space")
            self.roi_space = self.custom_hyperopt.roi_space()

        if HyperoptTools.has_space(self.config, "stoploss"):
            logger.debug("Hyperopt has 'stoploss' space")
            self.stoploss_space = self.custom_hyperopt.stoploss_space()

        if HyperoptTools.has_space(self.config, "trailing"):
            logger.debug("Hyperopt has 'trailing' space")
            self.trailing_space = self.custom_hyperopt.trailing_space()

        if HyperoptTools.has_space(self.config, "trades"):
            logger.debug("Hyperopt has 'trades' space")
            self.max_open_trades_space = self.custom_hyperopt.max_open_trades_space()

        self.dimensions = (
            self.buy_space
            + self.sell_space
            + self.protection_space
            + self.roi_space
            + self.stoploss_space
            + self.trailing_space
            + self.max_open_trades_space
        )

    def assign_params(self, params_dict: dict[str, Any], category: str) -> None:
        """
        Assign hyperoptable parameters
        """
        for attr_name, attr in self.backtesting.strategy.enumerate_parameters(category):
            if attr.optimize:
                # noinspection PyProtectedMember
                attr.value = params_dict[attr_name]

    def generate_optimizer(self, raw_params: list[Any]) -> dict[str, Any]:
        """
        Used Optimize function.
        Called once per epoch to optimize whatever is configured.
        Keep this function as optimized as possible!
        """
        HyperoptStateContainer.set_state(HyperoptState.OPTIMIZE)
        backtest_start_time = datetime.now(timezone.utc)
        params_dict = self._get_params_dict(self.dimensions, raw_params)

        # Apply parameters
        if HyperoptTools.has_space(self.config, "buy"):
            self.assign_params(params_dict, "buy")

        if HyperoptTools.has_space(self.config, "sell"):
            self.assign_params(params_dict, "sell")

        if HyperoptTools.has_space(self.config, "protection"):
            self.assign_params(params_dict, "protection")

        if HyperoptTools.has_space(self.config, "roi"):
            self.backtesting.strategy.minimal_roi = self.custom_hyperopt.generate_roi_table(
                params_dict
            )

        if HyperoptTools.has_space(self.config, "stoploss"):
            self.backtesting.strategy.stoploss = params_dict["stoploss"]

        if HyperoptTools.has_space(self.config, "trailing"):
            d = self.custom_hyperopt.generate_trailing_params(params_dict)
            self.backtesting.strategy.trailing_stop = d["trailing_stop"]
            self.backtesting.strategy.trailing_stop_positive = d["trailing_stop_positive"]
            self.backtesting.strategy.trailing_stop_positive_offset = d[
                "trailing_stop_positive_offset"
            ]
            self.backtesting.strategy.trailing_only_offset_is_reached = d[
                "trailing_only_offset_is_reached"
            ]

        if HyperoptTools.has_space(self.config, "trades"):
            if self.config["stake_amount"] == "unlimited" and (
                params_dict["max_open_trades"] == -1 or params_dict["max_open_trades"] == 0
            ):
                # Ignore unlimited max open trades if stake amount is unlimited
                params_dict.update({"max_open_trades": self.config["max_open_trades"]})

            updated_max_open_trades = (
                int(params_dict["max_open_trades"])
                if (params_dict["max_open_trades"] != -1 and params_dict["max_open_trades"] != 0)
                else float("inf")
            )

            self.config.update({"max_open_trades": updated_max_open_trades})

            self.backtesting.strategy.max_open_trades = updated_max_open_trades

        with self.data_pickle_file.open("rb") as f:
            processed = load(f, mmap_mode="r")
            if self.analyze_per_epoch:
                # Data is not yet analyzed, rerun populate_indicators.
                processed = self.advise_and_trim(processed)

        bt_results = self.backtesting.backtest(
            processed=processed, start_date=self.min_date, end_date=self.max_date
        )
        backtest_end_time = datetime.now(timezone.utc)
        bt_results.update(
            {
                "backtest_start_time": int(backtest_start_time.timestamp()),
                "backtest_end_time": int(backtest_end_time.timestamp()),
            }
        )

        return self._get_results_dict(
            bt_results, self.min_date, self.max_date, params_dict, processed=processed
        )

    def _get_results_dict(
        self,
        backtesting_results: dict[str, Any],
        min_date: datetime,
        max_date: datetime,
        params_dict: dict[str, Any],
        processed: dict[str, DataFrame],
    ) -> dict[str, Any]:
        params_details = self._get_params_details(params_dict)

        strat_stats = generate_strategy_stats(
            self.pairlist,
            self.backtesting.strategy.get_strategy_name(),
            backtesting_results,
            min_date,
            max_date,
            market_change=self.market_change,
            is_hyperopt=True,
        )
        results_explanation = HyperoptTools.format_results_explanation_string(
            strat_stats, self.config["stake_currency"]
        )

        not_optimized = self.backtesting.strategy.get_no_optimize_params()
        not_optimized = deep_merge_dicts(not_optimized, self._get_no_optimize_details())

        trade_count = strat_stats["total_trades"]
        total_profit = strat_stats["profit_total"]

        # If this evaluation contains too short amount of trades to be
        # interesting -- consider it as 'bad' (assigned max. loss value)
        # in order to cast this hyperspace point away from optimization
        # path. We do not want to optimize 'hodl' strategies.
        loss: float = MAX_LOSS
        if trade_count >= self.config["hyperopt_min_trades"]:
            loss = self.calculate_loss(
                results=backtesting_results["results"],
                trade_count=trade_count,
                min_date=min_date,
                max_date=max_date,
                config=self.config,
                processed=processed,
                backtest_stats=strat_stats,
                starting_balance=get_dry_run_wallet(self.config),
            )
        return {
            "loss": loss,
            "params_dict": params_dict,
            "params_details": params_details,
            "params_not_optimized": not_optimized,
            "results_metrics": strat_stats,
            "results_explanation": results_explanation,
            "total_profit": total_profit,
        }

    def get_optimizer(
        self,
        cpu_count: int,
        random_state: int,
        initial_points: int,
        model_queue_size: int,
    ) -> Optimizer:
        dimensions = self.dimensions
        estimator = self.custom_hyperopt.generate_estimator(dimensions=dimensions)

        acq_optimizer = "sampling"
        if isinstance(estimator, str):
            if estimator not in ("GP", "RF", "ET", "GBRT"):
                raise OperationalException(f"Estimator {estimator} not supported.")
            else:
                acq_optimizer = "auto"

        logger.info(f"Using estimator {estimator}.")
        return Optimizer(
            dimensions,
            base_estimator=estimator,
            acq_optimizer=acq_optimizer,
            n_initial_points=initial_points,
            acq_optimizer_kwargs={"n_jobs": cpu_count},
            random_state=random_state,
            model_queue_size=model_queue_size,
        )

    def advise_and_trim(self, data: dict[str, DataFrame]) -> dict[str, DataFrame]:
        preprocessed = self.backtesting.strategy.advise_all_indicators(data)

        # Trim startup period from analyzed dataframe to get correct dates for output.
        # This is only used to keep track of min/max date after trimming.
        # The result is NOT returned from this method, actual trimming happens in backtesting.
        trimmed = trim_dataframes(preprocessed, self.timerange, self.backtesting.required_startup)
        self.min_date, self.max_date = get_timerange(trimmed)
        if not self.market_change:
            self.market_change = calculate_market_change(trimmed, "close")

        # Real trimming will happen as part of backtesting.
        return preprocessed

    def prepare_hyperopt_data(self) -> None:
        HyperoptStateContainer.set_state(HyperoptState.DATALOAD)
        data, self.timerange = self.backtesting.load_bt_data()
        self.backtesting.load_bt_data_detail()
        logger.info("Dataload complete. Calculating indicators")

        if not self.analyze_per_epoch:
            HyperoptStateContainer.set_state(HyperoptState.INDICATORS)

            preprocessed = self.advise_and_trim(data)

            logger.info(
                f"Hyperopting with data from "
                f"{self.min_date.strftime(DATETIME_PRINT_FORMAT)} "
                f"up to {self.max_date.strftime(DATETIME_PRINT_FORMAT)} "
                f"({(self.max_date - self.min_date).days} days).."
            )
            # Store non-trimmed data - will be trimmed after signal generation.
            dump(preprocessed, self.data_pickle_file)
        else:
            dump(data, self.data_pickle_file)
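
The hyperopt_pickle_magic() helper above exists because worker processes unpickle the strategy without importing the user's strategy files; registering each base class's module "by value" makes cloudpickle embed the class definition in the pickle stream instead of referencing it by import path. The sketch below is not part of the module and uses a hypothetical strategy base class to show the underlying cloudpickle call.

import sys

from joblib.externals import cloudpickle


class MyStrategyBase:  # hypothetical stand-in for a user-defined strategy base class
    stoploss = -0.10


# Register the defining module by value, as hyperopt_pickle_magic() does for every
# non-IStrategy base class it finds while walking the inheritance tree.
mod = sys.modules.get(MyStrategyBase.__module__)
if mod is not None:
    cloudpickle.register_pickle_by_value(mod)

payload = cloudpickle.dumps(MyStrategyBase)  # now self-contained
print(cloudpickle.loads(payload).stoploss)   # -0.1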
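
_get_params_dict() is the bridge between scikit-optimize's positional representation of a point and the named parameters the strategy consumes. A self-contained illustration of that mapping, with made-up dimension names and values:

from skopt.space import Integer, Real

# Two hypothetical dimensions, as buy_indicator_space() and stoploss_space() might return.
dimensions = [Integer(5, 50, name="buy_rsi"), Real(-0.35, -0.02, name="stoploss")]
raw_params = [30, -0.05]  # one point, in dimension order, as Optimizer.ask() returns it

params_dict = {d.name: v for d, v in zip(dimensions, raw_params, strict=False)}
print(params_dict)  # {'buy_rsi': 30, 'stoploss': -0.05}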
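
One detail of generate_optimizer() worth isolating: a sampled max_open_trades of -1 or 0 means "unlimited", which the strategy represents as float("inf"), while any other value is coerced to int. The same rule as a standalone function (the function name is made up for illustration):

def normalize_max_open_trades(sampled: int) -> int | float:
    """Map the search-space encoding (-1 or 0 means unlimited) to the strategy's representation."""
    return int(sampled) if sampled not in (-1, 0) else float("inf")


assert normalize_max_open_trades(5) == 5
assert normalize_max_open_trades(-1) == float("inf")
assert normalize_max_open_trades(0) == float("inf")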
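
prepare_hyperopt_data() writes the analyzed candle data to hyperopt_tickerdata.pkl with joblib.dump(), and generate_optimizer() re-opens that file every epoch with mmap_mode="r" so the underlying numpy blocks can be memory-mapped rather than copied into each worker. A minimal round trip that mirrors the pattern, with a made-up path and toy data:

from pathlib import Path

import pandas as pd
from joblib import dump, load

pickle_file = Path("hyperopt_tickerdata_example.pkl")  # hypothetical location
data = {"BTC/USDT": pd.DataFrame({"close": [1.0, 2.0, 3.0]})}

dump(data, pickle_file)
with pickle_file.open("rb") as f:
    reloaded = load(f, mmap_mode="r")  # same call pattern as generate_optimizer()
print(float(reloaded["BTC/USDT"]["close"].iloc[-1]))  # 3.0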
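
Finally, for orientation, a minimal sketch of how a driver could wire get_optimizer() to generate_optimizer() through scikit-optimize's ask/tell interface. This is not the real runner: freqtrade's Hyperopt class dispatches generate_optimizer() calls to worker processes via joblib, and the epoch budget, batch size, and config object below are placeholders.

from freqtrade.optimize.hyperopt.hyperopt_optimizer import HyperOptimizer

# `config` is assumed to be a fully populated freqtrade Config dict.
hopt = HyperOptimizer(config)
hopt.prepare_hyperopt()
opt = hopt.get_optimizer(cpu_count=1, random_state=42, initial_points=30, model_queue_size=10)

best = None
for _ in range(100):  # arbitrary epoch budget for the sketch
    asked = opt.ask(n_points=1)  # list of candidate points, each a list of raw values
    results = [hopt.generate_optimizer(point) for point in asked]
    opt.tell(asked, [r["loss"] for r in results])
    for r in results:
        if best is None or r["loss"] < best["loss"]:
            best = r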