freqtrade / freqtrade, build 14234163970 (push via github)

08 Feb 2025 03:51PM UTC. Coverage: 94.407% (-0.02% from 94.422%)

Committer: xmatthias
Commit: fix: validate config when calling pair_history (closes #11271)

6 of 6 new or added lines in 1 file covered (100.0%).
367 existing lines in 28 files now uncovered.
21894 of 23191 relevant lines covered (94.41%).
0.94 hits per line.
Source File: /freqtrade/optimize/hyperopt/hyperopt_optimizer.py (file coverage: 98.57%; uncovered lines marked # UNCOV)
"""
This module contains the hyperopt optimizer class, which needs to be pickled
and will be sent to the hyperopt worker processes.
"""

import logging
import sys
import warnings
from datetime import datetime, timezone
from typing import Any

from joblib import dump, load
from joblib.externals import cloudpickle
from pandas import DataFrame

from freqtrade.constants import DATETIME_PRINT_FORMAT, Config
from freqtrade.data.converter import trim_dataframes
from freqtrade.data.history import get_timerange
from freqtrade.data.metrics import calculate_market_change
from freqtrade.enums import HyperoptState
from freqtrade.exceptions import OperationalException
from freqtrade.misc import deep_merge_dicts
from freqtrade.optimize.backtesting import Backtesting

# Import IHyperOptLoss to allow unpickling classes from these modules
from freqtrade.optimize.hyperopt.hyperopt_auto import HyperOptAuto
from freqtrade.optimize.hyperopt_loss.hyperopt_loss_interface import IHyperOptLoss
from freqtrade.optimize.hyperopt_tools import HyperoptStateContainer, HyperoptTools
from freqtrade.optimize.optimize_reports import generate_strategy_stats
from freqtrade.resolvers.hyperopt_resolver import HyperOptLossResolver
from freqtrade.util.dry_run_wallet import get_dry_run_wallet


# Suppress scikit-learn FutureWarnings from skopt
with warnings.catch_warnings():
    warnings.filterwarnings("ignore", category=FutureWarning)
    from skopt import Optimizer
    from skopt.space import Dimension

logger = logging.getLogger(__name__)


MAX_LOSS = 100000  # just a big enough number to be bad result in loss optimization


class HyperOptimizer:
    """
    HyperoptOptimizer class
    This class is sent to the hyperopt worker processes.
    """

    def __init__(self, config: Config) -> None:
        self.buy_space: list[Dimension] = []
        self.sell_space: list[Dimension] = []
        self.protection_space: list[Dimension] = []
        self.roi_space: list[Dimension] = []
        self.stoploss_space: list[Dimension] = []
        self.trailing_space: list[Dimension] = []
        self.max_open_trades_space: list[Dimension] = []
        self.dimensions: list[Dimension] = []

        self.config = config
        self.min_date: datetime
        self.max_date: datetime

        self.backtesting = Backtesting(self.config)
        self.pairlist = self.backtesting.pairlists.whitelist
        self.custom_hyperopt: HyperOptAuto
        self.analyze_per_epoch = self.config.get("analyze_per_epoch", False)

        if not self.config.get("hyperopt"):
            self.custom_hyperopt = HyperOptAuto(self.config)
        else:
            raise OperationalException(  # UNCOV
                "Using separate Hyperopt files has been removed in 2021.9. Please convert "
                "your existing Hyperopt file to the new Hyperoptable strategy interface"
            )

        self.backtesting._set_strategy(self.backtesting.strategylist[0])
        self.custom_hyperopt.strategy = self.backtesting.strategy

        self.hyperopt_pickle_magic(self.backtesting.strategy.__class__.__bases__)
        self.custom_hyperoptloss: IHyperOptLoss = HyperOptLossResolver.load_hyperoptloss(
            self.config
        )
        self.calculate_loss = self.custom_hyperoptloss.hyperopt_loss_function

        self.data_pickle_file = (
            self.config["user_data_dir"] / "hyperopt_results" / "hyperopt_tickerdata.pkl"
        )

        self.market_change = 0.0

        if HyperoptTools.has_space(self.config, "sell"):
            # Make sure use_exit_signal is enabled
            self.config["use_exit_signal"] = True

    def prepare_hyperopt(self) -> None:
        # Initialize spaces ...
        self.init_spaces()

        self.prepare_hyperopt_data()

        # We don't need exchange instance anymore while running hyperopt
        self.backtesting.exchange.close()
        self.backtesting.exchange._api = None
        self.backtesting.exchange._api_async = None
        self.backtesting.exchange.loop = None  # type: ignore
        self.backtesting.exchange._loop_lock = None  # type: ignore
        self.backtesting.exchange._cache_lock = None  # type: ignore
        # self.backtesting.exchange = None  # type: ignore
        self.backtesting.pairlists = None  # type: ignore

    def get_strategy_name(self) -> str:
        return self.backtesting.strategy.get_strategy_name()

    def hyperopt_pickle_magic(self, bases) -> None:
        """
        Hyperopt magic to allow strategy inheritance across files.
        For this to properly work, we need to register the module of the imported class
        to pickle as value.
        """
        for modules in bases:
            if modules.__name__ != "IStrategy":
                cloudpickle.register_pickle_by_value(sys.modules[modules.__module__])
                self.hyperopt_pickle_magic(modules.__bases__)

    def _get_params_dict(
        self, dimensions: list[Dimension], raw_params: list[Any]
    ) -> dict[str, Any]:
        # Ensure the number of dimensions match
        # the number of parameters in the list.
        if len(raw_params) != len(dimensions):
            raise ValueError("Mismatch in number of search-space dimensions.")  # UNCOV

        # Return a dict where the keys are the names of the dimensions
        # and the values are taken from the list of parameters.
        return {d.name: v for d, v in zip(dimensions, raw_params, strict=False)}

    def _get_params_details(self, params: dict) -> dict:
        """
        Return the params for each space
        """
        result: dict = {}

        if HyperoptTools.has_space(self.config, "buy"):
            result["buy"] = {p.name: params.get(p.name) for p in self.buy_space}
        if HyperoptTools.has_space(self.config, "sell"):
            result["sell"] = {p.name: params.get(p.name) for p in self.sell_space}
        if HyperoptTools.has_space(self.config, "protection"):
            result["protection"] = {p.name: params.get(p.name) for p in self.protection_space}
        if HyperoptTools.has_space(self.config, "roi"):
            result["roi"] = {
                str(k): v for k, v in self.custom_hyperopt.generate_roi_table(params).items()
            }
        if HyperoptTools.has_space(self.config, "stoploss"):
            result["stoploss"] = {p.name: params.get(p.name) for p in self.stoploss_space}
        if HyperoptTools.has_space(self.config, "trailing"):
            result["trailing"] = self.custom_hyperopt.generate_trailing_params(params)
        if HyperoptTools.has_space(self.config, "trades"):
            result["max_open_trades"] = {
                "max_open_trades": (
                    self.backtesting.strategy.max_open_trades
                    if self.backtesting.strategy.max_open_trades != float("inf")
                    else -1
                )
            }

        return result

    def _get_no_optimize_details(self) -> dict[str, Any]:
        """
        Get non-optimized parameters
        """
        result: dict[str, Any] = {}
        strategy = self.backtesting.strategy
        if not HyperoptTools.has_space(self.config, "roi"):
            result["roi"] = {str(k): v for k, v in strategy.minimal_roi.items()}
        if not HyperoptTools.has_space(self.config, "stoploss"):
            result["stoploss"] = {"stoploss": strategy.stoploss}
        if not HyperoptTools.has_space(self.config, "trailing"):
            result["trailing"] = {
                "trailing_stop": strategy.trailing_stop,
                "trailing_stop_positive": strategy.trailing_stop_positive,
                "trailing_stop_positive_offset": strategy.trailing_stop_positive_offset,
                "trailing_only_offset_is_reached": strategy.trailing_only_offset_is_reached,
            }
        if not HyperoptTools.has_space(self.config, "trades"):
            result["max_open_trades"] = {"max_open_trades": strategy.max_open_trades}
        return result

    def init_spaces(self):
        """
        Assign the dimensions in the hyperoptimization space.
        """
        if HyperoptTools.has_space(self.config, "protection"):
            # Protections can only be optimized when using the Parameter interface
            logger.debug("Hyperopt has 'protection' space")
            # Enable Protections if protection space is selected.
            self.config["enable_protections"] = True
            self.backtesting.enable_protections = True
            self.protection_space = self.custom_hyperopt.protection_space()

        if HyperoptTools.has_space(self.config, "buy"):
            logger.debug("Hyperopt has 'buy' space")
            self.buy_space = self.custom_hyperopt.buy_indicator_space()

        if HyperoptTools.has_space(self.config, "sell"):
            logger.debug("Hyperopt has 'sell' space")
            self.sell_space = self.custom_hyperopt.sell_indicator_space()

        if HyperoptTools.has_space(self.config, "roi"):
            logger.debug("Hyperopt has 'roi' space")
            self.roi_space = self.custom_hyperopt.roi_space()

        if HyperoptTools.has_space(self.config, "stoploss"):
            logger.debug("Hyperopt has 'stoploss' space")
            self.stoploss_space = self.custom_hyperopt.stoploss_space()

        if HyperoptTools.has_space(self.config, "trailing"):
            logger.debug("Hyperopt has 'trailing' space")
            self.trailing_space = self.custom_hyperopt.trailing_space()

        if HyperoptTools.has_space(self.config, "trades"):
            logger.debug("Hyperopt has 'trades' space")
            self.max_open_trades_space = self.custom_hyperopt.max_open_trades_space()

        self.dimensions = (
            self.buy_space
            + self.sell_space
            + self.protection_space
            + self.roi_space
            + self.stoploss_space
            + self.trailing_space
            + self.max_open_trades_space
        )

    def assign_params(self, params_dict: dict[str, Any], category: str) -> None:
        """
        Assign hyperoptable parameters
        """
        for attr_name, attr in self.backtesting.strategy.enumerate_parameters(category):
            if attr.optimize:
                # noinspection PyProtectedMember
                attr.value = params_dict[attr_name]

    def generate_optimizer(self, raw_params: list[Any]) -> dict[str, Any]:
        """
        Used Optimize function.
        Called once per epoch to optimize whatever is configured.
        Keep this function as optimized as possible!
        """
        HyperoptStateContainer.set_state(HyperoptState.OPTIMIZE)
        backtest_start_time = datetime.now(timezone.utc)
        params_dict = self._get_params_dict(self.dimensions, raw_params)

        # Apply parameters
        if HyperoptTools.has_space(self.config, "buy"):
            self.assign_params(params_dict, "buy")

        if HyperoptTools.has_space(self.config, "sell"):
            self.assign_params(params_dict, "sell")

        if HyperoptTools.has_space(self.config, "protection"):
            self.assign_params(params_dict, "protection")

        if HyperoptTools.has_space(self.config, "roi"):
            self.backtesting.strategy.minimal_roi = self.custom_hyperopt.generate_roi_table(
                params_dict
            )

        if HyperoptTools.has_space(self.config, "stoploss"):
            self.backtesting.strategy.stoploss = params_dict["stoploss"]

        if HyperoptTools.has_space(self.config, "trailing"):
            d = self.custom_hyperopt.generate_trailing_params(params_dict)
            self.backtesting.strategy.trailing_stop = d["trailing_stop"]
            self.backtesting.strategy.trailing_stop_positive = d["trailing_stop_positive"]
            self.backtesting.strategy.trailing_stop_positive_offset = d[
                "trailing_stop_positive_offset"
            ]
            self.backtesting.strategy.trailing_only_offset_is_reached = d[
                "trailing_only_offset_is_reached"
            ]

        if HyperoptTools.has_space(self.config, "trades"):
            if self.config["stake_amount"] == "unlimited" and (
                params_dict["max_open_trades"] == -1 or params_dict["max_open_trades"] == 0
            ):
                # Ignore unlimited max open trades if stake amount is unlimited
                params_dict.update({"max_open_trades": self.config["max_open_trades"]})

            updated_max_open_trades = (
                int(params_dict["max_open_trades"])
                if (params_dict["max_open_trades"] != -1 and params_dict["max_open_trades"] != 0)
                else float("inf")
            )

            self.config.update({"max_open_trades": updated_max_open_trades})

            self.backtesting.strategy.max_open_trades = updated_max_open_trades

        with self.data_pickle_file.open("rb") as f:
            processed = load(f, mmap_mode="r")
            if self.analyze_per_epoch:
                # Data is not yet analyzed, rerun populate_indicators.
                processed = self.advise_and_trim(processed)  # UNCOV

        bt_results = self.backtesting.backtest(
            processed=processed, start_date=self.min_date, end_date=self.max_date
        )
        backtest_end_time = datetime.now(timezone.utc)
        bt_results.update(
            {
                "backtest_start_time": int(backtest_start_time.timestamp()),
                "backtest_end_time": int(backtest_end_time.timestamp()),
            }
        )

        return self._get_results_dict(
            bt_results, self.min_date, self.max_date, params_dict, processed=processed
        )

    def _get_results_dict(
        self,
        backtesting_results: dict[str, Any],
        min_date: datetime,
        max_date: datetime,
        params_dict: dict[str, Any],
        processed: dict[str, DataFrame],
    ) -> dict[str, Any]:
        params_details = self._get_params_details(params_dict)

        strat_stats = generate_strategy_stats(
            self.pairlist,
            self.backtesting.strategy.get_strategy_name(),
            backtesting_results,
            min_date,
            max_date,
            market_change=self.market_change,
            is_hyperopt=True,
        )
        results_explanation = HyperoptTools.format_results_explanation_string(
            strat_stats, self.config["stake_currency"]
        )

        not_optimized = self.backtesting.strategy.get_no_optimize_params()
        not_optimized = deep_merge_dicts(not_optimized, self._get_no_optimize_details())

        trade_count = strat_stats["total_trades"]
        total_profit = strat_stats["profit_total"]

        # If this evaluation contains too short amount of trades to be
        # interesting -- consider it as 'bad' (assigned max. loss value)
        # in order to cast this hyperspace point away from optimization
        # path. We do not want to optimize 'hodl' strategies.
        loss: float = MAX_LOSS
        if trade_count >= self.config["hyperopt_min_trades"]:
            loss = self.calculate_loss(
                results=backtesting_results["results"],
                trade_count=trade_count,
                min_date=min_date,
                max_date=max_date,
                config=self.config,
                processed=processed,
                backtest_stats=strat_stats,
                starting_balance=get_dry_run_wallet(self.config),
            )
        return {
            "loss": loss,
            "params_dict": params_dict,
            "params_details": params_details,
            "params_not_optimized": not_optimized,
            "results_metrics": strat_stats,
            "results_explanation": results_explanation,
            "total_profit": total_profit,
        }

    def get_optimizer(
        self,
        cpu_count: int,
        random_state: int,
        initial_points: int,
        model_queue_size: int,
    ) -> Optimizer:
        dimensions = self.dimensions
        estimator = self.custom_hyperopt.generate_estimator(dimensions=dimensions)

        acq_optimizer = "sampling"
        if isinstance(estimator, str):
            if estimator not in ("GP", "RF", "ET", "GBRT"):
                raise OperationalException(f"Estimator {estimator} not supported.")
            else:
                acq_optimizer = "auto"

        logger.info(f"Using estimator {estimator}.")
        return Optimizer(
            dimensions,
            base_estimator=estimator,
            acq_optimizer=acq_optimizer,
            n_initial_points=initial_points,
            acq_optimizer_kwargs={"n_jobs": cpu_count},
            random_state=random_state,
            model_queue_size=model_queue_size,
        )

    def advise_and_trim(self, data: dict[str, DataFrame]) -> dict[str, DataFrame]:
        preprocessed = self.backtesting.strategy.advise_all_indicators(data)

        # Trim startup period from analyzed dataframe to get correct dates for output.
        # This is only used to keep track of min/max date after trimming.
        # The result is NOT returned from this method, actual trimming happens in backtesting.
        trimmed = trim_dataframes(preprocessed, self.timerange, self.backtesting.required_startup)
        self.min_date, self.max_date = get_timerange(trimmed)
        if not self.market_change:
            self.market_change = calculate_market_change(trimmed, "close")

        # Real trimming will happen as part of backtesting.
        return preprocessed

    def prepare_hyperopt_data(self) -> None:
        HyperoptStateContainer.set_state(HyperoptState.DATALOAD)
        data, self.timerange = self.backtesting.load_bt_data()
        self.backtesting.load_bt_data_detail()
        logger.info("Dataload complete. Calculating indicators")

        if not self.analyze_per_epoch:
            HyperoptStateContainer.set_state(HyperoptState.INDICATORS)

            preprocessed = self.advise_and_trim(data)

            logger.info(
                f"Hyperopting with data from "
                f"{self.min_date.strftime(DATETIME_PRINT_FORMAT)} "
                f"up to {self.max_date.strftime(DATETIME_PRINT_FORMAT)} "
                f"({(self.max_date - self.min_date).days} days).."
            )
            # Store non-trimmed data - will be trimmed after signal generation.
            dump(preprocessed, self.data_pickle_file)
        else:
            dump(data, self.data_pickle_file)