freqtrade / freqtrade / build 14507242113

02 Dec 2024 07:11PM UTC coverage: 94.422% (+0.05%) from 94.377%

Trigger: push (github / web-flow)
Commit: Merge pull request #11028 from xzmeng/fix-none
        fix: check if days is None before conversion

1 of 1 new or added line in 1 file covered (100.0%).
525 existing lines in 54 files now uncovered.
21684 of 22965 relevant lines covered (94.42%).
0.94 hits per line.

Source File: /freqtrade/optimize/hyperopt/hyperopt_optimizer.py (98.56%)

"""
This module contains the hyperopt optimizer class, which needs to be pickled
and will be sent to the hyperopt worker processes.
"""

import logging
import sys
import warnings
from datetime import datetime, timezone
from typing import Any

from joblib import dump, load
from joblib.externals import cloudpickle
from pandas import DataFrame

from freqtrade.constants import DATETIME_PRINT_FORMAT, Config
from freqtrade.data.converter import trim_dataframes
from freqtrade.data.history import get_timerange
from freqtrade.data.metrics import calculate_market_change
from freqtrade.enums import HyperoptState
from freqtrade.exceptions import OperationalException
from freqtrade.misc import deep_merge_dicts
from freqtrade.optimize.backtesting import Backtesting

# Import IHyperOptLoss to allow unpickling classes from these modules
from freqtrade.optimize.hyperopt.hyperopt_auto import HyperOptAuto
from freqtrade.optimize.hyperopt_loss.hyperopt_loss_interface import IHyperOptLoss
from freqtrade.optimize.hyperopt_tools import HyperoptStateContainer, HyperoptTools
from freqtrade.optimize.optimize_reports import generate_strategy_stats
from freqtrade.resolvers.hyperopt_resolver import HyperOptLossResolver


# Suppress scikit-learn FutureWarnings from skopt
with warnings.catch_warnings():
    warnings.filterwarnings("ignore", category=FutureWarning)
    from skopt import Optimizer
    from skopt.space import Dimension

logger = logging.getLogger(__name__)


MAX_LOSS = 100000  # just a big enough number to be bad result in loss optimization


class HyperOptimizer:
    """
    HyperoptOptimizer class
    This class is sent to the hyperopt worker processes.
    """

    def __init__(self, config: Config) -> None:
        self.buy_space: list[Dimension] = []
        self.sell_space: list[Dimension] = []
        self.protection_space: list[Dimension] = []
        self.roi_space: list[Dimension] = []
        self.stoploss_space: list[Dimension] = []
        self.trailing_space: list[Dimension] = []
        self.max_open_trades_space: list[Dimension] = []
        self.dimensions: list[Dimension] = []

        self.config = config
        self.min_date: datetime
        self.max_date: datetime

        self.backtesting = Backtesting(self.config)
        self.pairlist = self.backtesting.pairlists.whitelist
        self.custom_hyperopt: HyperOptAuto
        self.analyze_per_epoch = self.config.get("analyze_per_epoch", False)

        if not self.config.get("hyperopt"):
            self.custom_hyperopt = HyperOptAuto(self.config)
        else:
            raise OperationalException(  # uncovered
                "Using separate Hyperopt files has been removed in 2021.9. Please convert "
                "your existing Hyperopt file to the new Hyperoptable strategy interface"
            )

        self.backtesting._set_strategy(self.backtesting.strategylist[0])
        self.custom_hyperopt.strategy = self.backtesting.strategy

        self.hyperopt_pickle_magic(self.backtesting.strategy.__class__.__bases__)
        self.custom_hyperoptloss: IHyperOptLoss = HyperOptLossResolver.load_hyperoptloss(
            self.config
        )
        self.calculate_loss = self.custom_hyperoptloss.hyperopt_loss_function

        self.data_pickle_file = (
            self.config["user_data_dir"] / "hyperopt_results" / "hyperopt_tickerdata.pkl"
        )

        self.market_change = 0.0

        if HyperoptTools.has_space(self.config, "sell"):
            # Make sure use_exit_signal is enabled
            self.config["use_exit_signal"] = True

    def prepare_hyperopt(self) -> None:
        # Initialize spaces ...
        self.init_spaces()

        self.prepare_hyperopt_data()

        # We don't need exchange instance anymore while running hyperopt
        self.backtesting.exchange.close()
        self.backtesting.exchange._api = None
        self.backtesting.exchange._api_async = None
        self.backtesting.exchange.loop = None  # type: ignore
        self.backtesting.exchange._loop_lock = None  # type: ignore
        self.backtesting.exchange._cache_lock = None  # type: ignore
        # self.backtesting.exchange = None  # type: ignore
        self.backtesting.pairlists = None  # type: ignore

    def get_strategy_name(self) -> str:
        return self.backtesting.strategy.get_strategy_name()

    def hyperopt_pickle_magic(self, bases) -> None:
        """
        Hyperopt magic to allow strategy inheritance across files.
        For this to properly work, we need to register the module of the imported class
        to pickle as value.
        """
        for modules in bases:
            if modules.__name__ != "IStrategy":
                cloudpickle.register_pickle_by_value(sys.modules[modules.__module__])
                self.hyperopt_pickle_magic(modules.__bases__)

    def _get_params_dict(
        self, dimensions: list[Dimension], raw_params: list[Any]
    ) -> dict[str, Any]:
        # Ensure the number of dimensions match
        # the number of parameters in the list.
        if len(raw_params) != len(dimensions):
            raise ValueError("Mismatch in number of search-space dimensions.")  # uncovered

        # Return a dict where the keys are the names of the dimensions
        # and the values are taken from the list of parameters.
        return {d.name: v for d, v in zip(dimensions, raw_params, strict=False)}

    def _get_params_details(self, params: dict) -> dict:
        """
        Return the params for each space
        """
        result: dict = {}

        if HyperoptTools.has_space(self.config, "buy"):
            result["buy"] = {p.name: params.get(p.name) for p in self.buy_space}
        if HyperoptTools.has_space(self.config, "sell"):
            result["sell"] = {p.name: params.get(p.name) for p in self.sell_space}
        if HyperoptTools.has_space(self.config, "protection"):
            result["protection"] = {p.name: params.get(p.name) for p in self.protection_space}
        if HyperoptTools.has_space(self.config, "roi"):
            result["roi"] = {
                str(k): v for k, v in self.custom_hyperopt.generate_roi_table(params).items()
            }
        if HyperoptTools.has_space(self.config, "stoploss"):
            result["stoploss"] = {p.name: params.get(p.name) for p in self.stoploss_space}
        if HyperoptTools.has_space(self.config, "trailing"):
            result["trailing"] = self.custom_hyperopt.generate_trailing_params(params)
        if HyperoptTools.has_space(self.config, "trades"):
            result["max_open_trades"] = {
                "max_open_trades": (
                    self.backtesting.strategy.max_open_trades
                    if self.backtesting.strategy.max_open_trades != float("inf")
                    else -1
                )
            }

        return result

    def _get_no_optimize_details(self) -> dict[str, Any]:
        """
        Get non-optimized parameters
        """
        result: dict[str, Any] = {}
        strategy = self.backtesting.strategy
        if not HyperoptTools.has_space(self.config, "roi"):
            result["roi"] = {str(k): v for k, v in strategy.minimal_roi.items()}
        if not HyperoptTools.has_space(self.config, "stoploss"):
            result["stoploss"] = {"stoploss": strategy.stoploss}
        if not HyperoptTools.has_space(self.config, "trailing"):
            result["trailing"] = {
                "trailing_stop": strategy.trailing_stop,
                "trailing_stop_positive": strategy.trailing_stop_positive,
                "trailing_stop_positive_offset": strategy.trailing_stop_positive_offset,
                "trailing_only_offset_is_reached": strategy.trailing_only_offset_is_reached,
            }
        if not HyperoptTools.has_space(self.config, "trades"):
            result["max_open_trades"] = {"max_open_trades": strategy.max_open_trades}
        return result

    def init_spaces(self):
        """
        Assign the dimensions in the hyperoptimization space.
        """
        if HyperoptTools.has_space(self.config, "protection"):
            # Protections can only be optimized when using the Parameter interface
            logger.debug("Hyperopt has 'protection' space")
            # Enable Protections if protection space is selected.
            self.config["enable_protections"] = True
            self.backtesting.enable_protections = True
            self.protection_space = self.custom_hyperopt.protection_space()

        if HyperoptTools.has_space(self.config, "buy"):
            logger.debug("Hyperopt has 'buy' space")
            self.buy_space = self.custom_hyperopt.buy_indicator_space()

        if HyperoptTools.has_space(self.config, "sell"):
            logger.debug("Hyperopt has 'sell' space")
            self.sell_space = self.custom_hyperopt.sell_indicator_space()

        if HyperoptTools.has_space(self.config, "roi"):
            logger.debug("Hyperopt has 'roi' space")
            self.roi_space = self.custom_hyperopt.roi_space()

        if HyperoptTools.has_space(self.config, "stoploss"):
            logger.debug("Hyperopt has 'stoploss' space")
            self.stoploss_space = self.custom_hyperopt.stoploss_space()

        if HyperoptTools.has_space(self.config, "trailing"):
            logger.debug("Hyperopt has 'trailing' space")
            self.trailing_space = self.custom_hyperopt.trailing_space()

        if HyperoptTools.has_space(self.config, "trades"):
            logger.debug("Hyperopt has 'trades' space")
            self.max_open_trades_space = self.custom_hyperopt.max_open_trades_space()

        self.dimensions = (
            self.buy_space
            + self.sell_space
            + self.protection_space
            + self.roi_space
            + self.stoploss_space
            + self.trailing_space
            + self.max_open_trades_space
        )

    def assign_params(self, params_dict: dict[str, Any], category: str) -> None:
        """
        Assign hyperoptable parameters
        """
        for attr_name, attr in self.backtesting.strategy.enumerate_parameters(category):
            if attr.optimize:
                # noinspection PyProtectedMember
                attr.value = params_dict[attr_name]

    def generate_optimizer(self, raw_params: list[Any]) -> dict[str, Any]:
        """
        Used Optimize function.
        Called once per epoch to optimize whatever is configured.
        Keep this function as optimized as possible!
        """
        HyperoptStateContainer.set_state(HyperoptState.OPTIMIZE)
        backtest_start_time = datetime.now(timezone.utc)
        params_dict = self._get_params_dict(self.dimensions, raw_params)

        # Apply parameters
        if HyperoptTools.has_space(self.config, "buy"):
            self.assign_params(params_dict, "buy")

        if HyperoptTools.has_space(self.config, "sell"):
            self.assign_params(params_dict, "sell")

        if HyperoptTools.has_space(self.config, "protection"):
            self.assign_params(params_dict, "protection")

        if HyperoptTools.has_space(self.config, "roi"):
            self.backtesting.strategy.minimal_roi = self.custom_hyperopt.generate_roi_table(
                params_dict
            )

        if HyperoptTools.has_space(self.config, "stoploss"):
            self.backtesting.strategy.stoploss = params_dict["stoploss"]

        if HyperoptTools.has_space(self.config, "trailing"):
            d = self.custom_hyperopt.generate_trailing_params(params_dict)
            self.backtesting.strategy.trailing_stop = d["trailing_stop"]
            self.backtesting.strategy.trailing_stop_positive = d["trailing_stop_positive"]
            self.backtesting.strategy.trailing_stop_positive_offset = d[
                "trailing_stop_positive_offset"
            ]
            self.backtesting.strategy.trailing_only_offset_is_reached = d[
                "trailing_only_offset_is_reached"
            ]

        if HyperoptTools.has_space(self.config, "trades"):
            if self.config["stake_amount"] == "unlimited" and (
                params_dict["max_open_trades"] == -1 or params_dict["max_open_trades"] == 0
            ):
                # Ignore unlimited max open trades if stake amount is unlimited
                params_dict.update({"max_open_trades": self.config["max_open_trades"]})

            updated_max_open_trades = (
                int(params_dict["max_open_trades"])
                if (params_dict["max_open_trades"] != -1 and params_dict["max_open_trades"] != 0)
                else float("inf")
            )

            self.config.update({"max_open_trades": updated_max_open_trades})

            self.backtesting.strategy.max_open_trades = updated_max_open_trades

        with self.data_pickle_file.open("rb") as f:
            processed = load(f, mmap_mode="r")
            if self.analyze_per_epoch:
                # Data is not yet analyzed, rerun populate_indicators.
                processed = self.advise_and_trim(processed)  # uncovered

        bt_results = self.backtesting.backtest(
            processed=processed, start_date=self.min_date, end_date=self.max_date
        )
        backtest_end_time = datetime.now(timezone.utc)
        bt_results.update(
            {
                "backtest_start_time": int(backtest_start_time.timestamp()),
                "backtest_end_time": int(backtest_end_time.timestamp()),
            }
        )

        return self._get_results_dict(
            bt_results, self.min_date, self.max_date, params_dict, processed=processed
        )

    def _get_results_dict(
        self,
        backtesting_results: dict[str, Any],
        min_date: datetime,
        max_date: datetime,
        params_dict: dict[str, Any],
        processed: dict[str, DataFrame],
    ) -> dict[str, Any]:
        params_details = self._get_params_details(params_dict)

        strat_stats = generate_strategy_stats(
            self.pairlist,
            self.backtesting.strategy.get_strategy_name(),
            backtesting_results,
            min_date,
            max_date,
            market_change=self.market_change,
            is_hyperopt=True,
        )
        results_explanation = HyperoptTools.format_results_explanation_string(
            strat_stats, self.config["stake_currency"]
        )

        not_optimized = self.backtesting.strategy.get_no_optimize_params()
        not_optimized = deep_merge_dicts(not_optimized, self._get_no_optimize_details())

        trade_count = strat_stats["total_trades"]
        total_profit = strat_stats["profit_total"]

        # If this evaluation contains too short amount of trades to be
        # interesting -- consider it as 'bad' (assigned max. loss value)
        # in order to cast this hyperspace point away from optimization
        # path. We do not want to optimize 'hodl' strategies.
        loss: float = MAX_LOSS
        if trade_count >= self.config["hyperopt_min_trades"]:
            loss = self.calculate_loss(
                results=backtesting_results["results"],
                trade_count=trade_count,
                min_date=min_date,
                max_date=max_date,
                config=self.config,
                processed=processed,
                backtest_stats=strat_stats,
            )
        return {
            "loss": loss,
            "params_dict": params_dict,
            "params_details": params_details,
            "params_not_optimized": not_optimized,
            "results_metrics": strat_stats,
            "results_explanation": results_explanation,
            "total_profit": total_profit,
        }

    def get_optimizer(
        self,
        cpu_count: int,
        random_state: int,
        initial_points: int,
        model_queue_size: int,
    ) -> Optimizer:
        dimensions = self.dimensions
        estimator = self.custom_hyperopt.generate_estimator(dimensions=dimensions)

        acq_optimizer = "sampling"
        if isinstance(estimator, str):
            if estimator not in ("GP", "RF", "ET", "GBRT"):
                raise OperationalException(f"Estimator {estimator} not supported.")
            else:
                acq_optimizer = "auto"

        logger.info(f"Using estimator {estimator}.")
        return Optimizer(
            dimensions,
            base_estimator=estimator,
            acq_optimizer=acq_optimizer,
            n_initial_points=initial_points,
            acq_optimizer_kwargs={"n_jobs": cpu_count},
            random_state=random_state,
            model_queue_size=model_queue_size,
        )

    def advise_and_trim(self, data: dict[str, DataFrame]) -> dict[str, DataFrame]:
        preprocessed = self.backtesting.strategy.advise_all_indicators(data)

        # Trim startup period from analyzed dataframe to get correct dates for output.
        # This is only used to keep track of min/max date after trimming.
        # The result is NOT returned from this method, actual trimming happens in backtesting.
        trimmed = trim_dataframes(preprocessed, self.timerange, self.backtesting.required_startup)
        self.min_date, self.max_date = get_timerange(trimmed)
        if not self.market_change:
            self.market_change = calculate_market_change(trimmed, "close")

        # Real trimming will happen as part of backtesting.
        return preprocessed

    def prepare_hyperopt_data(self) -> None:
        HyperoptStateContainer.set_state(HyperoptState.DATALOAD)
        data, self.timerange = self.backtesting.load_bt_data()
        self.backtesting.load_bt_data_detail()
        logger.info("Dataload complete. Calculating indicators")

        if not self.analyze_per_epoch:
            HyperoptStateContainer.set_state(HyperoptState.INDICATORS)

            preprocessed = self.advise_and_trim(data)

            logger.info(
                f"Hyperopting with data from "
                f"{self.min_date.strftime(DATETIME_PRINT_FORMAT)} "
                f"up to {self.max_date.strftime(DATETIME_PRINT_FORMAT)} "
                f"({(self.max_date - self.min_date).days} days).."
            )
            # Store non-trimmed data - will be trimmed after signal generation.
            dump(preprocessed, self.data_pickle_file)
        else:
            dump(data, self.data_pickle_file)
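
For orientation, the sketch below shows the call order the class above expects: construct it from a config, prepare the data, build the skopt Optimizer, then feed candidate points through generate_optimizer(). The run_hyperopt_sketch function and its arguments are hypothetical (not part of this file or of freqtrade's public API), and config is assumed to be a fully populated freqtrade configuration; only the HyperOptimizer methods and skopt's ask()/tell() calls come from the listing itself.

# Hypothetical driver sketch (not part of hyperopt_optimizer.py): illustrates
# the single-process call order for HyperOptimizer using skopt's ask/tell loop.
from freqtrade.optimize.hyperopt.hyperopt_optimizer import HyperOptimizer


def run_hyperopt_sketch(config, epochs: int = 10):
    hyperoptimizer = HyperOptimizer(config)
    # Init spaces, analyze and pickle the data, drop exchange handles.
    hyperoptimizer.prepare_hyperopt()

    optimizer = hyperoptimizer.get_optimizer(
        cpu_count=1, random_state=42, initial_points=5, model_queue_size=10
    )

    results = []
    for _ in range(epochs):
        raw_params = optimizer.ask()                 # one point in the search space
        epoch = hyperoptimizer.generate_optimizer(raw_params)  # backtest + loss
        optimizer.tell(raw_params, epoch["loss"])    # feed the loss back to skopt
        results.append(epoch)

    return min(results, key=lambda r: r["loss"])

As the module docstring notes, this class is pickled and sent to hyperopt worker processes, so the real runner evaluates many such points in parallel; the loop above only mirrors the call order, not the parallel scheduling.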