freqtrade / freqtrade / build 4131164979 (push, github-actions): pending completion
Commit by Matthias: "filled-date shouldn't update again"

1 of 1 new or added line in 1 file covered. (100.0%)
17024 of 17946 relevant lines covered (94.86%)
0.95 hits per line
Source file: /freqtrade/optimize/hyperopt.py (96.22% covered)
# pragma pylint: disable=too-many-instance-attributes, pointless-string-statement

"""
This module contains the hyperopt logic
"""

import logging
import random
import sys
import warnings
from datetime import datetime, timezone
from math import ceil
from pathlib import Path
from typing import Any, Dict, List, Optional, Tuple

import progressbar
import rapidjson
from colorama import Fore, Style
from colorama import init as colorama_init
from joblib import Parallel, cpu_count, delayed, dump, load, wrap_non_picklable_objects
from joblib.externals import cloudpickle
from pandas import DataFrame

from freqtrade.constants import DATETIME_PRINT_FORMAT, FTHYPT_FILEVERSION, LAST_BT_RESULT_FN, Config
from freqtrade.data.converter import trim_dataframes
from freqtrade.data.history import get_timerange
from freqtrade.data.metrics import calculate_market_change
from freqtrade.enums import HyperoptState
from freqtrade.exceptions import OperationalException
from freqtrade.misc import deep_merge_dicts, file_dump_json, plural
from freqtrade.optimize.backtesting import Backtesting
# Import IHyperOpt and IHyperOptLoss to allow unpickling classes from these modules
from freqtrade.optimize.hyperopt_auto import HyperOptAuto
from freqtrade.optimize.hyperopt_loss_interface import IHyperOptLoss
from freqtrade.optimize.hyperopt_tools import (HyperoptStateContainer, HyperoptTools,
                                               hyperopt_serializer)
from freqtrade.optimize.optimize_reports import generate_strategy_stats
from freqtrade.resolvers.hyperopt_resolver import HyperOptLossResolver


# Suppress scikit-learn FutureWarnings from skopt
with warnings.catch_warnings():
    warnings.filterwarnings("ignore", category=FutureWarning)
    from skopt import Optimizer
    from skopt.space import Dimension

progressbar.streams.wrap_stderr()
progressbar.streams.wrap_stdout()
logger = logging.getLogger(__name__)

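# Number of initial points asked from the optimizer before its surrogate model
# starts steering the search (passed as n_initial_points to the Optimizer below);
# epochs up to this count are flagged as 'is_initial_point' in the results.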
INITIAL_POINTS = 30

# Keep no more than SKOPT_MODEL_QUEUE_SIZE models
# in the skopt model queue, to optimize memory consumption
SKOPT_MODEL_QUEUE_SIZE = 10

MAX_LOSS = 100000  # just a big enough number to be a bad result in loss optimization


class Hyperopt:
    """
    Hyperopt class. This class contains all the logic to run a hyperopt simulation.

    To start a hyperopt run:
    hyperopt = Hyperopt(config)
    hyperopt.start()
    """

    def __init__(self, config: Config) -> None:
        self.buy_space: List[Dimension] = []
        self.sell_space: List[Dimension] = []
        self.protection_space: List[Dimension] = []
        self.roi_space: List[Dimension] = []
        self.stoploss_space: List[Dimension] = []
        self.trailing_space: List[Dimension] = []
        self.max_open_trades_space: List[Dimension] = []
        self.dimensions: List[Dimension] = []

        self.config = config
        self.min_date: datetime
        self.max_date: datetime

        self.backtesting = Backtesting(self.config)
        self.pairlist = self.backtesting.pairlists.whitelist
        self.custom_hyperopt: HyperOptAuto
        self.analyze_per_epoch = self.config.get('analyze_per_epoch', False)
        HyperoptStateContainer.set_state(HyperoptState.STARTUP)

        if not self.config.get('hyperopt'):
            self.custom_hyperopt = HyperOptAuto(self.config)
        else:
            raise OperationalException(
                "Using separate Hyperopt files has been removed in 2021.9. Please convert "
                "your existing Hyperopt file to the new Hyperoptable strategy interface")

        self.backtesting._set_strategy(self.backtesting.strategylist[0])
        self.custom_hyperopt.strategy = self.backtesting.strategy

        self.hyperopt_pickle_magic(self.backtesting.strategy.__class__.__bases__)
        self.custom_hyperoptloss: IHyperOptLoss = HyperOptLossResolver.load_hyperoptloss(
            self.config)
        self.calculate_loss = self.custom_hyperoptloss.hyperopt_loss_function
        time_now = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
        strategy = str(self.config['strategy'])
        self.results_file: Path = (self.config['user_data_dir'] / 'hyperopt_results' /
                                   f'strategy_{strategy}_{time_now}.fthypt')
        self.data_pickle_file = (self.config['user_data_dir'] /
                                 'hyperopt_results' / 'hyperopt_tickerdata.pkl')
        self.total_epochs = config.get('epochs', 0)

        self.current_best_loss = 100

        self.clean_hyperopt()

        self.market_change = 0.0
        self.num_epochs_saved = 0
        self.current_best_epoch: Optional[Dict[str, Any]] = None

        # Use max_open_trades for hyperopt as well, unless --disable-max-market-positions is set
        if not self.config.get('use_max_market_positions', True):
            logger.debug('Ignoring max_open_trades (--disable-max-market-positions was used) ...')
            self.backtesting.strategy.max_open_trades = float('inf')
            config.update({'max_open_trades': self.backtesting.strategy.max_open_trades})

        if HyperoptTools.has_space(self.config, 'sell'):
            # Make sure use_exit_signal is enabled
            self.config['use_exit_signal'] = True

        self.print_all = self.config.get('print_all', False)
        self.hyperopt_table_header = 0
        self.print_colorized = self.config.get('print_colorized', False)
        self.print_json = self.config.get('print_json', False)

    @staticmethod
    def get_lock_filename(config: Config) -> str:

        return str(config['user_data_dir'] / 'hyperopt.lock')

    def clean_hyperopt(self) -> None:
        """
        Remove hyperopt pickle files to restart hyperopt.
        """
        for f in [self.data_pickle_file, self.results_file]:
            p = Path(f)
            if p.is_file():
                logger.info(f"Removing `{p}`.")
                p.unlink()

    def hyperopt_pickle_magic(self, bases) -> None:
        """
        Hyperopt magic to allow strategy inheritance across files.
        For this to work properly, we need to register the module of the imported class
        so it is pickled by value.
        """
        for modules in bases:
            if modules.__name__ != 'IStrategy':
                cloudpickle.register_pickle_by_value(sys.modules[modules.__module__])
                self.hyperopt_pickle_magic(modules.__bases__)

    def _get_params_dict(self, dimensions: List[Dimension], raw_params: List[Any]) -> Dict:

        # Ensure the number of dimensions matches
        # the number of parameters in the list.
        if len(raw_params) != len(dimensions):
            raise ValueError('Mismatch in number of search-space dimensions.')

        # Return a dict where the keys are the names of the dimensions
        # and the values are taken from the list of parameters.
        return {d.name: v for d, v in zip(dimensions, raw_params)}

    def _save_result(self, epoch: Dict) -> None:
        """
        Save hyperopt results to file, one line per epoch.
        While the file as a whole is not a valid json object, this allows appending easily.
        :param epoch: result dictionary for this epoch.
        """
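        # Record the results-file format version alongside every epoch written.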
        epoch[FTHYPT_FILEVERSION] = 2
        with self.results_file.open('a') as f:
            rapidjson.dump(epoch, f, default=hyperopt_serializer,
                           number_mode=rapidjson.NM_NATIVE | rapidjson.NM_NAN)
            f.write("\n")

        self.num_epochs_saved += 1
        logger.debug(f"{self.num_epochs_saved} {plural(self.num_epochs_saved, 'epoch')} "
                     f"saved to '{self.results_file}'.")
        # Store hyperopt filename
        latest_filename = Path.joinpath(self.results_file.parent, LAST_BT_RESULT_FN)
        file_dump_json(latest_filename, {'latest_hyperopt': str(self.results_file.name)},
                       log=False)

    def _get_params_details(self, params: Dict) -> Dict:
        """
        Return the params for each space
        """
        result: Dict = {}

        if HyperoptTools.has_space(self.config, 'buy'):
            result['buy'] = {p.name: params.get(p.name) for p in self.buy_space}
        if HyperoptTools.has_space(self.config, 'sell'):
            result['sell'] = {p.name: params.get(p.name) for p in self.sell_space}
        if HyperoptTools.has_space(self.config, 'protection'):
            result['protection'] = {p.name: params.get(p.name) for p in self.protection_space}
        if HyperoptTools.has_space(self.config, 'roi'):
            result['roi'] = {str(k): v for k, v in
                             self.custom_hyperopt.generate_roi_table(params).items()}
        if HyperoptTools.has_space(self.config, 'stoploss'):
            result['stoploss'] = {p.name: params.get(p.name) for p in self.stoploss_space}
        if HyperoptTools.has_space(self.config, 'trailing'):
            result['trailing'] = self.custom_hyperopt.generate_trailing_params(params)
        if HyperoptTools.has_space(self.config, 'trades'):
            result['max_open_trades'] = {
                'max_open_trades': self.backtesting.strategy.max_open_trades
                if self.backtesting.strategy.max_open_trades != float('inf') else -1}

        return result

    def _get_no_optimize_details(self) -> Dict[str, Any]:
        """
        Get non-optimized parameters
        """
        result: Dict[str, Any] = {}
        strategy = self.backtesting.strategy
        if not HyperoptTools.has_space(self.config, 'roi'):
            result['roi'] = {str(k): v for k, v in strategy.minimal_roi.items()}
        if not HyperoptTools.has_space(self.config, 'stoploss'):
            result['stoploss'] = {'stoploss': strategy.stoploss}
        if not HyperoptTools.has_space(self.config, 'trailing'):
            result['trailing'] = {
                'trailing_stop': strategy.trailing_stop,
                'trailing_stop_positive': strategy.trailing_stop_positive,
                'trailing_stop_positive_offset': strategy.trailing_stop_positive_offset,
                'trailing_only_offset_is_reached': strategy.trailing_only_offset_is_reached,
            }
        if not HyperoptTools.has_space(self.config, 'trades'):
            result['max_open_trades'] = {'max_open_trades': strategy.max_open_trades}
        return result

    def print_results(self, results) -> None:
        """
        Log results if they are better than any previous evaluation
        TODO: this should be moved to HyperoptTools too
        """
        is_best = results['is_best']

        if self.print_all or is_best:
            print(
                HyperoptTools.get_result_table(
                    self.config, results, self.total_epochs,
                    self.print_all, self.print_colorized,
                    self.hyperopt_table_header
                )
            )
            self.hyperopt_table_header = 2

    def init_spaces(self):
        """
        Assign the dimensions in the hyperoptimization space.
        """
        if HyperoptTools.has_space(self.config, 'protection'):
            # Protections can only be optimized when using the Parameter interface
            logger.debug("Hyperopt has 'protection' space")
            # Enable Protections if protection space is selected.
            self.config['enable_protections'] = True
            self.backtesting.enable_protections = True
            self.protection_space = self.custom_hyperopt.protection_space()

        if HyperoptTools.has_space(self.config, 'buy'):
            logger.debug("Hyperopt has 'buy' space")
            self.buy_space = self.custom_hyperopt.buy_indicator_space()

        if HyperoptTools.has_space(self.config, 'sell'):
            logger.debug("Hyperopt has 'sell' space")
            self.sell_space = self.custom_hyperopt.sell_indicator_space()

        if HyperoptTools.has_space(self.config, 'roi'):
            logger.debug("Hyperopt has 'roi' space")
            self.roi_space = self.custom_hyperopt.roi_space()

        if HyperoptTools.has_space(self.config, 'stoploss'):
            logger.debug("Hyperopt has 'stoploss' space")
            self.stoploss_space = self.custom_hyperopt.stoploss_space()

        if HyperoptTools.has_space(self.config, 'trailing'):
            logger.debug("Hyperopt has 'trailing' space")
            self.trailing_space = self.custom_hyperopt.trailing_space()

        if HyperoptTools.has_space(self.config, 'trades'):
            logger.debug("Hyperopt has 'trades' space")
            self.max_open_trades_space = self.custom_hyperopt.max_open_trades_space()

        self.dimensions = (self.buy_space + self.sell_space + self.protection_space
                           + self.roi_space + self.stoploss_space + self.trailing_space
                           + self.max_open_trades_space)

    def assign_params(self, params_dict: Dict, category: str) -> None:
        """
        Assign hyperoptable parameters
        """
        for attr_name, attr in self.backtesting.strategy.enumerate_parameters(category):
            if attr.optimize:
                # noinspection PyProtectedMember
                attr.value = params_dict[attr_name]

    def generate_optimizer(self, raw_params: List[Any]) -> Dict[str, Any]:
        """
        The objective function used by the optimizer.
        Called once per epoch to optimize whatever is configured.
        Keep this function as optimized as possible!
        """
        HyperoptStateContainer.set_state(HyperoptState.OPTIMIZE)
        backtest_start_time = datetime.now(timezone.utc)
        params_dict = self._get_params_dict(self.dimensions, raw_params)

        # Apply parameters
        if HyperoptTools.has_space(self.config, 'buy'):
            self.assign_params(params_dict, 'buy')

        if HyperoptTools.has_space(self.config, 'sell'):
            self.assign_params(params_dict, 'sell')

        if HyperoptTools.has_space(self.config, 'protection'):
            self.assign_params(params_dict, 'protection')

        if HyperoptTools.has_space(self.config, 'roi'):
            self.backtesting.strategy.minimal_roi = (
                self.custom_hyperopt.generate_roi_table(params_dict))

        if HyperoptTools.has_space(self.config, 'stoploss'):
            self.backtesting.strategy.stoploss = params_dict['stoploss']

        if HyperoptTools.has_space(self.config, 'trailing'):
            d = self.custom_hyperopt.generate_trailing_params(params_dict)
            self.backtesting.strategy.trailing_stop = d['trailing_stop']
            self.backtesting.strategy.trailing_stop_positive = d['trailing_stop_positive']
            self.backtesting.strategy.trailing_stop_positive_offset = \
                d['trailing_stop_positive_offset']
            self.backtesting.strategy.trailing_only_offset_is_reached = \
                d['trailing_only_offset_is_reached']

        if HyperoptTools.has_space(self.config, 'trades'):
            if self.config["stake_amount"] == "unlimited" and \
                    (params_dict['max_open_trades'] == -1 or params_dict['max_open_trades'] == 0):
                # Ignore unlimited max open trades if stake amount is unlimited
                params_dict.update({'max_open_trades': self.config['max_open_trades']})

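            # In the search space, -1 and 0 both stand for 'unlimited'; map them
            # to float('inf') for internal use.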
            updated_max_open_trades = int(params_dict['max_open_trades']) \
                if (params_dict['max_open_trades'] != -1
                    and params_dict['max_open_trades'] != 0) else float('inf')

            self.config.update({'max_open_trades': updated_max_open_trades})

            self.backtesting.strategy.max_open_trades = updated_max_open_trades

        with self.data_pickle_file.open('rb') as f:
            processed = load(f, mmap_mode='r')
            if self.analyze_per_epoch:
                # Data is not yet analyzed, rerun populate_indicators.
                processed = self.advise_and_trim(processed)

        bt_results = self.backtesting.backtest(
            processed=processed,
            start_date=self.min_date,
            end_date=self.max_date
        )
        backtest_end_time = datetime.now(timezone.utc)
        bt_results.update({
            'backtest_start_time': int(backtest_start_time.timestamp()),
            'backtest_end_time': int(backtest_end_time.timestamp()),
        })

        return self._get_results_dict(bt_results, self.min_date, self.max_date,
                                      params_dict,
                                      processed=processed)

    def _get_results_dict(self, backtesting_results, min_date, max_date,
                          params_dict, processed: Dict[str, DataFrame]
                          ) -> Dict[str, Any]:
        params_details = self._get_params_details(params_dict)

        strat_stats = generate_strategy_stats(
            self.pairlist, self.backtesting.strategy.get_strategy_name(),
            backtesting_results, min_date, max_date, market_change=self.market_change
        )
        results_explanation = HyperoptTools.format_results_explanation_string(
            strat_stats, self.config['stake_currency'])

        not_optimized = self.backtesting.strategy.get_no_optimize_params()
        not_optimized = deep_merge_dicts(not_optimized, self._get_no_optimize_details())

        trade_count = strat_stats['total_trades']
        total_profit = strat_stats['profit_total']

        # If this evaluation contains too few trades to be interesting,
        # consider it 'bad' (assign the maximum loss value) in order to steer
        # the optimization path away from this hyperspace point.
        # We do not want to optimize 'hodl' strategies.
        loss: float = MAX_LOSS
        if trade_count >= self.config['hyperopt_min_trades']:
            loss = self.calculate_loss(results=backtesting_results['results'],
                                       trade_count=trade_count,
                                       min_date=min_date, max_date=max_date,
                                       config=self.config, processed=processed,
                                       backtest_stats=strat_stats)
        return {
            'loss': loss,
            'params_dict': params_dict,
            'params_details': params_details,
            'params_not_optimized': not_optimized,
            'results_metrics': strat_stats,
            'results_explanation': results_explanation,
            'total_profit': total_profit,
        }

    def get_optimizer(self, dimensions: List[Dimension], cpu_count) -> Optimizer:
        estimator = self.custom_hyperopt.generate_estimator(dimensions=dimensions)

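        # Named skopt estimators switch to the 'auto' acquisition optimizer below;
        # a custom estimator instance keeps the 'sampling' default.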
        acq_optimizer = "sampling"
1✔
420
        if isinstance(estimator, str):
1✔
421
            if estimator not in ("GP", "RF", "ET", "GBRT"):
1✔
422
                raise OperationalException(f"Estimator {estimator} not supported.")
1✔
423
            else:
424
                acq_optimizer = "auto"
1✔
425

426
        logger.info(f"Using estimator {estimator}.")
1✔
427
        return Optimizer(
1✔
428
            dimensions,
429
            base_estimator=estimator,
430
            acq_optimizer=acq_optimizer,
431
            n_initial_points=INITIAL_POINTS,
432
            acq_optimizer_kwargs={'n_jobs': cpu_count},
433
            random_state=self.random_state,
434
            model_queue_size=SKOPT_MODEL_QUEUE_SIZE,
435
        )
436

437
    def run_optimizer_parallel(
1✔
438
            self, parallel: Parallel, asked: List[List]) -> List[Dict[str, Any]]:
439
        """ Start optimizer in a parallel way """
440
        return parallel(delayed(
1✔
441
                        wrap_non_picklable_objects(self.generate_optimizer))(v) for v in asked)
442

443
    def _set_random_state(self, random_state: Optional[int]) -> int:
1✔
444
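        # Fall back to a random seed when none is configured (note that an
        # explicit seed of 0 is falsy and would be replaced as well).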
        return random_state or random.randint(1, 2**16 - 1)

    def advise_and_trim(self, data: Dict[str, DataFrame]) -> Dict[str, DataFrame]:
        preprocessed = self.backtesting.strategy.advise_all_indicators(data)

        # Trim startup period from analyzed dataframe to get correct dates for output.
        trimmed = trim_dataframes(preprocessed, self.timerange, self.backtesting.required_startup)
        self.min_date, self.max_date = get_timerange(trimmed)
        if not self.market_change:
            self.market_change = calculate_market_change(trimmed, 'close')

        # Real trimming will happen as part of backtesting.
        return preprocessed

    def prepare_hyperopt_data(self) -> None:
        HyperoptStateContainer.set_state(HyperoptState.DATALOAD)
        data, self.timerange = self.backtesting.load_bt_data()
        self.backtesting.load_bt_data_detail()
        logger.info("Dataload complete. Calculating indicators")

        if not self.analyze_per_epoch:
            HyperoptStateContainer.set_state(HyperoptState.INDICATORS)

            preprocessed = self.advise_and_trim(data)

            logger.info(f'Hyperopting with data from '
                        f'{self.min_date.strftime(DATETIME_PRINT_FORMAT)} '
                        f'up to {self.max_date.strftime(DATETIME_PRINT_FORMAT)} '
                        f'({(self.max_date - self.min_date).days} days)..')
            # Store non-trimmed data - will be trimmed after signal generation.
            dump(preprocessed, self.data_pickle_file)
        else:
            dump(data, self.data_pickle_file)

    def get_asked_points(self, n_points: int) -> Tuple[List[List[Any]], List[bool]]:
        """
        Ensure that points returned from `self.opt.ask` have not already been evaluated

        Steps:
        1. Try to get points using `self.opt.ask` first
        2. Discard the points that have already been evaluated
        3. Retry using `self.opt.ask` up to 3 times
        4. If some points are still missing with respect to `n_points`, randomly sample some points
        5. Repeat until there are at least `n_points` points in the `asked_non_tried` list
        6. Return a list whose length is truncated at `n_points`
        """
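        # Order-preserving de-duplication; the candidate points are lists
        # (unhashable), so a set cannot be used here.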
        def unique_list(a_list):
            new_list = []
            for item in a_list:
                if item not in new_list:
                    new_list.append(item)
            return new_list
        i = 0
        asked_non_tried: List[List[Any]] = []
        is_random_non_tried: List[bool] = []
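        # Over-ask by a factor of 5, since duplicates and points already tried
        # (present in self.opt.Xi) are filtered out below; the first 3 rounds
        # query the optimizer, after that fall back to random samples.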
        while i < 5 and len(asked_non_tried) < n_points:
            if i < 3:
                self.opt.cache_ = {}
                asked = unique_list(self.opt.ask(n_points=n_points * 5))
                is_random = [False for _ in range(len(asked))]
            else:
                asked = unique_list(self.opt.space.rvs(n_samples=n_points * 5))
                is_random = [True for _ in range(len(asked))]
            is_random_non_tried += [rand for x, rand in zip(asked, is_random)
                                    if x not in self.opt.Xi
                                    and x not in asked_non_tried]
            asked_non_tried += [x for x in asked
                                if x not in self.opt.Xi
                                and x not in asked_non_tried]
            i += 1

        if asked_non_tried:
            return (
                asked_non_tried[:min(len(asked_non_tried), n_points)],
                is_random_non_tried[:min(len(asked_non_tried), n_points)]
            )
        else:
            return self.opt.ask(n_points=n_points), [False for _ in range(n_points)]

    def get_progressbar_widgets(self):
        if self.print_colorized:
            widgets = [
                ' [Epoch ', progressbar.Counter(), ' of ', str(self.total_epochs),
                ' (', progressbar.Percentage(), ')] ',
                progressbar.Bar(marker=progressbar.AnimatedMarker(
                    fill='\N{FULL BLOCK}',
                    fill_wrap=Fore.GREEN + '{}' + Fore.RESET,
                    marker_wrap=Style.BRIGHT + '{}' + Style.RESET_ALL,
                )),
                ' [', progressbar.ETA(), ', ', progressbar.Timer(), ']',
            ]
        else:
            widgets = [
                ' [Epoch ', progressbar.Counter(), ' of ', str(self.total_epochs),
                ' (', progressbar.Percentage(), ')] ',
                progressbar.Bar(marker=progressbar.AnimatedMarker(
                    fill='\N{FULL BLOCK}',
                )),
                ' [', progressbar.ETA(), ', ', progressbar.Timer(), ']',
            ]
        return widgets

    def evaluate_result(self, val: Dict[str, Any], current: int, is_random: bool):
        """
        Evaluate results returned from generate_optimizer
        """
        val['current_epoch'] = current
        val['is_initial_point'] = current <= INITIAL_POINTS

        logger.debug("Optimizer epoch evaluated: %s", val)

        is_best = HyperoptTools.is_best_loss(val, self.current_best_loss)
        # This value is assigned here and not in the optimization method
        # to keep proper order in the list of results. That's because
        # evaluations can take different amounts of time. Here they are
        # aligned in the order they will be shown to the user.
        val['is_best'] = is_best
        val['is_random'] = is_random
        self.print_results(val)

        if is_best:
            self.current_best_loss = val['loss']
            self.current_best_epoch = val

        self._save_result(val)

    def start(self) -> None:
        self.random_state = self._set_random_state(self.config.get('hyperopt_random_state'))
        logger.info(f"Using optimizer random state: {self.random_state}")
        self.hyperopt_table_header = -1
        # Initialize spaces ...
        self.init_spaces()

        self.prepare_hyperopt_data()

        # We don't need exchange instance anymore while running hyperopt
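        # (Dropping these also avoids pickling sockets, event loops and locks
        # when the backtesting object is shipped to the parallel workers.)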
        self.backtesting.exchange.close()
        self.backtesting.exchange._api = None
        self.backtesting.exchange._api_async = None
        self.backtesting.exchange.loop = None  # type: ignore
        self.backtesting.exchange._loop_lock = None  # type: ignore
        self.backtesting.exchange._cache_lock = None  # type: ignore
        # self.backtesting.exchange = None  # type: ignore
        self.backtesting.pairlists = None  # type: ignore

        cpus = cpu_count()
        logger.info(f"Found {cpus} CPU cores. Let's make them scream!")
        config_jobs = self.config.get('hyperopt_jobs', -1)
        logger.info(f'Number of parallel jobs set as: {config_jobs}')

        self.opt = self.get_optimizer(self.dimensions, config_jobs)

        if self.print_colorized:
            colorama_init(autoreset=True)

        try:
            with Parallel(n_jobs=config_jobs) as parallel:
                jobs = parallel._effective_n_jobs()
                logger.info(f'Effective number of parallel workers used: {jobs}')

                # Define progressbar
                widgets = self.get_progressbar_widgets()
                with progressbar.ProgressBar(
                    max_value=self.total_epochs, redirect_stdout=False, redirect_stderr=False,
                    widgets=widgets
                ) as pbar:
                    start = 0

                    if self.analyze_per_epoch:
                        # First analysis not in parallel mode when using --analyze-per-epoch.
                        # This allows dataprovider to load its informative cache.
                        asked, is_random = self.get_asked_points(n_points=1)
                        f_val0 = self.generate_optimizer(asked[0])
                        self.opt.tell(asked, [f_val0['loss']])
                        self.evaluate_result(f_val0, 1, is_random[0])
                        pbar.update(1)
                        start += 1

                    evals = ceil((self.total_epochs - start) / jobs)
                    for i in range(evals):
                        # Correct the number of epochs to be processed for the last
                        # iteration (should not exceed self.total_epochs in total)
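                        # e.g. total_epochs=100, start=0, jobs=8: evals=13, and the
                        # last iteration (i=12) has n_rest=4, so only 4 jobs run.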
                        n_rest = (i + 1) * jobs - (self.total_epochs - start)
                        current_jobs = jobs - n_rest if n_rest > 0 else jobs

                        asked, is_random = self.get_asked_points(n_points=current_jobs)
                        f_val = self.run_optimizer_parallel(parallel, asked)
                        self.opt.tell(asked, [v['loss'] for v in f_val])

                        # Calculate progressbar outputs
                        for j, val in enumerate(f_val):
                            # Use human-friendly indexes here (starting from 1)
                            current = i * jobs + j + 1 + start

                            self.evaluate_result(val, current, is_random[j])

                            pbar.update(current)

        except KeyboardInterrupt:
            print('User interrupted..')

        logger.info(f"{self.num_epochs_saved} {plural(self.num_epochs_saved, 'epoch')} "
                    f"saved to '{self.results_file}'.")

        if self.current_best_epoch:
            HyperoptTools.try_export_params(
                self.config,
                self.backtesting.strategy.get_strategy_name(),
                self.current_best_epoch)

            HyperoptTools.show_epoch_details(self.current_best_epoch, self.total_epochs,
                                             self.print_json)
        else:
            # This is printed when Ctrl+C is pressed quickly, before the first epochs
            # have had a chance to be evaluated.
            print("No epochs evaluated yet, no best result.")