/freqtrade/data/history/history_utils.py
import logging
import operator
from datetime import datetime, timedelta
from pathlib import Path
from typing import Dict, List, Optional, Tuple

from pandas import DataFrame, concat

from freqtrade.configuration import TimeRange
from freqtrade.constants import (DATETIME_PRINT_FORMAT, DEFAULT_DATAFRAME_COLUMNS,
                                 DL_DATA_TIMEFRAMES, DOCS_LINK, Config)
from freqtrade.data.converter import (clean_ohlcv_dataframe, convert_trades_to_ohlcv,
                                      ohlcv_to_dataframe, trades_df_remove_duplicates,
                                      trades_list_to_df)
from freqtrade.data.history.datahandlers import IDataHandler, get_datahandler
from freqtrade.enums import CandleType, TradingMode
from freqtrade.exceptions import OperationalException
from freqtrade.exchange import Exchange
from freqtrade.plugins.pairlist.pairlist_helpers import dynamic_expand_pairlist
from freqtrade.util import dt_ts, format_ms_time
from freqtrade.util.datetime_helpers import dt_now
from freqtrade.util.migrations import migrate_data


logger = logging.getLogger(__name__)


def load_pair_history(pair: str,
                      timeframe: str,
                      datadir: Path, *,
                      timerange: Optional[TimeRange] = None,
                      fill_up_missing: bool = True,
                      drop_incomplete: bool = False,
                      startup_candles: int = 0,
                      data_format: Optional[str] = None,
                      data_handler: Optional[IDataHandler] = None,
                      candle_type: CandleType = CandleType.SPOT
                      ) -> DataFrame:
    """
    Load cached ohlcv history for the given pair.

    :param pair: Pair to load data for
    :param timeframe: Timeframe (e.g. "5m")
    :param datadir: Path to the data storage location.
    :param data_format: Format of the data. Ignored if data_handler is set.
    :param timerange: Limit data to be loaded to this timerange
    :param fill_up_missing: Fill missing values with "No action"-candles
    :param drop_incomplete: Drop last candle assuming it may be incomplete.
    :param startup_candles: Additional candles to load at the start of the period
    :param data_handler: Initialized data-handler to use.
                         Will be initialized from data_format if not set
    :param candle_type: Any of the enum CandleType (must match trading mode!)
    :return: DataFrame with ohlcv data, or empty DataFrame
    """
    data_handler = get_datahandler(datadir, data_format, data_handler)

    return data_handler.ohlcv_load(pair=pair,
                                   timeframe=timeframe,
                                   timerange=timerange,
                                   fill_missing=fill_up_missing,
                                   drop_incomplete=drop_incomplete,
                                   startup_candles=startup_candles,
                                   candle_type=candle_type,
                                   )


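# Example (illustrative sketch, not part of the original module): loading cached
# 5m spot candles for one pair. The pair name, data directory and timerange below
# are assumptions for demonstration only.
#
#     candles = load_pair_history(
#         pair="BTC/USDT",
#         timeframe="5m",
#         datadir=Path("user_data/data/binance"),
#         timerange=TimeRange.parse_timerange("20240101-20240201"),
#         candle_type=CandleType.SPOT,
#     )
#     print(candles.tail())
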
def load_data(datadir: Path,
              timeframe: str,
              pairs: List[str], *,
              timerange: Optional[TimeRange] = None,
              fill_up_missing: bool = True,
              startup_candles: int = 0,
              fail_without_data: bool = False,
              data_format: str = 'feather',
              candle_type: CandleType = CandleType.SPOT,
              user_futures_funding_rate: Optional[int] = None,
              ) -> Dict[str, DataFrame]:
    """
    Load ohlcv history data for a list of pairs.

    :param datadir: Path to the data storage location.
    :param timeframe: Timeframe (e.g. "5m")
    :param pairs: List of pairs to load
    :param timerange: Limit data to be loaded to this timerange
    :param fill_up_missing: Fill missing values with "No action"-candles
    :param startup_candles: Additional candles to load at the start of the period
    :param fail_without_data: Raise OperationalException if no data is found.
    :param data_format: Data format which should be used. Defaults to feather.
    :param candle_type: Any of the enum CandleType (must match trading mode!)
    :param user_futures_funding_rate: User-specified fallback value for the funding rate
                                      (only relevant when loading funding-rate candles)
    :return: dict(<pair>:<Dataframe>)
    """
    result: Dict[str, DataFrame] = {}
    if startup_candles > 0 and timerange:
        logger.info(f'Using indicator startup period: {startup_candles} ...')

    data_handler = get_datahandler(datadir, data_format)

    for pair in pairs:
        hist = load_pair_history(pair=pair, timeframe=timeframe,
                                 datadir=datadir, timerange=timerange,
                                 fill_up_missing=fill_up_missing,
                                 startup_candles=startup_candles,
                                 data_handler=data_handler,
                                 candle_type=candle_type,
                                 )
        if not hist.empty:
            result[pair] = hist
        else:
            if candle_type is CandleType.FUNDING_RATE and user_futures_funding_rate is not None:
                logger.warning(f"{pair} using user specified [{user_futures_funding_rate}]")
            elif candle_type not in (CandleType.SPOT, CandleType.FUTURES):
                result[pair] = DataFrame(columns=["date", "open", "close", "high", "low", "volume"])

    if fail_without_data and not result:
        raise OperationalException("No data found. Terminating.")
    return result


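# Example (illustrative sketch, not part of the original module): loading several
# pairs at once; the result maps each pair to its DataFrame. Pair names and the
# data directory are placeholders.
#
#     data = load_data(
#         datadir=Path("user_data/data/binance"),
#         timeframe="1h",
#         pairs=["BTC/USDT", "ETH/USDT"],
#         timerange=TimeRange.parse_timerange("20240101-"),
#         startup_candles=100,
#         fail_without_data=True,
#     )
#     for pair, df in data.items():
#         print(pair, len(df))
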
def refresh_data(*, datadir: Path,
                 timeframe: str,
                 pairs: List[str],
                 exchange: Exchange,
                 data_format: Optional[str] = None,
                 timerange: Optional[TimeRange] = None,
                 candle_type: CandleType,
                 ) -> None:
    """
    Refresh ohlcv history data for a list of pairs.

    :param datadir: Path to the data storage location.
    :param timeframe: Timeframe (e.g. "5m")
    :param pairs: List of pairs to load
    :param exchange: Exchange object
    :param data_format: Data format to use
    :param timerange: Limit data to be loaded to this timerange
    :param candle_type: Any of the enum CandleType (must match trading mode!)
    """
    data_handler = get_datahandler(datadir, data_format)
    for idx, pair in enumerate(pairs):
        process = f'{idx}/{len(pairs)}'
        _download_pair_history(pair=pair, process=process,
                               timeframe=timeframe, datadir=datadir,
                               timerange=timerange, exchange=exchange, data_handler=data_handler,
                               candle_type=candle_type)


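# Example (illustrative sketch, not part of the original module): refreshing the
# cache for a small pairlist. `exchange` is assumed to be an already initialized
# freqtrade Exchange instance; pair names and the data directory are placeholders.
#
#     refresh_data(
#         datadir=Path("user_data/data/binance"),
#         timeframe="5m",
#         pairs=["BTC/USDT", "ETH/USDT"],
#         exchange=exchange,
#         candle_type=CandleType.SPOT,
#     )
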
def _load_cached_data_for_updating(
    pair: str,
    timeframe: str,
    timerange: Optional[TimeRange],
    data_handler: IDataHandler,
    candle_type: CandleType,
    prepend: bool = False,
) -> Tuple[DataFrame, Optional[int], Optional[int]]:
    """
    Load cached data to download more data.
    If timerange is passed in, checks whether data from before the stored data
    will be downloaded.
    If that's the case, then what's available should be completely overwritten.
    Otherwise downloads always start at the end of the available data to avoid data gaps.
    Note: Only used by _download_pair_history().
    """
    start = None
    end = None
    if timerange:
        if timerange.starttype == 'date':
            start = timerange.startdt
        if timerange.stoptype == 'date':
            end = timerange.stopdt

    # Intentionally don't pass timerange in - since we need to load the full dataset.
    data = data_handler.ohlcv_load(pair, timeframe=timeframe,
                                   timerange=None, fill_missing=False,
                                   drop_incomplete=True, warn_no_data=False,
                                   candle_type=candle_type)
    if not data.empty:
        if not prepend and start and start < data.iloc[0]['date']:
            # Earlier data than existing data requested, redownload all
            data = DataFrame(columns=DEFAULT_DATAFRAME_COLUMNS)
        else:
            if prepend:
                end = data.iloc[0]['date']
            else:
                start = data.iloc[-1]['date']
    start_ms = int(start.timestamp() * 1000) if start else None
    end_ms = int(end.timestamp() * 1000) if end else None
    return data, start_ms, end_ms


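# Illustrative note (not part of the original module): for a cache covering
# 2024-01-01 to 2024-02-01, a timerange starting 2024-01-15 yields
# start_ms == timestamp of the last cached candle (the download continues at the end);
# a timerange starting 2023-12-01 with prepend=False returns an empty DataFrame
# (full redownload); and prepend=True yields end_ms == timestamp of the first
# cached candle (only older data is fetched).
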
def _download_pair_history(pair: str, *,
                           datadir: Path,
                           exchange: Exchange,
                           timeframe: str = '5m',
                           process: str = '',
                           new_pairs_days: int = 30,
                           data_handler: Optional[IDataHandler] = None,
                           timerange: Optional[TimeRange] = None,
                           candle_type: CandleType,
                           erase: bool = False,
                           prepend: bool = False,
                           ) -> bool:
    """
    Download the latest candles from the exchange for the pair and timeframe passed as parameters.
    The data is downloaded starting from the last correct data that
    exists in the cache. If timerange starts earlier than the data in the cache,
    the full data will be redownloaded.

    :param pair: pair to download
    :param timeframe: Timeframe (e.g. "5m")
    :param timerange: range of time to download
    :param candle_type: Any of the enum CandleType (must match trading mode!)
    :param erase: Erase existing data
    :return: bool with success state
    """
    data_handler = get_datahandler(datadir, data_handler=data_handler)

    try:
        if erase:
            if data_handler.ohlcv_purge(pair, timeframe, candle_type=candle_type):
                logger.info(f'Deleting existing data for pair {pair}, {timeframe}, {candle_type}.')

        data, since_ms, until_ms = _load_cached_data_for_updating(
            pair, timeframe, timerange,
            data_handler=data_handler,
            candle_type=candle_type,
            prepend=prepend)

        logger.info(f'({process}) - Download history data for "{pair}", {timeframe}, '
                    f'{candle_type} and store in {datadir}. '
                    f'From {format_ms_time(since_ms) if since_ms else "start"} to '
                    f'{format_ms_time(until_ms) if until_ms else "now"}'
                    )

        logger.debug("Current Start: %s",
                     f"{data.iloc[0]['date']:{DATETIME_PRINT_FORMAT}}"
                     if not data.empty else 'None')
        logger.debug("Current End: %s",
                     f"{data.iloc[-1]['date']:{DATETIME_PRINT_FORMAT}}"
                     if not data.empty else 'None')

        # Default since_ms to 30 days if nothing is given
        new_data = exchange.get_historic_ohlcv(pair=pair,
                                               timeframe=timeframe,
                                               since_ms=since_ms if since_ms else
                                               int((datetime.now() - timedelta(days=new_pairs_days)
                                                    ).timestamp()) * 1000,
                                               is_new_pair=data.empty,
                                               candle_type=candle_type,
                                               until_ms=until_ms if until_ms else None
                                               )
        # TODO: Maybe move parsing to exchange class (?)
        new_dataframe = ohlcv_to_dataframe(new_data, timeframe, pair,
                                           fill_missing=False, drop_incomplete=True)
        if data.empty:
            data = new_dataframe
        else:
            # Run cleaning again to ensure there were no duplicate candles
            # Especially between existing and new data.
            data = clean_ohlcv_dataframe(concat([data, new_dataframe], axis=0), timeframe, pair,
                                         fill_missing=False, drop_incomplete=False)

        logger.debug("New Start: %s",
                     f"{data.iloc[0]['date']:{DATETIME_PRINT_FORMAT}}"
                     if not data.empty else 'None')
        logger.debug("New End: %s",
                     f"{data.iloc[-1]['date']:{DATETIME_PRINT_FORMAT}}"
                     if not data.empty else 'None')

        data_handler.ohlcv_store(pair, timeframe, data=data, candle_type=candle_type)
        return True

    except Exception:
        logger.exception(
            f'Failed to download history data for pair: "{pair}", timeframe: {timeframe}.'
        )
        return False


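# Illustrative note (not part of the original module): with erase=True the cached
# candles are purged first, so _load_cached_data_for_updating() returns an empty
# DataFrame; without a timerange start, since_ms is then None and the download
# falls back to the last `new_pairs_days` days (30 by default).
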
def refresh_backtest_ohlcv_data(exchange: Exchange, pairs: List[str], timeframes: List[str],
                                datadir: Path, trading_mode: str,
                                timerange: Optional[TimeRange] = None,
                                new_pairs_days: int = 30, erase: bool = False,
                                data_format: Optional[str] = None,
                                prepend: bool = False,
                                ) -> List[str]:
    """
    Refresh stored ohlcv data for backtesting and hyperopt operations.
    Used by freqtrade download-data subcommand.
    :return: List of pairs that are not available.
    """
    pairs_not_available = []
    data_handler = get_datahandler(datadir, data_format)
    candle_type = CandleType.get_default(trading_mode)
    process = ''
    for idx, pair in enumerate(pairs, start=1):
        if pair not in exchange.markets:
            pairs_not_available.append(pair)
            logger.info(f"Skipping pair {pair}...")
            continue
        for timeframe in timeframes:

            logger.debug(f'Downloading pair {pair}, {candle_type}, interval {timeframe}.')
            process = f'{idx}/{len(pairs)}'
            _download_pair_history(pair=pair, process=process,
                                   datadir=datadir, exchange=exchange,
                                   timerange=timerange, data_handler=data_handler,
                                   timeframe=str(timeframe), new_pairs_days=new_pairs_days,
                                   candle_type=candle_type,
                                   erase=erase, prepend=prepend)
        if trading_mode == 'futures':
            # Predefined candletype (and timeframe) depending on exchange
            # Downloads what is necessary to backtest based on futures data.
            tf_mark = exchange.get_option('mark_ohlcv_timeframe')
            tf_funding_rate = exchange.get_option('funding_fee_timeframe')

            fr_candle_type = CandleType.from_string(exchange.get_option('mark_ohlcv_price'))
            # All exchanges need FundingRate for futures trading.
            # The timeframe is aligned to the mark-price timeframe.
            combs = ((CandleType.FUNDING_RATE, tf_funding_rate), (fr_candle_type, tf_mark))
            for candle_type_f, tf in combs:
                logger.debug(f'Downloading pair {pair}, {candle_type_f}, interval {tf}.')
                _download_pair_history(pair=pair, process=process,
                                       datadir=datadir, exchange=exchange,
                                       timerange=timerange, data_handler=data_handler,
                                       timeframe=str(tf), new_pairs_days=new_pairs_days,
                                       candle_type=candle_type_f,
                                       erase=erase, prepend=prepend)

    return pairs_not_available


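# Example (illustrative sketch, not part of the original module): bulk download as
# done by `freqtrade download-data`. `exchange` is assumed to be an initialized
# Exchange instance; pairs, timeframes and the data directory are placeholders.
#
#     missing = refresh_backtest_ohlcv_data(
#         exchange,
#         pairs=["BTC/USDT", "ETH/USDT"],
#         timeframes=["5m", "1h"],
#         datadir=Path("user_data/data/binance"),
#         trading_mode="spot",
#         timerange=TimeRange.parse_timerange("20240101-"),
#     )
#     if missing:
#         print(f"Not available on this exchange: {missing}")
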
def _download_trades_history(exchange: Exchange,
                             pair: str, *,
                             new_pairs_days: int = 30,
                             timerange: Optional[TimeRange] = None,
                             data_handler: IDataHandler,
                             trading_mode: TradingMode,
                             ) -> bool:
    """
    Download trade history from the exchange.
    Appends to previously downloaded trades data.
    """
    try:

        until = None
        since = 0
        if timerange:
            if timerange.starttype == 'date':
                since = timerange.startts * 1000
            if timerange.stoptype == 'date':
                until = timerange.stopts * 1000

        trades = data_handler.trades_load(pair, trading_mode)

        # TradesList columns are defined in constants.DEFAULT_TRADES_COLUMNS
        # DEFAULT_TRADES_COLUMNS: 0 -> timestamp
        # DEFAULT_TRADES_COLUMNS: 1 -> id

        if not trades.empty and since > 0 and since < trades.iloc[0]['timestamp']:
            # since is before the first trade
            logger.info(f"Start ({trades.iloc[0]['date']:{DATETIME_PRINT_FORMAT}}) earlier than "
                        f"available data. Redownloading trades for {pair}...")
            trades = trades_list_to_df([])

        from_id = trades.iloc[-1]['id'] if not trades.empty else None
        if not trades.empty and since < trades.iloc[-1]['timestamp']:
            # Reset since to the last available point
            # - 5 seconds (to ensure we're getting all trades)
            since = trades.iloc[-1]['timestamp'] - (5 * 1000)
            logger.info(f"Using last trade date -5s - Downloading trades for {pair} "
                        f"since: {format_ms_time(since)}.")

        if not since:
            since = dt_ts(dt_now() - timedelta(days=new_pairs_days))

        logger.debug("Current Start: %s", 'None' if trades.empty else
                     f"{trades.iloc[0]['date']:{DATETIME_PRINT_FORMAT}}")
        logger.debug("Current End: %s", 'None' if trades.empty else
                     f"{trades.iloc[-1]['date']:{DATETIME_PRINT_FORMAT}}")
        logger.info(f"Current Amount of trades: {len(trades)}")

        # Default since_ms to 30 days if nothing is given
        new_trades = exchange.get_historic_trades(pair=pair,
                                                  since=since,
                                                  until=until,
                                                  from_id=from_id,
                                                  )
        new_trades_df = trades_list_to_df(new_trades[1])
        trades = concat([trades, new_trades_df], axis=0)
        # Remove duplicates to make sure we're not storing data we don't need
        trades = trades_df_remove_duplicates(trades)
        data_handler.trades_store(pair, trades, trading_mode)

        logger.debug("New Start: %s", 'None' if trades.empty else
                     f"{trades.iloc[0]['date']:{DATETIME_PRINT_FORMAT}}")
        logger.debug("New End: %s", 'None' if trades.empty else
                     f"{trades.iloc[-1]['date']:{DATETIME_PRINT_FORMAT}}")
        logger.info(f"New Amount of trades: {len(trades)}")
        return True

    except Exception:
        logger.exception(
            f'Failed to download historic trades for pair: "{pair}". '
        )
        return False


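# Illustrative note (not part of the original module): if the cached trades end at
# timestamp 1_700_000_000_000 (ms), the next download resumes from
# 1_699_999_995_000 (5 seconds earlier) and passes the last cached trade id as
# from_id; the resulting overlap is removed again by trades_df_remove_duplicates().
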
def refresh_backtest_trades_data(exchange: Exchange, pairs: List[str], datadir: Path,
                                 timerange: TimeRange, trading_mode: TradingMode,
                                 new_pairs_days: int = 30,
                                 erase: bool = False, data_format: str = 'feather',
                                 ) -> List[str]:
    """
    Refresh stored trades data for backtesting and hyperopt operations.
    Used by freqtrade download-data subcommand.
    :return: List of pairs that are not available.
    """
    pairs_not_available = []
    data_handler = get_datahandler(datadir, data_format=data_format)
    for pair in pairs:
        if pair not in exchange.markets:
            pairs_not_available.append(pair)
            logger.info(f"Skipping pair {pair}...")
            continue

        if erase:
            if data_handler.trades_purge(pair, trading_mode):
                logger.info(f'Deleting existing data for pair {pair}.')

        logger.info(f'Downloading trades for pair {pair}.')
        _download_trades_history(exchange=exchange,
                                 pair=pair,
                                 new_pairs_days=new_pairs_days,
                                 timerange=timerange,
                                 data_handler=data_handler,
                                 trading_mode=trading_mode)
    return pairs_not_available


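# Example (illustrative sketch, not part of the original module): downloading raw
# trades, as done by `freqtrade download-data --dl-trades`. `exchange` is assumed
# to be an initialized Exchange instance; pair names and paths are placeholders.
#
#     missing = refresh_backtest_trades_data(
#         exchange,
#         pairs=["BTC/USDT"],
#         datadir=Path("user_data/data/kraken"),
#         timerange=TimeRange.parse_timerange("20240101-"),
#         trading_mode=TradingMode.SPOT,
#     )
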
def get_timerange(data: Dict[str, DataFrame]) -> Tuple[datetime, datetime]:
    """
    Get the maximum common timerange for the given backtest data.

    :param data: dictionary with preprocessed backtesting data
    :return: tuple containing min_date, max_date
    """
    timeranges = [
        (frame['date'].min().to_pydatetime(), frame['date'].max().to_pydatetime())
        for frame in data.values()
    ]
    return (min(timeranges, key=operator.itemgetter(0))[0],
            max(timeranges, key=operator.itemgetter(1))[1])


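# Example (illustrative sketch, not part of the original module): given two frames
# covering 2024-01-01..2024-03-01 and 2024-02-01..2024-04-01, get_timerange()
# returns (2024-01-01, 2024-04-01), i.e. the earliest start and the latest end
# across all pairs.
#
#     min_date, max_date = get_timerange(data)
#     print(f"Backtest data spans {min_date} - {max_date}")
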
def validate_backtest_data(data: DataFrame, pair: str, min_date: datetime,
                           max_date: datetime, timeframe_min: int) -> bool:
    """
    Validates preprocessed backtesting data for missing values and shows warnings about it.

    :param data: preprocessed backtesting data (as DataFrame)
    :param pair: pair used for log output.
    :param min_date: start-date of the data
    :param max_date: end-date of the data
    :param timeframe_min: Timeframe in minutes
    :return: True if frames are missing, False otherwise
    """
    # total difference in minutes / timeframe-minutes
    expected_frames = int((max_date - min_date).total_seconds() // 60 // timeframe_min)
    found_missing = False
    dflen = len(data)
    if dflen < expected_frames:
        found_missing = True
        logger.warning("%s has missing frames: expected %s, got %s, that's %s missing values",
                       pair, expected_frames, dflen, expected_frames - dflen)
    return found_missing


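# Example (illustrative sketch, not part of the original module): for a 5m
# timeframe, timeframe_min is 5; a one-day range therefore expects 288 candles,
# and a frame with only 280 rows logs a warning and returns True (data is missing).
#
#     has_gaps = validate_backtest_data(
#         data["BTC/USDT"], "BTC/USDT", min_date, max_date, timeframe_min=5
#     )
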
def download_data_main(config: Config) -> None:

    timerange = TimeRange()
    if 'days' in config:
        time_since = (datetime.now() - timedelta(days=config['days'])).strftime("%Y%m%d")
        timerange = TimeRange.parse_timerange(f'{time_since}-')

    if 'timerange' in config:
        timerange = timerange.parse_timerange(config['timerange'])

    # Remove stake-currency to skip checks which are not relevant for datadownload
    config['stake_currency'] = ''

    pairs_not_available: List[str] = []

    # Init exchange
    from freqtrade.resolvers.exchange_resolver import ExchangeResolver
    exchange = ExchangeResolver.load_exchange(config, validate=False)
    available_pairs = [
        p for p in exchange.get_markets(
            tradable_only=True, active_only=not config.get('include_inactive')
            ).keys()
    ]

    expanded_pairs = dynamic_expand_pairlist(config, available_pairs)
    if 'timeframes' not in config:
        config['timeframes'] = DL_DATA_TIMEFRAMES

    # Manual validations of relevant settings
    if not config['exchange'].get('skip_pair_validation', False):
        exchange.validate_pairs(expanded_pairs)
    logger.info(f"About to download pairs: {expanded_pairs}, "
                f"intervals: {config['timeframes']} to {config['datadir']}")

    if len(expanded_pairs) == 0:
        logger.warning(
            "No pairs available for download. "
            "Please make sure you're using the correct Pair naming for your selected trade mode. \n"
            f"More info: {DOCS_LINK}/bot-basics/#pair-naming")

    for timeframe in config['timeframes']:
        exchange.validate_timeframes(timeframe)

    # Start downloading
    try:
        if config.get('download_trades'):
            pairs_not_available = refresh_backtest_trades_data(
                exchange, pairs=expanded_pairs, datadir=config['datadir'],
                timerange=timerange, new_pairs_days=config['new_pairs_days'],
                erase=bool(config.get('erase')), data_format=config['dataformat_trades'],
                trading_mode=config.get('trading_mode', TradingMode.SPOT),
                )

            # Convert downloaded trade data to different timeframes
            convert_trades_to_ohlcv(
                pairs=expanded_pairs, timeframes=config['timeframes'],
                datadir=config['datadir'], timerange=timerange, erase=bool(config.get('erase')),
                data_format_ohlcv=config['dataformat_ohlcv'],
                data_format_trades=config['dataformat_trades'],
                candle_type=config.get('candle_type_def', CandleType.SPOT),
            )
        else:
            if not exchange.get_option('ohlcv_has_history', True):
                raise OperationalException(
                    f"Historic klines not available for {exchange.name}. "
                    "Please use `--dl-trades` instead for this exchange "
                    "(will unfortunately take a long time)."
                    )
            migrate_data(config, exchange)
            pairs_not_available = refresh_backtest_ohlcv_data(
                exchange, pairs=expanded_pairs, timeframes=config['timeframes'],
                datadir=config['datadir'], timerange=timerange,
                new_pairs_days=config['new_pairs_days'],
                erase=bool(config.get('erase')), data_format=config['dataformat_ohlcv'],
                trading_mode=config.get('trading_mode', 'spot'),
                prepend=config.get('prepend_data', False)
            )
    finally:
        if pairs_not_available:
            logger.info(f"Pairs [{','.join(pairs_not_available)}] not available "
                        f"on exchange {exchange.name}.")

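
# Illustrative note (not part of the original module): download_data_main() reads,
# among others, the config keys 'days' or 'timerange', 'datadir', 'timeframes',
# 'new_pairs_days', 'dataformat_ohlcv' / 'dataformat_trades', 'erase',
# 'download_trades', 'prepend_data', 'trading_mode' and the 'exchange' section;
# it backs the `freqtrade download-data` subcommand.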