freqtrade / freqtrade / build 9394559170 (push, github)

26 Apr 2024 06:36AM UTC coverage: 94.656% (-0.02%) from 94.674%

Commit by xmatthias: "Loader should be passed as kwarg for clarity"

20280 of 21425 relevant lines covered (94.66%), 0.95 hits per line

Source File
/freqtrade/freqai/prediction_models/PyTorchTransformerRegressor.py: 96.55% covered

from typing import Any, Dict, Tuple

import numpy as np
import numpy.typing as npt
import pandas as pd
import torch

from freqtrade.freqai.base_models.BasePyTorchRegressor import BasePyTorchRegressor
from freqtrade.freqai.data_kitchen import FreqaiDataKitchen
from freqtrade.freqai.torch.PyTorchDataConvertor import (DefaultPyTorchDataConvertor,
                                                         PyTorchDataConvertor)
from freqtrade.freqai.torch.PyTorchModelTrainer import PyTorchTransformerTrainer
from freqtrade.freqai.torch.PyTorchTransformerModel import PyTorchTransformerModel


class PyTorchTransformerRegressor(BasePyTorchRegressor):
    """
    This class implements the fit method of IFreqaiModel.
    In the fit method we initialize the model and trainer objects.
    The only requirement on the model is that it is aligned with the PyTorchRegressor
    predict method, which expects the model to predict a tensor of type float.
    The trainer defines the training loop.

    Parameters are passed via `model_training_parameters` under the freqai
    section in the config file, e.g.:
    {
        ...
        "freqai": {
            ...
            "conv_width": 30,  // PyTorchTransformer is based on windowing
            "feature_parameters": {
                ...
                "include_shifted_candles": 0,  // which removes the need for shifted candles
                ...
            },
            "model_training_parameters": {
                "learning_rate": 3e-4,
                "trainer_kwargs": {
                    "n_steps": 5000,
                    "batch_size": 64,
                    "n_epochs": null
                },
                "model_kwargs": {
                    "hidden_dim": 512,
                    "dropout_percent": 0.2,
                    "n_layer": 1,
                },
            }
        }
    }
    """

    @property
    def data_convertor(self) -> PyTorchDataConvertor:
        return DefaultPyTorchDataConvertor(target_tensor_type=torch.float)

    def __init__(self, **kwargs) -> None:
        super().__init__(**kwargs)
        config = self.freqai_info.get("model_training_parameters", {})
        self.learning_rate: float = config.get("learning_rate", 3e-4)
        self.model_kwargs: Dict[str, Any] = config.get("model_kwargs", {})
        self.trainer_kwargs: Dict[str, Any] = config.get("trainer_kwargs", {})

    def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
        """
        User sets up the training and test data to fit their desired model here
        :param data_dictionary: the dictionary holding all data for train, test,
            labels, weights
        :param dk: The datakitchen object for the current coin/model
        """

        n_features = data_dictionary["train_features"].shape[-1]
        n_labels = data_dictionary["train_labels"].shape[-1]
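        # Network input/output dimensions are inferred from the training data; the
        # look-back length (time_window) is the configured window size.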
        model = PyTorchTransformerModel(
            input_dim=n_features,
            output_dim=n_labels,
            time_window=self.window_size,
            **self.model_kwargs
        )
        model.to(self.device)
        optimizer = torch.optim.AdamW(model.parameters(), lr=self.learning_rate)
        criterion = torch.nn.MSELoss()
        # check if continual_learning is activated, and retrieve the model to continue training
        trainer = self.get_init_model(dk.pair)
        if trainer is None:
            trainer = PyTorchTransformerTrainer(
                model=model,
                optimizer=optimizer,
                criterion=criterion,
                device=self.device,
                data_convertor=self.data_convertor,
                window_size=self.window_size,
                tb_logger=self.tb_logger,
                **self.trainer_kwargs,
            )
        trainer.fit(data_dictionary, self.splits)
        return trainer

    def predict(
        self, unfiltered_df: pd.DataFrame, dk: FreqaiDataKitchen, **kwargs
    ) -> Tuple[pd.DataFrame, npt.NDArray[np.int_]]:
        """
        Filter the prediction features data and predict with it.
        :param unfiltered_df: Full dataframe for the current backtest period.
        :return:
        :pred_df: dataframe containing the predictions
        :do_predict: np.array of 1s and 0s to indicate places where freqai needed to remove
        data (NaNs) or felt uncertain about data (PCA and DI index)
        """

        dk.find_features(unfiltered_df)
        dk.data_dictionary["prediction_features"], _ = dk.filter_features(
            unfiltered_df, dk.training_features_list, training_filter=False
        )

        dk.data_dictionary["prediction_features"], outliers, _ = dk.feature_pipeline.transform(
            dk.data_dictionary["prediction_features"], outlier_check=True)
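        # `outliers` is an array of 1s and 0s, one per candle: 0 marks rows the feature
        # pipeline flagged as outliers; it is assigned to dk.do_predict further below.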

        x = self.data_convertor.convert_x(
            dk.data_dictionary["prediction_features"],
            device=self.device
        )
        # if user is asking for multiple predictions, slide the window
        # along the tensor
        x = x.unsqueeze(0)
        self.model.model.eval()
        # create an empty tensor to accumulate the per-window predictions
        yb = torch.empty(0).to(self.device)
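        # When more candles than one window are supplied, run the model once per sliding
        # window of `window_size` candles and concatenate the outputs along the time
        # dimension; otherwise predict on the single window directly.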
        if x.shape[1] > self.window_size:
            ws = self.window_size
            for i in range(0, x.shape[1] - ws):
                xb = x[:, i:i + ws, :].to(self.device)
                y = self.model.model(xb)
                yb = torch.cat((yb, y), dim=1)
        else:
            yb = self.model.model(x)

        yb = yb.cpu().squeeze(0)
        pred_df = pd.DataFrame(yb.detach().numpy(), columns=dk.label_list)
        pred_df, _, _ = dk.label_pipeline.inverse_transform(pred_df)

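        # Dissimilarity Index (DI) values are only available when DI filtering is enabled
        # via `DI_threshold` in the config; otherwise they are filled with zeros.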
        if self.freqai_info.get("DI_threshold", 0) > 0:
            dk.DI_values = dk.feature_pipeline["di"].di_values
        else:
            dk.DI_values = np.zeros(outliers.shape[0])
        dk.do_predict = outliers

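        # The windowed loop produces fewer predictions than input rows (the earliest
        # candles lack a full look-back window), so front-pad with zeros to keep pred_df
        # aligned with the length of the input dataframe.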
        if x.shape[1] > 1:
            zeros_df = pd.DataFrame(np.zeros((x.shape[1] - len(pred_df), len(pred_df.columns))),
                                    columns=pred_df.columns)
            pred_df = pd.concat([zeros_df, pred_df], axis=0, ignore_index=True)
        return (pred_df, dk.do_predict)