freqtrade / freqtrade / 6181253459

08 Sep 2023 06:04AM UTC coverage: 94.614% (+0.06%) from 94.556%

push · github-actions · web-flow
Merge pull request #9159 from stash86/fix-adjust

remove old codes when we only can do partial entries

2 of 2 new or added lines in 1 file covered. (100.0%)

19114 of 20202 relevant lines covered (94.61%)

0.95 hits per line

Source File: /freqtrade/freqai/prediction_models/PyTorchTransformerRegressor.py (96.55% covered)

from typing import Any, Dict, Tuple

import numpy as np
import numpy.typing as npt
import pandas as pd
import torch

from freqtrade.freqai.base_models.BasePyTorchRegressor import BasePyTorchRegressor
from freqtrade.freqai.data_kitchen import FreqaiDataKitchen
from freqtrade.freqai.torch.PyTorchDataConvertor import (DefaultPyTorchDataConvertor,
                                                         PyTorchDataConvertor)
from freqtrade.freqai.torch.PyTorchModelTrainer import PyTorchTransformerTrainer
from freqtrade.freqai.torch.PyTorchTransformerModel import PyTorchTransformerModel


class PyTorchTransformerRegressor(BasePyTorchRegressor):
    """
    This class implements the fit method of IFreqaiModel.
    In the fit method we initialize the model and trainer objects.
    The only requirement on the model is that it is aligned with the PyTorchRegressor
    predict method, which expects the model to predict a tensor of type float.
    The trainer defines the training loop.

    Parameters are passed via `model_training_parameters` under the freqai
    section in the config file, e.g.:
    {
        ...
        "freqai": {
            ...
            "model_training_parameters": {
                "learning_rate": 3e-4,
                "trainer_kwargs": {
                    "n_steps": 5000,
                    "batch_size": 64,
                    "n_epochs": null
                },
                "model_kwargs": {
                    "hidden_dim": 512,
                    "dropout_percent": 0.2,
                    "n_layer": 1
                }
            }
        }
    }
    """

    @property
    def data_convertor(self) -> PyTorchDataConvertor:
        return DefaultPyTorchDataConvertor(target_tensor_type=torch.float)

    def __init__(self, **kwargs) -> None:
        super().__init__(**kwargs)
        config = self.freqai_info.get("model_training_parameters", {})
        self.learning_rate: float = config.get("learning_rate", 3e-4)
        self.model_kwargs: Dict[str, Any] = config.get("model_kwargs", {})
        self.trainer_kwargs: Dict[str, Any] = config.get("trainer_kwargs", {})

    def fit(self, data_dictionary: Dict, dk: FreqaiDataKitchen, **kwargs) -> Any:
        """
        User sets up the training and test data to fit their desired model here
        :param data_dictionary: the dictionary holding all data for train, test,
            labels, weights
        :param dk: The datakitchen object for the current coin/model
        """

        n_features = data_dictionary["train_features"].shape[-1]
        n_labels = data_dictionary["train_labels"].shape[-1]
        model = PyTorchTransformerModel(
            input_dim=n_features,
            output_dim=n_labels,
            time_window=self.window_size,
            **self.model_kwargs
        )
        model.to(self.device)
        optimizer = torch.optim.AdamW(model.parameters(), lr=self.learning_rate)
        criterion = torch.nn.MSELoss()
        # check if continual_learning is activated, and retrieve the model to continue training
        trainer = self.get_init_model(dk.pair)
        if trainer is None:
            trainer = PyTorchTransformerTrainer(
                model=model,
                optimizer=optimizer,
                criterion=criterion,
                device=self.device,
                data_convertor=self.data_convertor,
                window_size=self.window_size,
                tb_logger=self.tb_logger,
                **self.trainer_kwargs,
            )
        trainer.fit(data_dictionary, self.splits)
        return trainer

    def predict(
        self, unfiltered_df: pd.DataFrame, dk: FreqaiDataKitchen, **kwargs
    ) -> Tuple[pd.DataFrame, npt.NDArray[np.int_]]:
        """
        Filter the prediction features data and predict with it.
        :param unfiltered_df: Full dataframe for the current backtest period.
        :return:
        :pred_df: dataframe containing the predictions
        :do_predict: np.array of 1s and 0s to indicate places where freqai needed to remove
            data (NaNs) or felt uncertain about data (PCA and DI index)
        """

        dk.find_features(unfiltered_df)
        dk.data_dictionary["prediction_features"], _ = dk.filter_features(
            unfiltered_df, dk.training_features_list, training_filter=False
        )

        dk.data_dictionary["prediction_features"], outliers, _ = dk.feature_pipeline.transform(
            dk.data_dictionary["prediction_features"], outlier_check=True)

        x = self.data_convertor.convert_x(
            dk.data_dictionary["prediction_features"],
            device=self.device
        )
        # if user is asking for multiple predictions, slide the window
        # along the tensor
        x = x.unsqueeze(0)
        # create empty torch tensor
        self.model.model.eval()
        yb = torch.empty(0).to(self.device)
        if x.shape[1] > 1:
            ws = self.window_size
            for i in range(0, x.shape[1] - ws):
                xb = x[:, i:i + ws, :].to(self.device)
                y = self.model.model(xb)
                yb = torch.cat((yb, y), dim=0)
        else:
            yb = self.model.model(x)  # not covered in this build (×)

        yb = yb.cpu().squeeze()
        pred_df = pd.DataFrame(yb.detach().numpy(), columns=dk.label_list)
        pred_df, _, _ = dk.label_pipeline.inverse_transform(pred_df)

        if self.freqai_info.get("DI_threshold", 0) > 0:
            dk.DI_values = dk.feature_pipeline["di"].di_values  # not covered in this build (×)
        else:
            dk.DI_values = np.zeros(outliers.shape[0])
        dk.do_predict = outliers

        if x.shape[1] > 1:
            zeros_df = pd.DataFrame(np.zeros((x.shape[1] - len(pred_df), len(pred_df.columns))),
                                    columns=pred_df.columns)
            pred_df = pd.concat([zeros_df, pred_df], axis=0, ignore_index=True)
        return (pred_df, dk.do_predict)
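
The loop at the end of predict() slides a fixed-size window over the feature tensor, collects one prediction per window, and then left-pads the result with zeros so it lines up row-for-row with the input dataframe. The standalone sketch below illustrates that pattern and the shapes involved; the window size, feature count, and DummyModel are assumptions made for this example and are not freqtrade code.

import torch

window_size = 5        # plays the role of self.window_size
n_features = 3
n_rows = 12            # number of candles to predict on

# convert_x() yields a (rows, features) tensor; unsqueeze(0) adds the batch dimension
x = torch.randn(n_rows, n_features).unsqueeze(0)   # shape (1, 12, 3)


class DummyModel(torch.nn.Module):
    # Hypothetical stand-in for PyTorchTransformerModel: maps one
    # (batch, window, features) slice to a single prediction.
    def __init__(self, n_features: int, window_size: int):
        super().__init__()
        self.linear = torch.nn.Linear(n_features * window_size, 1)

    def forward(self, xb: torch.Tensor) -> torch.Tensor:
        return self.linear(xb.reshape(xb.shape[0], -1))


model = DummyModel(n_features, window_size)
model.eval()

preds = []
with torch.no_grad():
    for i in range(0, x.shape[1] - window_size):
        xb = x[:, i:i + window_size, :]            # one (1, window_size, features) slice
        preds.append(model(xb))
yb = torch.cat(preds, dim=0)

# Only n_rows - window_size windows fit, so 12 input rows yield 7 predictions.
# This is why predict() pads the front of pred_df with zeros back to the full length.
print(yb.shape)   # torch.Size([7, 1])

Collecting the per-window outputs in a list and concatenating once is equivalent to the incremental torch.cat in predict(); the shapes are what matter here.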