
tonegas / nnodely / build 14360836942

09 Apr 2025 03:17PM UTC coverage: 97.602% (+0.6%) from 97.035%
Push via github (web-flow): Merge pull request #86 from tonegas/smallclasses ("Smallclasses")
2292 of 2419 new or added lines in 54 files covered. (94.75%)

3 existing lines in 1 file now uncovered.

11683 of 11970 relevant lines covered (97.6%)

0.98 hits per line

Source File: /nnodely/operators/validator.py (96.62% covered)
Every line shown was covered (1 hit) in this build, except the uncovered lines noted inline.
import torch

import numpy as np

from nnodely.basic.loss import CustomLoss
from nnodely.support.utils import check, TORCH_DTYPE, enforce_types

class Validator():
    def __init__(self):
        check(type(self) is not Validator, TypeError, "Validator class cannot be instantiated directly")

        # Validation Parameters
        self._performance = {}
        self._prediction = {}
        self._training = {}

    @enforce_types
    def resultAnalysis(self,
                       dataset: str,
                       data: dict | None = None,
                       minimize_gain: dict = {},
                       closed_loop: dict = {},
                       connect: dict = {},
                       prediction_samples: int | str | None = None,
                       step: int = 0,
                       batch_size: int | None = None
                       ) -> None:
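        ## Evaluate the model on `dataset`: for every minimizer this computes
        ## the configured loss, the fraction of variance unexplained (FVU) and
        ## the Akaike information criterion (AIC), stores predictions and
        ## targets, and finally hands the results to the visualizer.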
        import warnings
        json_inputs = self.json['Inputs'] | self.json['States']
        calculate_grad = False
        for key, value in json_inputs.items():
            if 'type' in value.keys():
                calculate_grad = True
                break
        with torch.enable_grad() if calculate_grad else torch.inference_mode():
            ## Init model for results analysis
            self._model.eval()
            self._performance[dataset] = {}
            self._prediction[dataset] = {}
            A = {}
            B = {}
            total_losses = {}

            # Create the losses
            losses = {}
            for name, values in self._model_def['Minimizers'].items():
                losses[name] = CustomLoss(values['loss'])
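
            ## Recurrent analysis is used when the model has closed-loop or
            ## connect mappings (or internal States) and a prediction horizon
            ## is requested; otherwise a single batched forward pass suffices.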
            recurrent = False
            if (closed_loop or connect or self._model_def['States']) and prediction_samples is not None:
                recurrent = True

            if data is None:
                check(dataset in self._data.keys(), ValueError, f'The dataset {dataset} is not loaded!')
                data = {key: torch.from_numpy(val).to(TORCH_DTYPE) for key, val in self._data[dataset].items()}
            n_samples = len(data[list(data.keys())[0]])

            if recurrent:
                batch_size = batch_size if batch_size is not None else n_samples - prediction_samples

                model_inputs = list(self._model_def['Inputs'].keys())

                state_closed_loop = [key for key, value in self._model_def['States'].items() if 'closedLoop' in value.keys()] + list(closed_loop.keys())
                state_connect = [key for key, value in self._model_def['States'].items() if 'connect' in value.keys()] + list(connect.keys())

                non_mandatory_inputs = state_closed_loop + state_connect
                mandatory_inputs = list(set(model_inputs) - set(non_mandatory_inputs))

                for key, value in self._model_def['Minimizers'].items():
                    total_losses[key], A[key], B[key] = [], [], []
                    for horizon_idx in range(prediction_samples + 1):
                        A[key].append([])
                        B[key].append([])

                list_of_batch_indexes = list(range(n_samples - prediction_samples))
                ## Remove forbidden indexes in case of a multi-file dataset
                ## (the branches below are new in this build and uncovered: report lines 78-81 and 83)
                if dataset in self._multifile.keys(): ## Multi-file Dataset
                    if n_samples == self.run_training_params['n_samples_train']: ## Training
                        list_of_batch_indexes, step = self.__get_batch_indexes(dataset, n_samples, prediction_samples, batch_size, step, type='train')
                    elif n_samples == self.run_training_params['n_samples_val']: ## Validation
                        list_of_batch_indexes, step = self.__get_batch_indexes(dataset, n_samples, prediction_samples, batch_size, step, type='val')
                    else:
                        list_of_batch_indexes, step = self.__get_batch_indexes(dataset, n_samples, prediction_samples, batch_size, step, type='test')

                X = {}
                ## Update with virtual states
                self._model.update(closed_loop=closed_loop, connect=connect)
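
                ## Batches are consumed from the front of the index list; after each
                ## batch, `step` further indexes are skipped. E.g. with n_samples=10,
                ## prediction_samples=2, batch_size=4, step=2: indexes [0..7] yield one
                ## batch [0,1,2,3], then [4,5] are skipped and [6,7] is too short.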
                while len(list_of_batch_indexes) >= batch_size:
                    idxs = list_of_batch_indexes[:batch_size]
                    for num in idxs:
                        list_of_batch_indexes.remove(num)
                    if step > 0:
                        if len(list_of_batch_indexes) >= step:
                            step_idxs = list_of_batch_indexes[:step]
                            for num in step_idxs:
                                list_of_batch_indexes.remove(num)
                        else:
                            list_of_batch_indexes = []
                    ## Reset
                    horizon_losses = {key: [] for key in self._model_def['Minimizers'].keys()}
                    for key in non_mandatory_inputs:
                        if key in data.keys():
                            ## with data
                            X[key] = data[key][idxs]
                        else:  ## with zeros
                            window_size = self._input_n_samples[key]
                            dim = json_inputs[key]['dim']
                            if 'type' in json_inputs[key]:
                                X[key] = torch.zeros(size=(batch_size, window_size, dim), dtype=TORCH_DTYPE, requires_grad=True)
                            else:
                                X[key] = torch.zeros(size=(batch_size, window_size, dim), dtype=TORCH_DTYPE, requires_grad=False)
                            self._states[key] = X[key]
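
                    ## Roll the model forward over the horizon: at each step the
                    ## mandatory inputs are read from the data shifted by horizon_idx,
                    ## while closed-loop/connect states evolve from the model outputs.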
                    for horizon_idx in range(prediction_samples + 1):
                        ## Get data
                        for key in mandatory_inputs:
                            X[key] = data[key][[idx+horizon_idx for idx in idxs]]
                        ## Forward pass
                        out, minimize_out, out_closed_loop, out_connect = self._model(X)

                        ## Loss Calculation
                        for key, value in self._model_def['Minimizers'].items():
                            A[key][horizon_idx].append(minimize_out[value['A']].detach().numpy())
                            B[key][horizon_idx].append(minimize_out[value['B']].detach().numpy())
                            loss = losses[key](minimize_out[value['A']], minimize_out[value['B']])
                            loss = (loss * minimize_gain[key]) if key in minimize_gain.keys() else loss  ## Multiply by the gain if necessary
                            horizon_losses[key].append(loss)

                        ## Update
                        self._updateState(X, out_closed_loop, out_connect)

                    ## Calculate the total loss
                    for key in self._model_def['Minimizers'].keys():
                        loss = sum(horizon_losses[key]) / (prediction_samples + 1)
                        total_losses[key].append(loss.detach().numpy())

                for key, value in self._model_def['Minimizers'].items():
                    for horizon_idx in range(prediction_samples + 1):
                        A[key][horizon_idx] = np.concatenate(A[key][horizon_idx])
                        B[key][horizon_idx] = np.concatenate(B[key][horizon_idx])
                    total_losses[key] = np.mean(total_losses[key])
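
            ## Non-recurrent case: plain batched forward passes over the dataset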
            else:
                if batch_size is None:
                    batch_size = n_samples

                for key, value in self._model_def['Minimizers'].items():
                    total_losses[key], A[key], B[key] = [], [], []

                for idx in range(0, (n_samples - batch_size + 1), batch_size):
                    ## Build the input tensor
                    XY = {key: val[idx:idx + batch_size] for key, val in data.items()}

                    ## Model Forward
                    _, minimize_out, _, _ = self._model(XY)  ## Forward pass
                    ## Loss Calculation
                    for key, value in self._model_def['Minimizers'].items():
                        A[key].append(minimize_out[value['A']].detach().numpy())
                        B[key].append(minimize_out[value['B']].detach().numpy())
                        loss = losses[key](minimize_out[value['A']], minimize_out[value['B']])
                        loss = (loss * minimize_gain[key]) if key in minimize_gain.keys() else loss
                        total_losses[key].append(loss.detach().numpy())

                for key, value in self._model_def['Minimizers'].items():
                    A[key] = np.concatenate(A[key])
                    B[key] = np.concatenate(B[key])
                    total_losses[key] = np.mean(total_losses[key])
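
            ## Per-minimizer metrics: configured loss, FVU and AIC, plus the
            ## raw predictions and targets for later plotting.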
            for ind, (key, value) in enumerate(self._model_def['Minimizers'].items()):
                A_np = np.array(A[key])
                B_np = np.array(B[key])
                self._performance[dataset][key] = {}
                self._performance[dataset][key][value['loss']] = np.mean(total_losses[key]).item()
                self._performance[dataset][key]['fvu'] = {}
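                ## FVU (fraction of variance unexplained):
                ##   FVU_A = Var(A - B) / Var(A),   FVU_B = Var(A - B) / Var(B)
                ## When B predicts A this is 1 - R^2: 0 means a perfect fit, 1 means
                ## no better than predicting the mean. Both variants are averaged below.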
                # Compute FVU
                residual = A_np - B_np
                error_var = np.var(residual)
                error_mean = np.mean(residual)
                #error_var_manual = np.sum((residual-error_mean) ** 2) / (len(self._prediction['B'][ind]) - 0)
                #print(f"{key} var np:{new_error_var} and var manual:{error_var_manual}")
                with warnings.catch_warnings(record=True) as w:
                    self._performance[dataset][key]['fvu']['A'] = (error_var / np.var(A_np)).item()
                    self._performance[dataset][key]['fvu']['B'] = (error_var / np.var(B_np)).item()
                    if w and np.var(A_np) == 0.0 and np.var(B_np) == 0.0:
                        self._performance[dataset][key]['fvu']['A'] = np.nan
                        self._performance[dataset][key]['fvu']['B'] = np.nan
                self._performance[dataset][key]['fvu']['total'] = np.mean([self._performance[dataset][key]['fvu']['A'], self._performance[dataset][key]['fvu']['B']]).item()
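                ## AIC assuming Gaussian residuals: the log-likelihood of n residuals
                ## with variance s2 is
                ##   log L = -n/2*log(2*pi) - n/2*log(s2) - sum(r^2)/(2*s2)
                ## (terms p1, p2, p3 below), and AIC = 2k - 2*log L for k trainable
                ## parameters; lower AIC is better.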
                # Compute AIC
                #normal_dist = norm(0, error_var ** 0.5)
                #probability_of_residual = normal_dist.pdf(residual)
                #log_likelihood_first = sum(np.log(probability_of_residual))
                p1 = -len(residual)/2.0*np.log(2*np.pi)
                with warnings.catch_warnings(record=True) as w:
                    p2 = -len(residual)/2.0*np.log(error_var)
                    p3 = -1 / (2.0 * error_var) * np.sum(residual ** 2)
                    if w and p2 == np.float32(np.inf) and p3 == np.float32(-np.inf):
                        p2 = p3 = 0.0
                log_likelihood = p1+p2+p3
                #print(f"{key} log likelihood second mode:{log_likelihood} = {p1}+{p2}+{p3} first mode: {log_likelihood_first}")
                total_params = sum(p.numel() for p in self._model.parameters() if p.requires_grad)
                #print(f"{key} total_params:{total_params}")
                aic = - 2 * log_likelihood + 2 * total_params
                #print(f"{key} aic:{aic}")
                self._performance[dataset][key]['aic'] = {'value':aic,'total_params':total_params,'log_likelihood':log_likelihood}
                # Prediction and target
                self._prediction[dataset][key] = {}
                self._prediction[dataset][key]['A'] = A_np.tolist()
                self._prediction[dataset][key]['B'] = B_np.tolist()

            ## Remove virtual states
            self._removeVirtualStates(connect, closed_loop)

            self._performance[dataset]['total'] = {}
            self._performance[dataset]['total']['mean_error'] = np.mean([value for key, value in total_losses.items()])
            self._performance[dataset]['total']['fvu'] = np.mean([self._performance[dataset][key]['fvu']['total'] for key in self._model_def['Minimizers'].keys()])
            self._performance[dataset]['total']['aic'] = np.mean([self._performance[dataset][key]['aic']['value'] for key in self._model_def['Minimizers'].keys()])

        self.visualizer.showResult(dataset)
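
For reference, a minimal standalone sketch of the FVU and AIC computations used above, on synthetic arrays. This is illustrative only, not part of nnodely: `A`, `B` and `n_params` are made-up stand-ins for a target series, a prediction, and the model's trainable-parameter count.

import numpy as np

# Synthetic target (A) and noisy prediction (B)
rng = np.random.default_rng(0)
A = np.sin(np.linspace(0, 6.28, 200))
B = A + rng.normal(0, 0.1, size=A.shape)

residual = A - B
error_var = np.var(residual)

# FVU: fraction of variance unexplained (1 - R^2 when B predicts A)
fvu_A = error_var / np.var(A)
fvu_B = error_var / np.var(B)
fvu_total = np.mean([fvu_A, fvu_B])

# Gaussian log-likelihood of the residuals, then AIC = 2k - 2*log L
n = len(residual)
n_params = 10  # hypothetical trainable-parameter count
log_likelihood = (-n / 2.0 * np.log(2 * np.pi)
                  - n / 2.0 * np.log(error_var)
                  - np.sum(residual ** 2) / (2.0 * error_var))
aic = -2 * log_likelihood + 2 * n_params

print(f"fvu: {fvu_total:.4f}  aic: {aic:.1f}")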