tonegas / nnodely | Pull Request #86: Smallclasses (merge 44b7c25ee into e9c323c4f)
07 Apr 2025 09:27PM UTC | coverage: 97.259% (+0.2%) from 97.035%
2275 of 2409 new or added lines in 54 files covered (94.44%); 1 existing line in 1 file now uncovered.
11637 of 11965 relevant lines covered (97.26%); 0.97 hits per line.

Source file: /nnodely/operators/network.py (99.44% covered)

import copy, torch

import numpy as np

from nnodely.basic.modeldef import ModelDef
from nnodely.basic.model import Model
from nnodely.support.utils import check, log, TORCH_DTYPE, NP_DTYPE, argmax_dict, argmin_dict, enforce_types
from nnodely.operators.memory import Memory
from nnodely.basic.relation import Stream
from nnodely.layers.input import State
from nnodely.layers.output import Output

class Network(Memory):
    def __init__(self):
        check(type(self) is not Network, TypeError, "Network class cannot be instantiated directly")

    def __addInfo(self):
        total_params = sum(p.numel() for p in self._model.parameters() if p.requires_grad)
        self._model_def['Info']['num_parameters'] = total_params
        from nnodely import __version__
        self._model_def['Info']['nnodely_version'] = __version__

    @enforce_types
    def addModel(self, name:str, stream_list:list|Output|Stream) -> None:
        """
        Adds a new model with the given name along with a list of Outputs.

        Parameters
        ----------
        name : str
            The name of the model.
        stream_list : list of Stream
            The list of Output streams composing the model.

        Example
        -------
        Example usage:
            >>> model = Modely()
            >>> x = Input('x')
            >>> out = Output('out', Fir(x.last()))
            >>> model.addModel('example_model', [out])
        """
        try:
            self._model_def.addModel(name, stream_list)
        except Exception as e:
            self._model_def.removeModel(name)
            raise e

    @enforce_types
    def removeModel(self, name_list:list) -> None:
        """
        Removes models with the given list of names.

        Parameters
        ----------
        name_list : list of str
            The list of model names to remove.

        Example
        -------
        Example usage:
            >>> model.removeModel(['sub_model1', 'sub_model2'])
        """
        self._model_def.removeModel(name_list)

    @enforce_types
    def addConnect(self, stream_out:Output|Stream, state_list_in:State) -> None:
        """
        Adds a connection from a relation stream to an input state.

        Parameters
        ----------
        stream_out : Stream
            The relation stream to connect from.
        state_list_in : State
            The input state to connect to.

        Examples
        --------
        .. image:: https://colab.research.google.com/assets/colab-badge.svg
            :target: https://colab.research.google.com/github/tonegas/nnodely/blob/main/examples/states.ipynb
            :alt: Open in Colab

        Example:
            >>> model = Modely()
            >>> x = Input('x')
            >>> y = State('y')
            >>> relation = Fir(x.last())
            >>> model.addConnect(relation, y)
        """
        self._model_def.addConnect(stream_out, state_list_in)

    @enforce_types
    def addClosedLoop(self, stream_out:Output|Stream, state_list_in:State) -> None:
        """
        Adds a closed loop connection from a relation stream to an input state.

        Parameters
        ----------
        stream_out : Stream
            The relation stream to connect from.
        state_list_in : State
            The input state to connect to.

        Examples
        --------
        .. image:: https://colab.research.google.com/assets/colab-badge.svg
            :target: https://colab.research.google.com/github/tonegas/nnodely/blob/main/examples/states.ipynb
            :alt: Open in Colab

        Example:
            >>> model = Modely()
            >>> x = Input('x')
            >>> y = State('y')
            >>> relation = Fir(x.last())
            >>> model.addClosedLoop(relation, y)
        """
        self._model_def.addClosedLoop(stream_out, state_list_in)

    @enforce_types
    def neuralizeModel(self, sample_time:float|int|None = None, clear_model:bool = False, model_def:dict|None = None) -> None:
        """
        Neuralizes the model, preparing it for inference and training. This method creates a neural network model starting from the model definition.
        It also creates all the time windows for the inputs and states.

        Parameters
        ----------
        sample_time : float, int or None, optional
            The sample time for the model. Default is None.
        clear_model : bool, optional
            Whether to clear the existing model definition. Default is False.
        model_def : dict or None, optional
            A dictionary defining the model. If provided, it overrides the existing model definition. Default is None.

        Raises
        ------
        ValueError
            If sample_time is not None and model_def is provided.
            If clear_model is True and model_def is provided.

        Example
        -------
        Example usage:
            >>> model = Modely(name='example_model')
            >>> model.neuralizeModel(sample_time=0.1, clear_model=True)
        """
        if model_def is not None:
            check(sample_time is None, ValueError, 'The sample_time must be None if a model_def is provided')
            check(clear_model is False, ValueError, 'The clear_model must be False if a model_def is provided')
            self._model_def = ModelDef(model_def)
        else:
            if clear_model:
                self._model_def.update()
            else:
                self._model_def.updateParameters(self._model)

        for key, state in self._model_def['States'].items():
            check('connect' in state.keys() or 'closedLoop' in state.keys(), KeyError, f'Missing connect or closed loop for state "{key}"')

        self._model_def.setBuildWindow(sample_time)
        self._model = Model(self._model_def.getJson())
        self.__addInfo()

        self._input_ns_backward = {key:value['ns'][0] for key, value in (self._model_def['Inputs']|self._model_def['States']).items()}
        self._input_ns_forward = {key:value['ns'][1] for key, value in (self._model_def['Inputs']|self._model_def['States']).items()}
        self._max_samples_backward = max(self._input_ns_backward.values())
        self._max_samples_forward = max(self._input_ns_forward.values())
        self._input_n_samples = {}
        for key, value in (self._model_def['Inputs'] | self._model_def['States']).items():
            self._input_n_samples[key] = self._input_ns_backward[key] + self._input_ns_forward[key]
        self._max_n_samples = max(self._input_ns_backward.values()) + max(self._input_ns_forward.values())
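
        ## Note: 'ns' holds the (backward, forward) sample counts of each input/state.
        ## Illustrative example (values not from this file): an input looking 5 samples
        ## into the past and 1 into the future has ns = [5, 1] and occupies 5 + 1 = 6
        ## samples of its input window.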

        ## Initialize States
        self.resetStates()

        self._neuralized = True
        self._traced = False
        self.visualizer.showModel(self._model_def.getJson())
        self.visualizer.showModelInputWindow()
        self.visualizer.showBuiltModel()

    @enforce_types
    def __call__(self, inputs:dict={}, sampled:bool=False, closed_loop:dict={}, connect:dict={}, prediction_samples:str|int|None='auto',
                 num_of_samples:int|None=None) -> dict:  ##, align_input=False):
        """
        Performs inference on the model.

        Parameters
        ----------
        inputs : dict, optional
            A dictionary of input data. The keys are input names and the values are the corresponding data. Default is an empty dictionary.
        sampled : bool, optional
            A boolean indicating whether the inputs are already sampled. Default is False.
        closed_loop : dict, optional
            A dictionary specifying closed loop connections. The keys are input names and the values are output names. Default is an empty dictionary.
        connect : dict, optional
            A dictionary specifying connections. The keys are input names and the values are output names. Default is an empty dictionary.
        prediction_samples : str, int or None, optional
            The number of prediction samples. Can be 'auto', None or an integer. Default is 'auto'.
        num_of_samples : int or None, optional
            The number of samples. Can be None or an integer. Default is None.

        Returns
        -------
        dict
            A dictionary containing the model's prediction outputs.

        Raises
        ------
        RuntimeError
            If the network is not neuralized.
        ValueError
            If an input variable is not in the model definition or if an output variable is not in the model definition.

        Examples
        --------
        .. image:: https://colab.research.google.com/assets/colab-badge.svg
            :target: https://colab.research.google.com/github/tonegas/nnodely/blob/main/examples/inference.ipynb
            :alt: Open in Colab

        Example usage:
            >>> model = Modely()
            >>> x = Input('x')
            >>> out = Output('out', Fir(x.last()))
            >>> model.addModel('example_model', [out])
            >>> model.neuralizeModel()
            >>> predictions = model(inputs={'x': [1, 2, 3]})
        """

        ## Copy the dictionaries to avoid mutating the caller's arguments
        inputs = copy.deepcopy(inputs)
        closed_loop = copy.deepcopy(closed_loop)
        connect = copy.deepcopy(connect)

        ## Check neuralize
        check(self.neuralized, RuntimeError, "The network is not neuralized.")

        ## Check closed loop integrity
        for close_in, close_out in (closed_loop | connect).items():
            check(close_in in self._model_def['Inputs'], ValueError, f'The tag "{close_in}" is not an input variable.')
            check(close_out in self._model_def['Outputs'], ValueError,
                  f'The tag "{close_out}" is not an output of the network.')

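        ## For example (illustrative names), closed_loop={'x': 'out'} feeds the
        ## prediction of output 'out' back into input 'x' at the next step; the
        ## check above guarantees both tags exist in the model definition.
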
        ## List of keys
        model_inputs = list(self._model_def['Inputs'].keys())
        model_states = list(self._model_def['States'].keys())
        json_inputs = self._model_def['Inputs'] | self._model_def['States']
        state_closed_loop = [key for key, value in self._model_def['States'].items() if
                             'closedLoop' in value.keys()] + list(closed_loop.keys())
        state_connect = [key for key, value in self._model_def['States'].items() if 'connect' in value.keys()] + list(
            connect.keys())
        extra_inputs = list(set(list(inputs.keys())) - set(model_inputs) - set(model_states))
        non_mandatory_inputs = state_closed_loop + state_connect
        mandatory_inputs = list(set(model_inputs) - set(non_mandatory_inputs))

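        ## Note: mandatory inputs must be supplied by the caller, while non-mandatory
        ## ones (states driven by closed-loop or connect) can fall back to the stored
        ## states or to zero-filled windows further below.
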
        ## Remove extra inputs
        for key in extra_inputs:
            log.warning(
                f'The provided input {key} is not used inside the network. The inference will continue without using it.')
            del inputs[key]

        ## Get the number of data windows for each input/state
        num_of_windows = {key: len(value) for key, value in inputs.items()} if sampled else {
            key: len(value) - self._input_n_samples[key] + 1 for key, value in inputs.items()}

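        ## Worked example (illustrative): a raw series of 8 samples for an input whose
        ## window needs 3 samples yields 8 - 3 + 1 = 6 sliding windows.
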
        ## Get the maximum inference window
        if num_of_samples:
            window_dim = num_of_samples
            for key in inputs.keys():
                input_dim = self._model_def['Inputs'][key]['dim'] if key in model_inputs else \
                            self._model_def['States'][key]['dim']
                new_samples = num_of_samples - (len(inputs[key]) - self._input_n_samples[key] + 1)
                if input_dim > 1:
                    log.warning(f'The variable {key} is filled with {new_samples} samples equal to zeros.')
                    inputs[key] += [[0 for _ in range(input_dim)] for _ in range(new_samples)]
                else:
                    log.warning(f'The variable {key} is filled with {new_samples} samples equal to zeros.')
                    inputs[key] += [0 for _ in range(new_samples)]
        elif inputs:
            windows = []
            for key in inputs.keys():
                if key in mandatory_inputs:
                    n_samples = len(inputs[key]) if sampled else len(inputs[key]) - self._model_def['Inputs'][key]['ntot'] + 1
                    windows.append(n_samples)
            if not windows:
                for key in inputs.keys():
                    if key in non_mandatory_inputs:
                        if key in model_inputs:
                            n_samples = len(inputs[key]) if sampled else len(inputs[key]) - self._model_def['Inputs'][key]['ntot'] + 1
                        else:
                            n_samples = len(inputs[key]) if sampled else len(inputs[key]) - self._model_def['States'][key]['ntot'] + 1
                        windows.append(n_samples)
            window_dim = min(windows) if windows else 0
        else:  ## No inputs
            window_dim = 1 if non_mandatory_inputs else 0
        check(window_dim > 0, StopIteration, 'Missing samples in the input window')

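        ## window_dim is the number of inference steps: the explicit num_of_samples
        ## when given, otherwise the smallest number of complete windows across the
        ## provided inputs.
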
        if len(set(num_of_windows.values())) > 1:
            max_ind_key, max_dim = argmax_dict(num_of_windows)
            min_ind_key, min_dim = argmin_dict(num_of_windows)
            log.warning(
                f'Different number of samples between inputs [MAX {max_ind_key} = {max_dim}; MIN {min_ind_key} = {min_dim}]')

        ## Autofill the missing inputs
        provided_inputs = list(inputs.keys())
        missing_inputs = list(set(mandatory_inputs) - set(provided_inputs))
        if missing_inputs:
            log.warning(f'Inputs not provided: {missing_inputs}. Autofilling with zeros...')
            for key in missing_inputs:
                inputs[key] = np.zeros(
                    shape=(self._input_n_samples[key] + window_dim - 1, self._model_def['Inputs'][key]['dim']),
                    dtype=NP_DTYPE).tolist()

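        ## Each missing input is zero-filled with window samples + window_dim - 1 rows,
        ## enough to extract one complete window per inference step.
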
        ## Transform inputs into 3D tensors
        for key in inputs.keys():
            input_dim = json_inputs[key]['dim']
            inputs[key] = torch.from_numpy(np.array(inputs[key])).to(TORCH_DTYPE)

            if input_dim > 1:
                correct_dim = 3 if sampled else 2
                check(len(inputs[key].shape) == correct_dim, ValueError,
                      f'The input {key} must have {correct_dim} dimensions')
                check(inputs[key].shape[correct_dim - 1] == input_dim, ValueError,
                      f'The last dimension of the input "{key}" must be equal to {input_dim}')

            if input_dim == 1 and inputs[key].shape[-1] != 1:  ## add the input dimension
                inputs[key] = inputs[key].unsqueeze(-1)
            if inputs[key].ndim <= 1:  ## add the batch dimension
                inputs[key] = inputs[key].unsqueeze(0)
            if inputs[key].ndim <= 2:  ## add the time dimension
                inputs[key] = inputs[key].unsqueeze(0)

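        ## After this loop every input tensor has shape (batch, time, dim); e.g. a plain
        ## list [1, 2, 3] for a scalar input becomes a (1, 3, 1) tensor.
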
        ## Initialize the resulting dictionary
        result_dict = {}
        for key in self._model_def['Outputs'].keys():
            result_dict[key] = []

        ## Inference
        calculate_grad = False
        for key, value in json_inputs.items():
            if 'type' in value.keys():
                calculate_grad = True
                break
        with torch.enable_grad() if calculate_grad else torch.inference_mode():
            ## Update with virtual states
            if prediction_samples is not None:
                self._model.update(closed_loop=closed_loop, connect=connect)
            else:
                prediction_samples = 0
            X = {}
            count = 0
            first = True
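            ## Iterate over the sliding windows: 'count' tracks how many steps remain
            ## before the states are reset, 'first' marks the initial reset.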
            for idx in range(window_dim):
                ## Get mandatory data inputs
                for key in mandatory_inputs:
                    X[key] = inputs[key][idx:idx + 1] if sampled else inputs[key][:,
                                                                      idx:idx + self._input_n_samples[key]]
                    if 'type' in json_inputs[key].keys():
                        X[key] = X[key].requires_grad_(True)
                ## Reset states
                if count == 0 or prediction_samples == 'auto':
                    count = prediction_samples
                    for key in non_mandatory_inputs:  ## Get non-mandatory data (from inputs, from states, or with zeros)
                        ## if prediction_samples is 'auto' and there are enough samples
                        ## if prediction_samples is NOT 'auto' but the extended (zero-padded) window is long enough
                        if (key in inputs.keys() and prediction_samples == 'auto' and idx < num_of_windows[key]) or (
                                key in inputs.keys() and prediction_samples != 'auto' and idx < inputs[key].shape[1]):
                            X[key] = inputs[key][idx:idx + 1] if sampled else inputs[key][:,
                                                                              idx:idx + self._input_n_samples[key]]
                        ## if this is the first reset,
                        ## or there is a state in memory,
                        ## or prediction_samples is 'auto' and there are not enough samples
                        elif (key in self._states.keys() and (first or prediction_samples == 'auto')) and (
                                prediction_samples == 'auto' or prediction_samples is None):
                            X[key] = self._states[key]
                        else:  ## no samples and no states available
                            window_size = self._input_n_samples[key]
                            dim = json_inputs[key]['dim']
                            X[key] = torch.zeros(size=(1, window_size, dim), dtype=TORCH_DTYPE, requires_grad=False)
                            self._states[key] = X[key]
                        if 'type' in json_inputs[key].keys():
                            X[key] = X[key].requires_grad_(True)
                    first = False
                else:
                    ## Remove the gradient of the previous forward
                    for key in X.keys():
                        if 'type' in json_inputs[key].keys():
                            X[key] = X[key].detach().requires_grad_(True)
                    count -= 1
                ## Forward pass
                result, _, out_closed_loop, out_connect = self._model(X)

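                ## self._model(X) also returns the closed-loop and connect values used
                ## below to roll the virtual states forward.
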
                ## Append the prediction of the current sample to the result dictionary
                for key in self._model_def['Outputs'].keys():
                    if result[key].shape[-1] == 1:
                        result[key] = result[key].squeeze(-1)
                        if result[key].shape[-1] == 1:
                            result[key] = result[key].squeeze(-1)
                    result_dict[key].append(result[key].detach().squeeze(dim=0).tolist())

                ## Update closed_loop and connect
                if prediction_samples:
                    self._updateState(X, out_closed_loop, out_connect)

        ## Remove virtual states
        self._removeVirtualStates(connect, closed_loop)

        return result_dict

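## --- Minimal usage sketch, assembled from the docstring examples above (the import
## path is an assumption, not taken from this file) ---
## >>> from nnodely import Modely, Input, Output, Fir
## >>> model = Modely()
## >>> x = Input('x')
## >>> out = Output('out', Fir(x.last()))
## >>> model.addModel('example_model', [out])
## >>> model.neuralizeModel(sample_time=0.1)
## >>> predictions = model(inputs={'x': [1, 2, 3]})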