• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

tonegas / nnodely / 16502811447

24 Jul 2025 04:44PM UTC coverage: 97.767% (+0.1%) from 97.651%
16502811447

push

github

web-flow
New version 1.5.0

This pull request introduces version 1.5.0 of **nnodely**, featuring several updates:
1. Improved clarity of documentation and examples.
2. Support for managing multi-dataset features is now available.
3. DataFrames can now be used to create datasets.
4. Datasets can now be resampled.
5. Random data training has been fixed for both classic and recurrent training.
6. The `state` variable has been removed.
7. It is now possible to add or remove a connection or a closed loop.
8. Partial models can now be exported.
9. The `train` function and the result analysis have been separated.
10. A new function, `trainAndAnalyse`, is now available.
11. The report now works across all network types.
12. The training function code has been reorganized.

2901 of 2967 new or added lines in 53 files covered. (97.78%)

16 existing lines in 6 files now uncovered.

12652 of 12941 relevant lines covered (97.77%)

0.98 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

87.93
/nnodely/support/utils.py
1
import torch, inspect
1✔
2
import types
1✔
3

4
from collections import OrderedDict
1✔
5

6
import numpy as np
1✔
7
from functools import wraps
1✔
8
from typing import get_type_hints
1✔
9
import keyword
1✔
10

11
from nnodely.support.logger import logging, nnLogger
1✔
12
log = nnLogger(__name__, logging.CRITICAL)
1✔
13

14
TORCH_DTYPE = torch.float32
1✔
15
NP_DTYPE = np.float32
1✔
16

17
ForbiddenTags = keyword.kwlist
1✔
18

19
class ReadOnlyDict:
    """Immutable, dict-like view over a plain dictionary.

    Supports lookup, iteration, length, merging (``|``) and equality, but
    exposes no mutating operations. Nested dicts retrieved through
    ``__getitem__`` come back as detached plain-dict copies, so callers
    cannot alter the wrapped data through them.
    """

    def __init__(self, data):
        self._data = data

    def __getitem__(self, key):
        item = self._data[key]
        # Nested dicts are converted (recursively, via dict()) into
        # detached plain-dict copies so the underlying storage stays safe.
        return dict(ReadOnlyDict(item)) if isinstance(item, dict) else item

    def __len__(self):
        return len(self._data)

    def __iter__(self):
        return iter(self._data)

    def keys(self):
        return self._data.keys()

    def items(self):
        return self._data.items()

    def values(self):
        # NOTE(review): unlike __getitem__, nested dicts come back as-is
        # here (no read-only copy) — confirm this asymmetry is intended.
        return self._data.values()

    def __repr__(self):
        from pprint import pformat
        return pformat(self._data)

    def __or__(self, other):
        # Merge two read-only views; right-hand entries win on key clashes.
        if not isinstance(other, ReadOnlyDict):
            return NotImplemented
        return ReadOnlyDict({**self._data, **other._data})

    def __str__(self):
        from nnodely.visualizer.emptyvisualizer import color, GREEN
        from pprint import pformat
        return color(pformat(self._data), GREEN)

    def __eq__(self, other):
        # Allow comparison against either another view or a raw dict.
        if not isinstance(other, ReadOnlyDict):
            return self._data == other
        return self._data == other._data
63

64
class ParamDict(ReadOnlyDict):
    """Parameter view that allows updating values while staying read-only
    for everything else.

    Item assignment rewrites the ``'values'`` entry of the parameter's
    metadata dict and mirrors the update into the backing tensor held in
    ``internal_data``; item access returns the stored values (or None).
    """

    def __init__(self, data, internal_data = None):
        """
        :param data: dict mapping parameter name -> metadata dict (which may
            contain a 'values' entry)
        :param internal_data: dict mapping parameter name -> tensor kept in
            sync with the metadata; defaults to an empty dict
        """
        super().__init__(data)
        self._internal_data = internal_data if internal_data is not None else {}

    def __setitem__(self, key, value):
        # Update both the serializable metadata and the live tensor.
        # NOTE(review): raises KeyError when `key` has no entry in
        # _internal_data — confirm callers always pre-populate it.
        self._data[key]['values'] = value
        self._internal_data[key] = self._internal_data[key].new_tensor(value)

    def __getitem__(self, key):
        """Return the parameter's 'values' entry, or None when not set yet."""
        # dict.get replaces the hand-rolled "'values' in d ... else None".
        return self._data[key].get('values')
76

77
def enforce_types(func):
    """Decorator that validates the arguments of ``func`` at call time
    against its type annotations, raising TypeError on a mismatch."""
    @wraps(func)
    def wrapper(*args, **kwargs):
        hints = get_type_hints(func)
        # Start from the keyword arguments; positionals are overlaid below.
        all_args = kwargs.copy()

        sig = OrderedDict(inspect.signature(func).parameters)
        # More positionals than named parameters means the extras belong to a
        # *args (VAR_POSITIONAL) parameter: replace that entry with one
        # synthetic, index-suffixed entry per extra positional so each value
        # can be checked against the *args annotation.
        if len(sig) != len(args):
            var_type = None
            for ind, arg in enumerate(args):
                if ind < len(list(sig.values())) and list(sig.values())[ind].kind == inspect.Parameter.VAR_POSITIONAL:
                    var_name = list(sig.keys())[ind]
                    var_type = sig.pop(var_name)
                if var_type:
                    sig[var_name+str(ind)] = var_type

        # Map positional values onto the (possibly expanded) parameter names.
        all_args.update(dict(zip(sig, args)))
        if 'self' in sig.keys():
            sig.pop('self')

        for arg_name, arg in all_args.items():
            # NOTE(review): the `or` admits names that are in `hints` but not
            # in `sig` (e.g. an annotated 'self' just popped above), in which
            # case sig[arg_name] would raise KeyError — confirm that
            # combination cannot occur for the decorated callables.
            if (arg_name in hints.keys() or arg_name in sig.keys()) and not isinstance(arg,sig[arg_name].annotation):
                class_name = func.__qualname__.split('.')[0]
                # Build a readable type description (list of names for unions).
                if isinstance(sig[arg_name].annotation, types.UnionType):
                    type_list = [val.__name__ for val in sig[arg_name].annotation.__args__]
                else:
                    type_list = sig[arg_name].annotation.__name__
                raise TypeError(
                    f"In Function or Class {class_name} the argument '{arg_name}' to be of type {type_list}, but got {type(arg).__name__}")

        # for arg, arg_type in hints.items():
        #     if arg in all_args and not isinstance(all_args[arg], arg_type):
        #         raise TypeError(
        #             f"In Function or Class {func} Expected argument '{arg}' to be of type {arg_type}, but got {type(all_args[arg]).__name__}")

        return func(*args, **kwargs)

    return wrapper
115

116
def is_notebook():
    """Return True when running inside a Jupyter/IPython notebook kernel,
    False otherwise (plain script, or IPython not installed)."""
    try:
        from IPython import get_ipython
        # A kernel application is registered in the shell config only when
        # executing inside a notebook kernel.
        return 'IPKernelApp' in get_ipython().config
    except Exception:
        # No IPython available, or not running under an IPython shell.
        return False
124

125
def tensor_to_list(data):
    """Recursively convert every torch.Tensor inside ``data`` into nested
    Python lists, preserving the surrounding container structure.

    Dicts, lists, tuples and torch ParameterDicts are traversed recursively;
    any other value is returned unchanged.
    """
    if isinstance(data, torch.Tensor):
        return data.tolist()
    if isinstance(data, dict):
        return {k: tensor_to_list(v) for k, v in data.items()}
    if isinstance(data, list):
        return [tensor_to_list(elem) for elem in data]
    if isinstance(data, tuple):
        return tuple(tensor_to_list(elem) for elem in data)
    if isinstance(data, torch.nn.modules.container.ParameterDict):
        # ParameterDict is not a dict subclass, so it needs its own branch.
        return {k: tensor_to_list(v) for k, v in data.items()}
    # Leaf of an unsupported type: leave untouched.
    return data
144

145
def get_batch_size(n_samples, batch_size = None, predicion_samples = 0):
    """Clamp a requested batch size to the samples actually usable.

    :param n_samples: total number of available samples
    :param batch_size: requested batch size; None means "use all samples"
    :param predicion_samples: samples reserved ahead for prediction;
        -1 is treated as 0 (this value is used to disconnect the connect)
    :return: the requested batch size when it fits, otherwise the usable
        sample count clamped at zero
    """
    if batch_size is None:
        batch_size = n_samples
    if predicion_samples == -1:
        predicion_samples = 0
    usable = n_samples - predicion_samples
    if batch_size <= usable:
        return batch_size
    return max(0, usable)
149

150
def check_and_get_list(name_list, available_names, error_fun):
    """Normalize ``name_list`` to a list and verify every entry is known.

    A plain string is wrapped into a single-element list. For list input,
    each name must appear in ``available_names`` or an IndexError is raised
    with the message produced by ``error_fun(name)``. Values of any other
    type are returned unchanged and unvalidated.
    """
    # Exact type checks (not isinstance) preserved from the original design.
    if type(name_list) is str:
        name_list = [name_list]
    if type(name_list) is list:
        for entry in name_list:
            check(entry in available_names, IndexError, error_fun(entry))
    return name_list
157

158
def check(condition, exception, string):
    """Raise ``exception(string)`` unless ``condition`` is truthy.

    :param condition: value expected to be truthy
    :param exception: exception class instantiated on failure
    :param string: message passed to the exception constructor
    """
    if condition:
        return
    raise exception(string)
161

162
# Function used to verify the number of gradient operations in the graph
163
# def count_gradient_operations(grad_fn):
164
#     count = 0
165
#     if grad_fn is None:
166
#         return count
167
#     nodes = [grad_fn]
168
#     while nodes:
169
#         node = nodes.pop()
170
#         count += 1
171
#         nodes.extend(next_fn[0] for next_fn in node.next_functions if next_fn[0] is not None)
172
#     return count
173

174
# def check_gradient_operations(X:dict):
175
#     count = 0
176
#     for key in X.keys():
177
#         count += count_gradient_operations(X[key].grad_fn)
178
#     return count
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc