
tonegas / nnodely, build 20071179148
09 Dec 2025 04:40PM UTC coverage: 96.588% (-1.2%) from 97.767%

Pull Request #109: New version of nnodely (github, tonegas: "Edits of the README")

813 of 858 new or added lines in 37 files covered (94.76%)
153 existing lines in 4 files now uncovered
13021 of 13481 relevant lines covered (96.59%)
0.97 hits per line

Source File: /nnodely/visualizer/mplvisualizer.py (72.16%)
import subprocess, json, os, importlib
import numpy as np

from nnodely.visualizer.textvisualizer import TextVisualizer
from nnodely.layers.fuzzify import return_fuzzify
from nnodely.layers.parametricfunction import return_standard_inputs, return_function
from nnodely.support.utils import check
from nnodely.basic.modeldef import ModelDef

from nnodely.support.logger import logging, nnLogger
log = nnLogger(__name__, logging.INFO)

def get_library_path(library_name):
    spec = importlib.util.find_spec(library_name)
    if spec is None:
        raise ImportError(f"Library {library_name} not found")
    return os.path.dirname(spec.origin)

class MPLVisualizer(TextVisualizer):
    def __init__(self, verbose = 1):
        super().__init__(verbose)
        # Paths to the data visualizer scripts
        import signal
        import sys
        get_library_path('nnodely')
        self.__training_visualizer_script = os.path.join(get_library_path('nnodely'),'visualizer','dynamicmpl','trainingplot.py')
        self.__time_series_visualizer_script = os.path.join(get_library_path('nnodely'),'visualizer','dynamicmpl','resultsplot.py')
        self.__fuzzy_visualizer_script = os.path.join(get_library_path('nnodely'),'visualizer','dynamicmpl','fuzzyplot.py')
        self.__function_visualizer_script = os.path.join(get_library_path('nnodely'),'visualizer','dynamicmpl','functionplot.py')
        self.__process_training = {}
        self.__process_results = {}
        self.__process_function = {}
        # On Ctrl+C, terminate every plotting subprocess before exiting
        def signal_handler(sig, frame):
            for key in self.__process_training.keys():
                self.__process_training[key].terminate()
                self.__process_training[key].wait()
            for name_data in self.__process_results.keys():
                for key in self.__process_results[name_data].keys():
                    self.__process_results[name_data][key].terminate()
                    self.__process_results[name_data][key].wait()
            self.__process_results = {}
            for key in self.__process_function.keys():
                self.__process_function[key].terminate()
                self.__process_function[key].wait()
            sys.exit()

        signal.signal(signal.SIGINT, signal_handler)

    def showStartTraining(self):
        pass

    def showTraining(self, epoch, train_losses, val_losses):
        if epoch == 0:
            # Restart the per-minimizer training plot processes
            for key in self.__process_training.keys():
                if self.__process_training[key].poll() is None:
                    self.__process_training[key].terminate()
                    self.__process_training[key].wait()
                self.__process_training[key] = {}

            self.__process_training = {}
            for key in self.modely._model_def['Minimizers'].keys():
                self.__process_training[key] = subprocess.Popen(['python', self.__training_visualizer_script], stdin=subprocess.PIPE, text=True)

        num_of_epochs = self.modely.running_parameters['num_of_epochs']
        train_tag = self.modely.running_parameters['train_tag']
        val_tag = self.modely.running_parameters['val_tag']
        if epoch+1 <= num_of_epochs:
            for key in self.modely._model_def['Minimizers'].keys():
                if val_losses:
                    val_loss = val_losses[key][epoch]
                    title = f"Training on {train_tag} and {val_tag}"
                else:
                    val_loss = []
                    title = f"Training on {train_tag}"
                data = {"title":title, "key": key, "last": num_of_epochs - (epoch + 1), "epoch": epoch,
                        "train_losses": train_losses[key][epoch], "val_losses": val_loss}
                try:
                    # Send data to the visualizer process
                    self.__process_training[key].stdin.write(f"{json.dumps(data)}\n")
                    self.__process_training[key].stdin.flush()
                except BrokenPipeError:
                    self.closeTraining()
                    log.warning("The visualizer process has been closed.")

        if epoch+1 == num_of_epochs:
            for key in self.modely._model_def['Minimizers'].keys():
                self.__process_training[key].stdin.close()

    def showResult(self, name_data):
        super().showResult(name_data)
        check(name_data in self.modely.performance, ValueError, f"Results not available for {name_data}.")
        if name_data in self.__process_results:
            for key in self.modely._model_def['Minimizers'].keys():
                if key in self.__process_results[name_data] and self.__process_results[name_data][key].poll() is None:
                    self.__process_results[name_data][key].terminate()
                    self.__process_results[name_data][key].wait()
                self.__process_results[name_data][key] = None
        self.__process_results[name_data] = {}

        for key in self.modely._model_def['Minimizers'].keys():
            # Start the data visualizer process
            self.__process_results[name_data][key] = subprocess.Popen(['python', self.__time_series_visualizer_script], stdin=subprocess.PIPE,
                                                                      text=True)
            np_data_A = np.array(self.modely.prediction[name_data][key]['A'])
            if len(np_data_A.shape) > 3 and np_data_A.shape[1] > 30:
                # Downsample long recurrent predictions to at most 30 windows before plotting
                np_data_B = np.array(self.modely.prediction[name_data][key]['B'])
                indices = np.linspace(0, np_data_A.shape[1] - 1, 30, dtype=int)
                data_A = np_data_A[:, indices, :, :].tolist()
                data_B = np_data_B[:, indices, :, :].tolist()
                data_idxs = np.array(self.modely.prediction[name_data]['idxs'])[:,indices].tolist()
            else:
                data_A = self.modely.prediction[name_data][key]['A']
                data_B = self.modely.prediction[name_data][key]['B']
                data_idxs = self.modely.prediction[name_data]['idxs'] if len(np_data_A.shape) > 3 else None

            data = {"name_data": name_data,
                    "key": key,
                    "performance": self.modely.performance[name_data][key],
                    "prediction_A": data_A,
                    "prediction_B": data_B,
                    "data_idxs": data_idxs,
                    "sample_time": self.modely._model_def['Info']["SampleTime"]}
            try:
                # Send data to the visualizer process
                self.__process_results[name_data][key].stdin.write(f"{json.dumps(data)}\n")
                self.__process_results[name_data][key].stdin.flush()
                self.__process_results[name_data][key].stdin.close()
            except BrokenPipeError:
                self.closeResult(name_data)
                log.warning(f"The visualizer {name_data} process has been closed.")

    def showWeights(self, weights = None):
        pass

    def showFunctions(self, functions = None, xlim = None, num_points = 1000):
        check(self.modely.neuralized, ValueError, "The model has not been neuralized.")
        for key, value in self.modely._model_def['Functions'].items():
            if key in functions:
                if key in self.__process_function and self.__process_function[key].poll() is None:
                    self.__process_function[key].terminate()
                    self.__process_function[key].wait()

                if 'functions' in self.modely._model_def['Functions'][key]:
                    # Fuzzify layer: plot its membership functions
                    x, activ_fun = return_fuzzify(value, xlim, num_points)
                    data = {"name": key,
                            "x": x,
                            "y": activ_fun,
                            "chan_centers": value['centers']}
                    # Start the data visualizer process
                    self.__process_function[key] = subprocess.Popen(['python', self.__fuzzy_visualizer_script],
                                                                    stdin=subprocess.PIPE,
                                                                    text=True)
                elif 'code' in value:
                    # Parametric function: evaluate it on a standard input grid
                    model_def = ModelDef(self.modely._model_def)
                    model_def.updateParameters(self.modely._model)
                    function_inputs = return_standard_inputs(value, model_def, xlim, num_points)
                    function_output, function_input_list = return_function(value, function_inputs)

                    data = {"name": key}
                    if value['n_input'] == 2:
                        data['x0'] = function_inputs[0].reshape(num_points, num_points).tolist()
                        data['x1'] = function_inputs[1].reshape(num_points, num_points).tolist()
                        data['output'] = function_output.reshape(num_points, num_points).tolist()
                    else:
                        data['x0'] = function_inputs[0].reshape(num_points).tolist()
                        data['output'] = function_output.reshape(num_points).tolist()
                    data['params'] = []
                    for i, _ in enumerate(value['params_and_consts']):
                        data['params'] += [function_inputs[i+value['n_input']].tolist()]
                    data['input_names'] = function_input_list

                    # Start the data visualizer process
                    self.__process_function[key] = subprocess.Popen(['python', self.__function_visualizer_script],
                                                                    stdin=subprocess.PIPE,
                                                                    text=True)
                try:
                    # Send data to the visualizer process
                    self.__process_function[key].stdin.write(f"{json.dumps(data)}\n")
                    self.__process_function[key].stdin.flush()
                    self.__process_function[key].stdin.close()
                except BrokenPipeError:
                    self.closeFunctions()
                    log.warning(f"The visualizer {functions} process has been closed.")

    def closeFunctions(self, functions = None):
        if functions is None:
            for key in self.__process_function.keys():
                self.__process_function[key].terminate()
                self.__process_function[key].wait()
            self.__process_function = {}
        else:
            for key in functions:
                self.__process_function[key].terminate()
                self.__process_function[key].wait()
                self.__process_function.pop(key)

    def closeTraining(self, minimizer = None):
        if minimizer is None:
            for key in self.modely._model_def['Minimizers'].keys():
                if key in self.__process_training and self.__process_training[key].poll() is None:
                    self.__process_training[key].terminate()
                    self.__process_training[key].wait()
                self.__process_training[key] = {}
        else:
            self.__process_training[minimizer].terminate()
            self.__process_training[minimizer].wait()
            self.__process_training.pop(minimizer)

    def closeResult(self, name_data = None, minimizer = None):
        if name_data is None:
            check(minimizer is None, ValueError, "If name_data is None, minimizer must be None.")
            for name_data in self.__process_results.keys():
                for key in self.__process_results[name_data].keys():
                    self.__process_results[name_data][key].terminate()
                    self.__process_results[name_data][key].wait()
            self.__process_results = {}
        else:
            if minimizer is None:
                for key in self.__process_results[name_data].keys():
                    self.__process_results[name_data][key].terminate()
                    self.__process_results[name_data][key].wait()
                self.__process_results[name_data] = {}
            else:
                self.__process_results[name_data][minimizer].terminate()
                self.__process_results[name_data][minimizer].wait()
                self.__process_results[name_data].pop(minimizer)