• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

tonegas / nnodely / 20078894364

09 Dec 2025 09:20PM UTC coverage: 98.1% (-0.01%) from 98.114%
20078894364

push

github

tonegas
Change backend for windows test

13267 of 13524 relevant lines covered (98.1%)

0.98 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

74.01
/nnodely/visualizer/mplvisualizer.py
1
import subprocess, json, os, importlib
1✔
2
import numpy as np
1✔
3

4
from nnodely.visualizer.textvisualizer import TextVisualizer
1✔
5
from nnodely.layers.fuzzify import return_fuzzify
1✔
6
from nnodely.layers.parametricfunction import return_standard_inputs, return_function
1✔
7
from nnodely.support.utils import check
1✔
8
from nnodely.basic.modeldef import ModelDef
1✔
9

10
from nnodely.support.logger import logging, nnLogger
1✔
11
log = nnLogger(__name__, logging.INFO)
1✔
12

13
def get_library_path(library_name):
    """Return the directory containing an installed library's top-level module.

    :param library_name: importable module or package name (e.g. ``'nnodely'``).
    :raises ImportError: if the library cannot be located.
    :return: absolute path of the directory that holds the library's origin file.
    """
    # `import importlib` alone does not guarantee the `util` submodule is bound;
    # import it explicitly so `find_spec` is always available.
    import importlib.util
    spec = importlib.util.find_spec(library_name)
    if spec is None:
        raise ImportError(f"Library {library_name} not found")
    return os.path.dirname(spec.origin)
18

19
class MPLVisualizer(TextVisualizer):
    """Matplotlib-based visualizer.

    Renders training curves, prediction results, and function plots by
    spawning the ``dynamicmpl`` helper scripts as subprocesses and feeding
    them JSON over stdin.
    """

    def __init__(self, verbose = 1):
        """
        :param verbose: verbosity level forwarded to :class:`TextVisualizer`.
        """
        super().__init__(verbose)
        import signal
        import sys
        # Paths to the data visualizer scripts.
        # Hoisted: one library lookup instead of five identical calls.
        scripts_dir = os.path.join(get_library_path('nnodely'), 'visualizer', 'dynamicmpl')
        self.__training_visualizer_script = os.path.join(scripts_dir, 'trainingplot.py')
        self.__time_series_visualizer_script = os.path.join(scripts_dir, 'resultsplot.py')
        self.__fuzzy_visualizer_script = os.path.join(scripts_dir, 'fuzzyplot.py')
        self.__function_visualizer_script = os.path.join(scripts_dir, 'functionplot.py')
        # Subprocess handles: minimizer -> Popen, dataset -> {minimizer -> Popen},
        # and function name -> Popen respectively.
        self.__process_training = {}
        self.__process_results = {}
        self.__process_function = {}

        def signal_handler(sig, frame):
            # On Ctrl-C, terminate every child plot process before exiting,
            # otherwise the matplotlib windows would outlive the interpreter.
            for proc in self.__process_training.values():
                proc.terminate()
                proc.wait()
            for procs in self.__process_results.values():
                for proc in procs.values():
                    proc.terminate()
                    proc.wait()
            self.__process_results = {}
            for proc in self.__process_function.values():
                proc.terminate()
                # BUG FIX: was `self.__process_functios[key].wait()` (typo),
                # which raised AttributeError inside the SIGINT handler.
                proc.wait()
            sys.exit()

        signal.signal(signal.SIGINT, signal_handler)
48

49
    def showStartTraining(self):
        """No-op for this visualizer: the training plot subprocesses are
        spawned lazily by :meth:`showTraining` at epoch 0 instead."""
51

52
    def showTraining(self, epoch, train_losses, val_losses):
        """Stream the current epoch's losses to one plot subprocess per minimizer.

        :param epoch: zero-based epoch index; at epoch 0 the plot processes
                      are (re)spawned, at the last epoch their stdin is closed.
        :param train_losses: dict minimizer-name -> list of per-epoch training losses.
        :param val_losses: dict minimizer-name -> per-epoch validation losses,
                           or a falsy value when no validation set is used.
        """
        if epoch == 0:
            # Kill any plot process left over from a previous training run.
            for proc in self.__process_training.values():
                if proc is not None and proc.poll() is None:
                    proc.terminate()
                    proc.wait()
            self.__process_training = {}
            for key in self.modely._model_def['Minimizers'].keys():
                self.__process_training[key] = subprocess.Popen(['python', self.__training_visualizer_script], stdin=subprocess.PIPE, text=True)

        num_of_epochs = self.modely.running_parameters['num_of_epochs']
        train_tag = self.modely.running_parameters['train_tag']
        val_tag = self.modely.running_parameters['val_tag']
        if epoch+1 <= num_of_epochs:
            for key in self.modely._model_def['Minimizers'].keys():
                if val_losses:
                    val_loss = val_losses[key][epoch]
                    title = f"Training on {train_tag} and {val_tag}"
                else:
                    val_loss = []
                    title = f"Training on {train_tag}"
                data = {"title":title, "key": key, "last": num_of_epochs - (epoch + 1), "epoch": epoch,
                        "train_losses": train_losses[key][epoch], "val_losses": val_loss}
                try:
                    # Send data to the visualizer process
                    self.__process_training[key].stdin.write(f"{json.dumps(data)}\n")
                    self.__process_training[key].stdin.flush()
                # BUG FIX: was a bare `except:`, which also swallowed
                # SystemExit/KeyboardInterrupt. A failed write means the
                # plot window was closed by the user: clean up and carry on.
                except Exception:
                    self.closeTraining()
                    log.warning("The visualizer process has been closed.")

        if epoch+1 == num_of_epochs:
            # Last epoch: close stdin so the plot processes see end-of-stream.
            for key in self.modely._model_def['Minimizers'].keys():
                if self.__process_training[key] is not None:
                    self.__process_training[key].stdin.close()
89

90
    def showResult(self, name_data):
        """Open one results-plot subprocess per minimizer for a dataset.

        :param name_data: name of the dataset whose performance/prediction
                          data has already been computed by the model.
        :raises ValueError: if no results are available for ``name_data``.
        """
        super().showResult(name_data)
        check(name_data in self.modely.performance, ValueError, f"Results not available for {name_data}.")
        # Kill any plot processes left over from a previous call on this dataset.
        if name_data in self.__process_results:
            for proc in self.__process_results[name_data].values():
                if proc is not None and proc.poll() is None:
                    proc.terminate()
                    proc.wait()
        self.__process_results[name_data] = {}

        for key in self.modely._model_def['Minimizers'].keys():
            # Start the data visualizer process
            self.__process_results[name_data][key] = subprocess.Popen(['python', self.__time_series_visualizer_script], stdin=subprocess.PIPE,
                                                    text=True)
            np_data_A = np.array(self.modely.prediction[name_data][key]['A'])
            # Windowed (4-D) predictions with more than 30 windows are
            # downsampled to 30 evenly spaced windows to keep the JSON payload small.
            if len(np_data_A.shape) > 3 and np_data_A.shape[1] > 30:
                np_data_B = np.array(self.modely.prediction[name_data][key]['B'])
                indices = np.linspace(0, np_data_A.shape[1] - 1, 30, dtype=int)
                data_A = np_data_A[:, indices, :, :].tolist()
                data_B = np_data_B[:, indices, :, :].tolist()
                data_idxs = np.array(self.modely.prediction[name_data]['idxs'])[:,indices].tolist()
            else:
                data_A = self.modely.prediction[name_data][key]['A']
                data_B = self.modely.prediction[name_data][key]['B']
                data_idxs = self.modely.prediction[name_data]['idxs'] if len(np_data_A.shape) > 3 else None

            data = {"name_data": name_data,
                    "key": key,
                    "performance": self.modely.performance[name_data][key],
                    "prediction_A": data_A,
                    "prediction_B": data_B,
                    "data_idxs": data_idxs,
                    "sample_time": self.modely._model_def['Info']["SampleTime"]}
            try:
                # Send data to the visualizer process
                self.__process_results[name_data][key].stdin.write(f"{json.dumps(data)}\n")
                self.__process_results[name_data][key].stdin.flush()
                self.__process_results[name_data][key].stdin.close()
            # BUG FIX: was a bare `except:` calling `self.closeResult(self, name_data)`,
            # which passed `self` as name_data and name_data as minimizer.
            except Exception:
                self.closeResult(name_data)
                log.warning(f"The visualizer {name_data} process has been closed.")
132

133
    def showWeights(self, weights = None):
        """No-op: weight visualization is not implemented for the
        matplotlib visualizer."""
135

136
    def showFunctions(self, functions = None, xlim = None, num_points = 1000):
        """Plot fuzzify or parametric functions, each in its own subprocess.

        :param functions: iterable of function names to plot; ``None`` plots
                          every function of the model (BUG FIX: the original
                          raised TypeError on the default ``None``).
        :param xlim: x-axis limits forwarded to the sampling helpers.
        :param num_points: number of sample points per input axis.
        :raises ValueError: if the model has not been neuralized.
        """
        check(self.modely.neuralized, ValueError, "The model has not been neuralized.")
        for key, value in self.modely._model_def['Functions'].items():
            if functions is not None and key not in functions:
                continue
            # Close a still-running plot of this function before re-plotting it.
            if key in self.__process_function and self.__process_function[key].poll() is None:
                self.__process_function[key].terminate()
                self.__process_function[key].wait()

            if 'functions' in value:
                # Fuzzify layer: plot its activation functions.
                x, activ_fun = return_fuzzify(value, xlim, num_points)
                data = {"name": key,
                        "x": x,
                        "y": activ_fun,
                        "chan_centers": value['centers']}
                # Start the data visualizer process
                self.__process_function[key] = subprocess.Popen(['python', self.__fuzzy_visualizer_script],
                                                              stdin=subprocess.PIPE,
                                                              text=True)
            # BUG FIX: was `elif 'code':`, a non-empty string literal that is
            # always true; the intent is clearly a membership test on `value`.
            elif 'code' in value:
                # Parametric function: evaluate it on a standard input grid.
                model_def = ModelDef(self.modely._model_def)
                model_def.updateParameters(self.modely._model)
                function_inputs = return_standard_inputs(value, model_def, xlim, num_points)
                function_output, function_input_list = return_function(value, function_inputs)

                data = {"name": key}
                if value['n_input'] == 2:
                    data['x0'] = function_inputs[0].reshape(num_points, num_points).tolist()
                    data['x1'] = function_inputs[1].reshape(num_points, num_points).tolist()
                    data['output'] = function_output.reshape(num_points, num_points).tolist()
                else:
                    data['x0'] = function_inputs[0].reshape(num_points).tolist()
                    data['output'] = function_output.reshape(num_points).tolist()
                data['params'] = []
                # BUG FIX: this loop used to rebind `key`, so the Popen below was
                # stored under the last parameter name instead of the function name.
                for i in range(len(value['params_and_consts'])):
                    data['params'] += [function_inputs[i+value['n_input']].tolist()]
                data['input_names'] = function_input_list

                # Start the data visualizer process
                self.__process_function[key] = subprocess.Popen(['python', self.__function_visualizer_script],
                                                              stdin=subprocess.PIPE,
                                                              text=True)
            else:
                # Neither a fuzzify nor a parametric function: nothing to plot.
                continue
            try:
                # Send data to the visualizer process
                self.__process_function[key].stdin.write(f"{json.dumps(data)}\n")
                self.__process_function[key].stdin.flush()
                self.__process_function[key].stdin.close()
            # BUG FIX: was a bare `except:`; also log the function that failed
            # rather than the whole `functions` argument.
            except Exception:
                self.closeFunctions()
                log.warning(f"The visualizer {key} process has been closed.")
185

186
    def closeFunctions(self, functions = None):
        """Terminate function-plot subprocesses.

        :param functions: iterable of function names whose plots should be
                          closed; ``None`` closes every function plot.
        """
        if functions is None:
            # Close everything and start from an empty registry.
            for proc in self.__process_function.values():
                proc.terminate()
                proc.wait()
            self.__process_function = {}
        else:
            # Close only the requested plots, dropping each from the registry.
            for name in functions:
                self.__process_function[name].terminate()
                self.__process_function[name].wait()
                self.__process_function.pop(name)
197

198
    def closeTraining(self, minimizer = None):
        """Terminate training-plot subprocesses.

        :param minimizer: name of a single minimizer whose plot should be
                          closed; ``None`` closes the plot of every minimizer.
        """
        if minimizer is not None:
            # Close one specific plot and forget its handle.
            self.__process_training[minimizer].terminate()
            self.__process_training[minimizer].wait()
            self.__process_training.pop(minimizer)
            return
        # Close the plot of every minimizer defined by the model, keeping a
        # None placeholder for each so later calls can tell they were closed.
        for name in self.modely._model_def['Minimizers'].keys():
            proc = self.__process_training.get(name)
            if proc is not None and proc.poll() is None:
                proc.terminate()
                proc.wait()
            self.__process_training[name] = None
209

210
    def closeResult(self, name_data = None, minimizer = None):
        """Terminate results-plot subprocesses.

        :param name_data: dataset whose plots should be closed; ``None``
                          closes every dataset's plots (minimizer must then
                          also be ``None``).
        :param minimizer: single minimizer within ``name_data`` to close;
                          ``None`` closes all minimizers of that dataset.
        :raises ValueError: if name_data is None while minimizer is not.
        """
        if name_data is None:
            check(minimizer is None, ValueError, "If name_data is None, minimizer must be None.")
            # Close every plot of every dataset and reset the registry.
            for procs in self.__process_results.values():
                for proc in procs.values():
                    proc.terminate()
                    proc.wait()
            self.__process_results = {}
        elif minimizer is None:
            # Close all plots of one dataset.
            for proc in self.__process_results[name_data].values():
                proc.terminate()
                proc.wait()
            self.__process_results[name_data] = {}
        else:
            # Close one specific plot and drop it from the dataset's registry.
            self.__process_results[name_data][minimizer].terminate()
            self.__process_results[name_data][minimizer].wait()
            self.__process_results[name_data].pop(minimizer)
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2025 Coveralls, Inc