tonegas / nnodely, build 18305449975 (GitHub push by tonegas: "Modified the version")
07 Oct 2025 07:32AM UTC, coverage 97.691% (-0.04% from 97.727%)

1 of 1 new or added line in 1 file covered (100.0%)
38 existing lines in 5 files now uncovered
12733 of 13034 relevant lines covered (97.69%)
0.98 hits per line

Source File: /nnodely/layers/fuzzify.py (95.33% covered)
Lines not hit by the test suite are marked with "# UNCOVERED" in the listing below.
import inspect, copy, textwrap, torch

import numpy as np
import torch.nn as nn

from collections.abc import Callable

from nnodely.basic.relation import NeuObj, Stream
from nnodely.basic.model import Model
from nnodely.support.utils import check, enforce_types
from nnodely.support.jsonutils import merge

fuzzify_relation_name = 'Fuzzify'

class Fuzzify(NeuObj):
    """
    Represents a Fuzzify relation in the neural network model.

    Parameters
    ----------
    output_dimension : int, optional
        The output dimension of the Fuzzify relation. If provided, `range` must also be provided and `centers` must be None.
    range : list, optional
        A list containing the start and end values for the range. Required if `output_dimension` is provided.
    centers : list, optional
        A list of center values for the fuzzy functions. Required if `output_dimension` is None.
        The `output_dimension` will be inferred from the number of centers provided.
    functions : str, list, or Callable, optional
        The fuzzy functions to use. Can be a string specifying a predefined function type, a custom function, or a list of callable functions. Default is 'Triangular'.

    Notes
    -----
    .. note::
        The predefined function types are 'Triangular' and 'Rectangular'.
        It is also possible to pass a list of custom functions. In this case, each center will be associated with the respective function in the list (see the list-of-functions example below).

    Attributes
    ----------
    relation_name : str
        The name of the relation.
    output_dimension : dict
        The output dimension of the Fuzzify relation.
    json : dict
        A dictionary containing the configuration of the Fuzzify relation.

    Examples
    --------
    .. image:: https://colab.research.google.com/assets/colab-badge.svg
        :target: https://colab.research.google.com/github/tonegas/nnodely/blob/main/examples/fuzzify.ipynb
        :alt: Open in Colab

    Example - basic usage:
        >>> x = Input('x')
        >>> fuz = Fuzzify(output_dimension=5, range=[1,5])
        >>> out = Output('out', fuz(x.last()))

    Example - passing the centers:
        >>> fuz = Fuzzify(centers=[-1,0,3,5], functions='Rectangular')
        >>> out = Output('out', fuz(x.last()))

    Example - using a custom function:
        >>> def fun(x):
        ...     import torch
        ...     return torch.tanh(x)
        >>> fuz = Fuzzify(output_dimension=11, range=[-5,5], functions=fun)
        >>> out = Output('out', fuz(x.last()))
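
    Example - using a list of custom functions (a sketch; `fun1` and `fun2` are hypothetical user functions, one per center):
        >>> def fun1(x):
        ...     import torch
        ...     return torch.tanh(x)
        >>> def fun2(x):
        ...     import torch
        ...     return torch.sigmoid(x)
        >>> fuz = Fuzzify(centers=[-1,1], functions=[fun1, fun2])
        >>> out = Output('out', fuz(x.last()))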
    """
    @enforce_types
    def __init__(self, output_dimension: int | None = None,
                 range: list | None = None, *,
                 centers: list | None = None,
                 functions: str | list | Callable = 'Triangular'):

        self.relation_name = fuzzify_relation_name
        super().__init__('F' + fuzzify_relation_name + str(NeuObj.count))
        self.json['Functions'][self.name] = {}
        # Either derive evenly spaced centers from (output_dimension, range) or take the centers directly
        if output_dimension is not None:
            check(range is not None, ValueError, 'if "output_dimension" is not None, "range" must also be set')
            check(centers is None, ValueError,
                  'if "output_dimension" and "range" are not None, then "centers" must be None')
            self.output_dimension = {'dim': output_dimension}
            interval = ((range[1] - range[0]) / (output_dimension - 1))
            self.json['Functions'][self.name]['centers'] = np.arange(range[0], range[1] + interval, interval).tolist()
        else:
            check(centers is not None, ValueError, 'if "output_dimension" is None, "centers" must be set')
            self.output_dimension = {'dim': len(centers)}
            self.json['Functions'][self.name]['centers'] = np.array(centers).tolist()
        self.json['Functions'][self.name]['dim_out'] = copy.deepcopy(self.output_dimension)

        if type(functions) is str:
            self.json['Functions'][self.name]['functions'] = functions
            self.json['Functions'][self.name]['names'] = functions
        elif type(functions) is list:
            self.json['Functions'][self.name]['functions'] = []
            self.json['Functions'][self.name]['names'] = []
            for func in functions:
                # Store the source code of each custom function (double quotes replaced for JSON portability)
                code = textwrap.dedent(inspect.getsource(func)).replace('\"', '\'')
                self.json['Functions'][self.name]['functions'].append(code)
                self.json['Functions'][self.name]['names'].append(func.__name__)
        else:
            code = textwrap.dedent(inspect.getsource(functions)).replace('\"', '\'')
            self.json['Functions'][self.name]['functions'] = code
            self.json['Functions'][self.name]['names'] = functions.__name__

    @enforce_types
    def __call__(self, obj: Stream) -> Stream:
        stream_name = fuzzify_relation_name + str(Stream.count)
        check(type(obj) is Stream, TypeError,
              f"The type of {obj} is {type(obj)} and is not supported for Fuzzify operation.")
        check('dim' in obj.dim and obj.dim['dim'] == 1, ValueError, 'Input dimension must be scalar')
        output_dimension = copy.deepcopy(obj.dim)
        output_dimension.update(self.output_dimension)
        stream_json = merge(self.json, obj.json)
        stream_json['Relations'][stream_name] = [fuzzify_relation_name, [obj.name], self.name]
        return Stream(stream_name, stream_json, output_dimension)
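
# Sample each membership function of a Fuzzify relation on a grid of x values (for
# inspection or plotting); returns the x samples and a dict {center index: activation values}.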
def return_fuzzify(json, xlim=None, num_points=1000):
    if xlim is not None:
        x = torch.from_numpy(np.linspace(xlim[0], xlim[1], num=num_points))  # UNCOVERED
    else:
        x = torch.from_numpy(np.linspace(json['centers'][0] - 2, json['centers'][-1] + 2, num=num_points))
    chan_centers = np.array(json['centers'])
    activ_fun = {}
    if isinstance(json['names'], list):
        n_func = len(json['names'])
    else:
        n_func = 1
    for i in range(len(chan_centers)):
        if json['functions'] == 'Triangular':
            activ_fun[i] = triangular(x, i, chan_centers).tolist()
        elif json['functions'] == 'Rectangular':
            activ_fun[i] = rectangular(x, i, chan_centers).tolist()
        else:
            if isinstance(json['names'], list):
                if i >= n_func:
                    # Equivalent to i % n_func: cycle through the available functions
                    func_idx = i - round(n_func * (i // n_func))
                else:
                    func_idx = i
                exec(json['functions'][func_idx], globals())
                function_to_call = globals()[json['names'][func_idx]]
            else:
                exec(json['functions'], globals())
                function_to_call = globals()[json['names']]
            activ_fun[i] = custom_function(function_to_call, x, i, chan_centers).tolist()
    return x.tolist(), activ_fun
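
# Triangular membership function for channel `idx_channel`: a unit-height triangle centred
# on chan_centers[idx_channel], with saturated (flat) shoulders on the first and last channels.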
def triangular(x, idx_channel, chan_centers):
    # Compute the number of channels
    num_channels = len(chan_centers)
    # First dimension of activation
    if idx_channel == 0:
        if num_channels != 1:
            ampl = chan_centers[1] - chan_centers[0]
            act_fcn = torch.minimum(torch.maximum(-(x - chan_centers[0]) / ampl + 1, torch.tensor(0.0)), torch.tensor(1.0))
        else:
            # In case the user only wants one channel the activation is constant;
            # returned as a tensor so that .tolist() and slicing work uniformly
            act_fcn = torch.ones_like(x)  # UNCOVERED
    elif idx_channel != 0 and idx_channel == (num_channels - 1):
        ampl = chan_centers[-1] - chan_centers[-2]
        act_fcn = torch.minimum(torch.maximum((x - chan_centers[-2]) / ampl, torch.tensor(0.0)), torch.tensor(1.0))
    else:
        ampl_1 = chan_centers[idx_channel] - chan_centers[idx_channel - 1]
        ampl_2 = chan_centers[idx_channel + 1] - chan_centers[idx_channel]
        act_fcn = torch.minimum(torch.maximum((x - chan_centers[idx_channel - 1]) / ampl_1, torch.tensor(0.0)), torch.maximum(-(x - chan_centers[idx_channel]) / ampl_2 + 1, torch.tensor(0.0)))
    return act_fcn
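
# Rectangular (crisp) membership function for channel `idx_channel`: 1 between the midpoints
# to the neighbouring centers and 0 elsewhere; the first and last channels extend outwards unbounded.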
def rectangular(x, idx_channel, chan_centers):
    ## compute number of channels
    num_channels = len(chan_centers)
    ## First dimension of activation
    if idx_channel == 0:
        if num_channels != 1:
            width = abs(chan_centers[idx_channel + 1] - chan_centers[idx_channel]) / 2
            act_fcn = torch.where(x < (chan_centers[idx_channel] + width), 1.0, 0.0)
        else:
            # In case the user only wants one channel the activation is constant;
            # returned as a tensor so that .tolist() and slicing work uniformly
            act_fcn = torch.ones_like(x)  # UNCOVERED
    elif idx_channel != 0 and idx_channel == (num_channels - 1):
        width = abs(chan_centers[idx_channel] - chan_centers[idx_channel - 1]) / 2
        act_fcn = torch.where(x >= chan_centers[idx_channel] - width, 1.0, 0.0)
    else:
        width_forward = abs(chan_centers[idx_channel + 1] - chan_centers[idx_channel]) / 2
        width_backward = abs(chan_centers[idx_channel] - chan_centers[idx_channel - 1]) / 2
        act_fcn = torch.where((x >= chan_centers[idx_channel] - width_backward) & (x < chan_centers[idx_channel] + width_forward), 1.0, 0.0)
    return act_fcn
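
# Evaluate a user-supplied function on the input shifted by the channel center, so every
# channel applies the same shape centred on its own center.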
def custom_function(func, x, idx_channel, chan_centers):
    act_fcn = func(x - chan_centers[idx_channel])
    return act_fcn
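
# torch module behind a Fuzzify relation: maps a scalar input stream to `dim_out` membership
# activations, one channel per center. Custom functions are re-created from their stored
# source with exec() and decorated with @torch.fx.wrap so the layer remains traceable.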
class Fuzzify_Layer(nn.Module):
    def __init__(self, params):
        super().__init__()
        self.centers = params['centers']
        self.function = params['functions']
        self.dimension = params['dim_out']['dim']
        self.name = params['names']

        if type(self.name) is list:
            self.n_func = len(self.name)
            for func, name in zip(self.function, self.name):
                ## Add the function to the globals
                try:
                    code = 'import torch\n@torch.fx.wrap\n' + func
                    exec(code, globals())
                except Exception as e:  # UNCOVERED
                    check(False, RuntimeError, f"An error occurred when running the function '{name}':\n {e}")  # UNCOVERED
        else:
            self.n_func = 1
            if self.name not in ['Triangular', 'Rectangular']:  ## custom function
                ## Add the function to the globals
                try:
                    code = 'import torch\n@torch.fx.wrap\n' + self.function
                    exec(code, globals())
                except Exception as e:  # UNCOVERED
                    check(False, RuntimeError, f"An error occurred when running the function '{self.name}':\n {e}")  # UNCOVERED

    def forward(self, x):
        res = torch.zeros_like(x).repeat(1, 1, self.dimension)
        if self.function == 'Triangular':
            for i in range(len(self.centers)):
                slicing(res, torch.tensor(i), triangular(x, i, self.centers))
        elif self.function == 'Rectangular':
            for i in range(len(self.centers)):
                slicing(res, torch.tensor(i), rectangular(x, i, self.centers))
        else:  ## Custom_function
            if self.n_func == 1:
                # Retrieve the function object from the globals dictionary
                function_to_call = globals()[self.name]
                for i in range(len(self.centers)):
                    slicing(res, torch.tensor(i), custom_function(function_to_call, x, i, self.centers))
            else:  ## we have multiple functions
                for i in range(len(self.centers)):
                    if i >= self.n_func:
                        # Equivalent to i % self.n_func: cycle through the available functions
                        func_idx = i - round(self.n_func * (i // self.n_func))
                    else:
                        func_idx = i
                    function_to_call = globals()[self.name[func_idx]]
                    slicing(res, torch.tensor(i), custom_function(function_to_call, x, i, self.centers))
        return res
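
# torch.fx-wrapped helper that writes one channel's activation into its slice of the output
# tensor; wrapping keeps the in-place assignment as a single call node when the model is traced.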
@torch.fx.wrap
def slicing(res, i, x):
    res[:, :, i:i + 1] = x

# Factory registered on Model: builds the torch layer for a 'Fuzzify' relation
# from its stored parameters.
def createFuzzify(self, *params):
    return Fuzzify_Layer(params[0])

setattr(Model, fuzzify_relation_name, createFuzzify)