# nnodely/linear.py
import copy, inspect, textwrap, torch

import torch.nn as nn

from collections.abc import Callable

from nnodely.relation import NeuObj, Stream, AutoToStream
from nnodely.model import Model
from nnodely.parameter import Parameter
from nnodely.utils import check, merge, enforce_types

from nnodely.logger import logging, nnLogger
log = nnLogger(__name__, logging.WARNING)

linear_relation_name = 'Linear'

class Linear(NeuObj, AutoToStream):
    """
    Represents a Linear relation in the neural network model.

    Notes
    -----
    .. note::
        The Linear relation works along the input dimension (the third dimension) of the input tensor.
        You can find some initialization functions inside the initializer module.

    Parameters
    ----------
    output_dimension : int, optional
        The output dimension of the Linear relation.
    W_init : Callable, optional
        A callable for initializing the weights.
    W_init_params : dict, optional
        A dictionary of parameters for the weight initializer.
    b_init : Callable, optional
        A callable for initializing the bias.
    b_init_params : dict, optional
        A dictionary of parameters for the bias initializer.
    W : Parameter or str, optional
        The weight parameter object or name. If not given, a new parameter is auto-generated.
    b : bool, str, or Parameter, optional
        The bias parameter object, name, or a boolean indicating whether to use a bias. If set to True, a new parameter is auto-generated.
    dropout : int or float, optional
        The dropout rate. Default is 0.

    Attributes
    ----------
    relation_name : str
        The name of the relation.
    W_init : Callable
        The weight initializer.
    W_init_params : dict
        The parameters for the weight initializer.
    b_init : Callable
        The bias initializer.
    b_init_params : dict
        The parameters for the bias initializer.
    W : Parameter or str
        The weight parameter object or name.
    b : bool, str, or Parameter
        The bias parameter object, name, or a boolean indicating whether to use a bias.
    Wname : str
        The name of the weight parameter.
    bname : str
        The name of the bias parameter.
    dropout : int or float
        The dropout rate.
    output_dimension : int
        The output dimension of the Linear relation.

    Examples
    --------

    Example - basic usage:
        >>> input = Input('in').tw(0.05)
        >>> relation = Linear(input)

    Example - passing a weight and bias parameter:
        >>> input = Input('in').last()
        >>> weight = Parameter('W', values=[[[1]]])
        >>> bias = Parameter('b', values=[[1]])
        >>> relation = Linear(W=weight, b=bias)(input)

    Example - parameter initialization:
        >>> input = Input('in').last()
        >>> relation = Linear(b=True, W_init=init_negexp, b_init=init_constant, b_init_params={'value':1})(input)
    """

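    # A minimal sketch of the shape contract from the note above (assumed API,
    # mirroring the docstring examples): a stream with input dimension 3 and a
    # sample window of 5 keeps its window and maps the third dimension to
    # output_dimension:
    #
    #     >>> x = Input('x', dimensions=3).sw(5)      # dim: {'dim': 3, 'sw': 5}
    #     >>> out = Linear(output_dimension=2)(x)     # dim: {'dim': 2, 'sw': 5}
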
    @enforce_types
    def __init__(self, output_dimension:int|None = None,
                 W_init:Callable|None = None,
                 W_init_params:dict|None = None,
                 b_init:Callable|None = None,
                 b_init_params:dict|None = None,
                 W:Parameter|str|None = None,
                 b:bool|str|Parameter|None = None,
                 dropout:int|float = 0):

        self.relation_name = linear_relation_name
        self.W_init = W_init
        self.W_init_params = W_init_params
        self.b_init = b_init
        self.b_init_params = b_init_params
        self.W = W
        self.b = b
        self.bname = None
        self.Wname = None
        self.dropout = dropout
        super().__init__('P' + linear_relation_name + str(NeuObj.count))

        # Resolve the weight: auto-generate a new parameter, reference an existing
        # one by name, or reuse the given Parameter object
        if W is None:
            self.output_dimension = 1 if output_dimension is None else output_dimension
            self.Wname = self.name + 'W'
            self.json['Parameters'][self.Wname] = {}
        elif type(W) is str:
            self.output_dimension = 1 if output_dimension is None else output_dimension
            self.Wname = W
            self.json['Parameters'][self.Wname] = {}
        else:
            check(type(W) is Parameter, TypeError, 'The "W" must be of type Parameter or str.')
            check('sw' in W.dim and W.dim['sw'] == 1, ValueError, f'The "W" must have window dimension sw=1 but was {W.dim}.')
            check(len(W.dim['dim']) == 2, ValueError, 'The "W" dimensions must be a list of two elements.')
            self.output_dimension = W.dim['dim'][1]
            if output_dimension is not None:
                check(W.dim['dim'][1] == output_dimension, ValueError, 'output_dimension must be equal to the second dim of "W".')
            self.Wname = W.name
            self.json['Parameters'][W.name] = copy.deepcopy(W.json['Parameters'][W.name])

        # Resolve the bias in the same way; b=True auto-generates a new parameter
        if b is not None:
            check(type(b) is Parameter or type(b) is bool or type(b) is str, TypeError, 'The "b" must be of type Parameter, bool or str.')
            if type(b) is Parameter:
                check(type(b.dim['dim']) is int, ValueError, 'The "b" dimensions must be an integer.')
                if output_dimension is not None:
                    check(b.dim['dim'] == output_dimension, ValueError,
                          'output_dimension must be equal to the dim of "b".')
                self.bname = b.name
                self.json['Parameters'][b.name] = copy.deepcopy(b.json['Parameters'][b.name])
            elif type(b) is str:
                self.bname = b
                self.json['Parameters'][self.bname] = {'dim': self.output_dimension, 'sw': 1}
            else:
                self.bname = self.name + 'b'
                self.json['Parameters'][self.bname] = {'dim': self.output_dimension, 'sw': 1}
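
    # Resolution summary for "W" and "b" above (names here are illustrative):
    #   Linear()                  -> auto-generated weight 'PLinear0W'; output dimension defaults to 1
    #   Linear(W='sharedW')       -> weight referenced by name; dims are filled in at call time
    #   Linear(W=Parameter(...))  -> output dimension taken from the Parameter's second dim
    #   Linear(b=True)            -> auto-generated bias 'PLinear0b' of size output_dimension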

        # Record the weight initialization function in the model JSON (the function's
        # source code is stored so the model can be serialized)
        if self.W_init is not None:
            check('values' not in self.json['Parameters'][self.Wname], ValueError, f"The parameter {self.Wname} is already initialized.")
            check(inspect.isfunction(self.W_init), ValueError, "The W_init parameter must be a function.")
            code = textwrap.dedent(inspect.getsource(self.W_init)).replace('\"', '\'')
            self.json['Parameters'][self.Wname]['init_fun'] = {'code': code, 'name': self.W_init.__name__}
            if self.W_init_params is not None:
                self.json['Parameters'][self.Wname]['init_fun']['params'] = self.W_init_params

        # Record the bias initialization function in the same way
        if self.b_init is not None:
            check(self.bname is not None, ValueError, "The bias is missing.")
            check('values' not in self.json['Parameters'][self.bname], ValueError, f"The parameter {self.bname} is already initialized.")
            check(inspect.isfunction(self.b_init), ValueError, "The b_init parameter must be a function.")
            code = textwrap.dedent(inspect.getsource(self.b_init)).replace('\"', '\'')
            self.json['Parameters'][self.bname]['init_fun'] = {'code': code, 'name': self.b_init.__name__}
            if self.b_init_params is not None:
                self.json['Parameters'][self.bname]['init_fun']['params'] = self.b_init_params

        self.json_stream = {}

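    # Sketch of the 'init_fun' entry created above (assuming an initializer such
    # as nnodely's init_constant; the 'code' field holds the function's own source):
    #
    #     {'init_fun': {'code': "def init_constant(...): ...",
    #                   'name': 'init_constant',
    #                   'params': {'value': 1}}}
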
    @enforce_types
    def __call__(self, obj:Stream) -> Stream:
        stream_name = linear_relation_name + str(Stream.count)
        check(type(obj) is Stream, TypeError,
              f"The type of {obj} is {type(obj)} and is not supported for the Linear operation.")
        # Determine the window type (time window 'tw' or sample window 'sw') of the input
        window = 'tw' if 'tw' in obj.dim else ('sw' if 'sw' in obj.dim else None)
        assert window is not None, f"The input {obj.name} has no window dimension."

        # Cache one JSON per input dimension, so the same Linear can be reused on different streams
        json_stream_name = obj.dim['dim']
        if obj.dim['dim'] not in self.json_stream:
            if len(self.json_stream) > 0:
                log.warning(
                    f"The Linear {self.name} was called with inputs of different dimensions. If both streams enter the model, an error will be raised.")
            self.json_stream[json_stream_name] = copy.deepcopy(self.json)

            if type(self.W) is not Parameter:
                self.json_stream[json_stream_name]['Parameters'][self.Wname]['dim'] = [obj.dim['dim'], self.output_dimension]
                self.json_stream[json_stream_name]['Parameters'][self.Wname]['sw'] = 1

        if type(self.W) is Parameter:
            check(self.json['Parameters'][self.Wname]['dim'][0] == obj.dim['dim'], ValueError,
                  'The input dimension must be equal to the first dim of the parameter.')

        stream_json = merge(self.json_stream[json_stream_name], obj.json)
        stream_json['Relations'][stream_name] = [linear_relation_name, [obj.name], self.Wname, self.bname, self.dropout]
        return Stream(stream_name, stream_json, {'dim': self.output_dimension, window: obj.dim[window]})
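
    # For reference, the relation entry written into the stream JSON above looks
    # like this (names are illustrative; real ones are generated from counters):
    #
    #     'Relations': {'Linear3': ['Linear', ['in'], 'PLinear0W', 'PLinear0b', 0]}
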
class Linear_Layer(nn.Module):
    def __init__(self, weights, bias=None, dropout=0):
        super().__init__()
        self.dropout = nn.Dropout(p=dropout) if dropout > 0 else None
        self.weights = weights
        self.bias = bias

    def forward(self, x):
        # x is expected to be of shape [batch, window, input_dimension]
        # Batched matrix multiplication along the input dimension via torch.einsum
        y = torch.einsum('bwi,io->bwo', x, self.weights[0])  # y: [batch, window, output_dimension]
        if self.bias is not None:
            y += self.bias  # Add the bias
        # Apply dropout if requested
        if self.dropout is not None:
            y = self.dropout(y)
        return y

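# A quick equivalence check for the einsum above (a sketch, not part of the
# library): with a weight tensor of shape [sw=1, input, output], the
# 'bwi,io->bwo' contraction is a plain matrix multiply on the last axis.
#
#     >>> x = torch.randn(2, 5, 3)                    # [batch, window, input]
#     >>> w = torch.randn(1, 3, 4)                    # [sw, input, output]
#     >>> torch.allclose(Linear_Layer(w)(x), x @ w[0])
#     True
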
def createLinear(self, *inputs):
    # Factory attached to Model below; inputs are (weights, bias, dropout)
    return Linear_Layer(weights=inputs[0], bias=inputs[1], dropout=inputs[2])

setattr(Model, linear_relation_name, createLinear)
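
# A minimal sketch of how the registered factory can be reached (assuming `model`
# is a Model instance and `weight_tensor`/`bias_tensor` are prepared torch tensors):
#
#     >>> layer = getattr(model, linear_relation_name)(weight_tensor, bias_tensor, 0.0)
#     >>> isinstance(layer, Linear_Layer)
#     True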