openmc-dev / openmc · build 10586562087
27 Aug 2024 10:05PM UTC · coverage: 84.707% (-0.2%) from 84.9%

Pull Request #3112: Revamp CI with dependency and Python caching for efficient installs
github · web-flow · Merge f7f32bf18 into 5bc04b5d7

49553 of 58499 relevant lines covered (84.71%)
34324762.08 hits per line

Source file: /openmc/data/function.py (89.63% covered)

from abc import ABC, abstractmethod
from collections.abc import Iterable, Callable
from functools import reduce
from itertools import zip_longest
from math import exp, log
from numbers import Real, Integral

import numpy as np

import openmc.checkvalue as cv
import openmc.data
from openmc.mixin import EqualityMixin
from .data import EV_PER_MEV

INTERPOLATION_SCHEME = {1: 'histogram', 2: 'linear-linear', 3: 'linear-log',
                        4: 'log-linear', 5: 'log-log'}


def sum_functions(funcs):
    """Add tabulated and polynomial functions together

    Parameters
    ----------
    funcs : list of Function1D
        Functions to add

    Returns
    -------
    Function1D
        Sum of polynomial/tabulated functions

    """
    # Copy so we can iterate multiple times
    funcs = list(funcs)

    # Get x values for all tabulated components
    xs = []
    for f in funcs:
        if isinstance(f, Tabulated1D):
            xs.append(f.x)
            if not np.all(f.interpolation == 2):
                raise ValueError('Only linear-linear tabulated functions '
                                 'can be combined')

    if xs:
        # Take the union of all energies (sorted)
        x = reduce(np.union1d, xs)

        # Evaluate each function and add together
        y = sum(f(x) for f in funcs)
        return Tabulated1D(x, y)
    else:
        # If no tabulated functions are present, we need to combine the
        # polynomials by adding their coefficients
        coeffs = [sum(x) for x in zip_longest(*funcs, fillvalue=0.0)]
        return Polynomial(coeffs)
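
# A minimal usage sketch (illustrative, not from the original file), assuming
# Tabulated1D, Polynomial, and sum_functions are importable from the public
# openmc.data namespace:
#
#     >>> from openmc.data import Tabulated1D, Polynomial, sum_functions
#     >>> f = Tabulated1D([0., 10.], [4., 5.])
#     >>> g = Polynomial((1.0, 0.5))       # 1 + 0.5*x
#     >>> h = sum_functions([f, g])        # Tabulated1D on the union energy grid
#     >>> float(h(5.0))
#     8.0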

class Function1D(EqualityMixin, ABC):
    """A function of one independent variable with HDF5 support."""
    @abstractmethod
    def __call__(self): pass

    @abstractmethod
    def to_hdf5(self, group, name='xy'):
        """Write function to an HDF5 group

        Parameters
        ----------
        group : h5py.Group
            HDF5 group to write to
        name : str
            Name of the dataset to create

        """
        pass

    @classmethod
    def from_hdf5(cls, dataset):
        """Generate function from an HDF5 dataset

        Parameters
        ----------
        dataset : h5py.Dataset
            Dataset to read from

        Returns
        -------
        openmc.data.Function1D
            Function read from dataset

        """
        for subclass in cls.__subclasses__():
            if dataset.attrs['type'].decode() == subclass.__name__:
                return subclass.from_hdf5(dataset)
        raise ValueError("Unrecognized Function1D class: '"
                         + dataset.attrs['type'].decode() + "'")


class Tabulated1D(Function1D):
    """A one-dimensional tabulated function.

    This class mirrors the TAB1 type from the ENDF-6 format. A tabulated
    function is specified by tabulated (x,y) pairs along with interpolation
    rules that determine the values between tabulated pairs.

    Once an object has been created, it can be used as though it were an actual
    function, e.g.:

    >>> f = Tabulated1D([0, 10], [4, 5])
    >>> [f(xi) for xi in numpy.linspace(0, 10, 5)]
    [4.0, 4.25, 4.5, 4.75, 5.0]

    Parameters
    ----------
    x : Iterable of float
        Independent variable
    y : Iterable of float
        Dependent variable
    breakpoints : Iterable of int
        Breakpoints for interpolation regions
    interpolation : Iterable of int
        Interpolation scheme identification number, e.g., 3 means y is linear in
        ln(x).

    Attributes
    ----------
    x : Iterable of float
        Independent variable
    y : Iterable of float
        Dependent variable
    breakpoints : Iterable of int
        Breakpoints for interpolation regions
    interpolation : Iterable of int
        Interpolation scheme identification number, e.g., 3 means y is linear in
        ln(x).
    n_regions : int
        Number of interpolation regions
    n_pairs : int
        Number of tabulated (x,y) pairs

    """

    def __init__(self, x, y, breakpoints=None, interpolation=None):
        if breakpoints is None or interpolation is None:
            # Single linear-linear interpolation region by default
            self.breakpoints = np.array([len(x)])
            self.interpolation = np.array([2])
        else:
            self.breakpoints = np.asarray(breakpoints, dtype=int)
            self.interpolation = np.asarray(interpolation, dtype=int)

        self.x = np.asarray(x)
        self.y = np.asarray(y)

    def __call__(self, x):
        # Check if input is scalar
        if not isinstance(x, Iterable):
            return self._interpolate_scalar(x)

        x = np.array(x)

        # Create output array
        y = np.zeros_like(x)

        # Get indices for interpolation
        idx = np.searchsorted(self.x, x, side='right') - 1

        # Loop over interpolation regions
        for k in range(len(self.breakpoints)):
            # Get indices for the beginning and ending of this region
            i_begin = self.breakpoints[k-1] - 1 if k > 0 else 0
            i_end = self.breakpoints[k] - 1

            # Figure out which idx values lie within this region
            contained = (idx >= i_begin) & (idx < i_end)

            xk = x[contained]                 # x values in this region
            xi = self.x[idx[contained]]       # low edge of corresponding bins
            xi1 = self.x[idx[contained] + 1]  # high edge of corresponding bins
            yi = self.y[idx[contained]]
            yi1 = self.y[idx[contained] + 1]

            if self.interpolation[k] == 1:
                # Histogram
                y[contained] = yi

            elif self.interpolation[k] == 2:
                # Linear-linear
                y[contained] = yi + (xk - xi)/(xi1 - xi)*(yi1 - yi)

            elif self.interpolation[k] == 3:
                # Linear-log
                y[contained] = yi + np.log(xk/xi)/np.log(xi1/xi)*(yi1 - yi)

            elif self.interpolation[k] == 4:
                # Log-linear
                y[contained] = yi*np.exp((xk - xi)/(xi1 - xi)*np.log(yi1/yi))

            elif self.interpolation[k] == 5:
                # Log-log
                y[contained] = (yi*np.exp(np.log(xk/xi)/np.log(xi1/xi)
                                *np.log(yi1/yi)))

        # In some cases, x values might be outside the tabulated region due only
        # to precision, so we check if they're close and set them equal if so.
        y[np.isclose(x, self.x[0], atol=1e-14)] = self.y[0]
        y[np.isclose(x, self.x[-1], atol=1e-14)] = self.y[-1]

        return y
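
# A minimal sketch (illustrative, not from the original file) of evaluating a
# table with two interpolation regions, assuming Tabulated1D is importable
# from openmc.data; the breakpoint 2 ends the histogram region after the
# second point, and the remaining points use linear-linear interpolation:
#
#     >>> import numpy as np
#     >>> from openmc.data import Tabulated1D
#     >>> f = Tabulated1D([0., 1., 2., 3.], [10., 20., 30., 40.],
#     ...                 breakpoints=[2, 4], interpolation=[1, 2])
#     >>> f(np.array([0.5, 2.5])).tolist()
#     [10.0, 35.0]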

    def _interpolate_scalar(self, x):
        if x <= self._x[0]:
            return self._y[0]
        elif x >= self._x[-1]:
            return self._y[-1]

        # Get the index for interpolation
        idx = np.searchsorted(self._x, x, side='right') - 1

        # Loop over interpolation regions
        for b, p in zip(self.breakpoints, self.interpolation):
            if idx < b - 1:
                break

        xi = self._x[idx]       # low edge of the corresponding bin
        xi1 = self._x[idx + 1]  # high edge of the corresponding bin
        yi = self._y[idx]
        yi1 = self._y[idx + 1]

        if p == 1:
            # Histogram
            return yi

        elif p == 2:
            # Linear-linear
            return yi + (x - xi)/(xi1 - xi)*(yi1 - yi)

        elif p == 3:
            # Linear-log
            return yi + log(x/xi)/log(xi1/xi)*(yi1 - yi)

        elif p == 4:
            # Log-linear
            return yi*exp((x - xi)/(xi1 - xi)*log(yi1/yi))

        elif p == 5:
            # Log-log
            return yi*exp(log(x/xi)/log(xi1/xi)*log(yi1/yi))

    def __len__(self):
        return len(self.x)

    @property
    def x(self):
        return self._x

    @x.setter
    def x(self, x):
        cv.check_type('x values', x, Iterable, Real)
        self._x = x

    @property
    def y(self):
        return self._y

    @y.setter
    def y(self, y):
        cv.check_type('y values', y, Iterable, Real)
        self._y = y

    @property
    def breakpoints(self):
        return self._breakpoints

    @breakpoints.setter
    def breakpoints(self, breakpoints):
        cv.check_type('breakpoints', breakpoints, Iterable, Integral)
        self._breakpoints = breakpoints

    @property
    def interpolation(self):
        return self._interpolation

    @interpolation.setter
    def interpolation(self, interpolation):
        cv.check_type('interpolation', interpolation, Iterable, Integral)
        self._interpolation = interpolation

    @property
    def n_pairs(self):
        return len(self.x)

    @property
    def n_regions(self):
        return len(self.breakpoints)

    def integral(self):
        """Integral of the tabulated function over its tabulated range.

        Returns
        -------
        numpy.ndarray
            Array of same length as the tabulated data that represents partial
            integrals from the bottom of the range to each tabulated point.

        """

        # Create output array
        partial_sum = np.zeros(len(self.x) - 1)

        i_low = 0
        for k in range(len(self.breakpoints)):
            # Determine which x values are within this interpolation range
            i_high = self.breakpoints[k] - 1

            # Get x values and bounding (x,y) pairs
            x0 = self.x[i_low:i_high]
            x1 = self.x[i_low + 1:i_high + 1]
            y0 = self.y[i_low:i_high]
            y1 = self.y[i_low + 1:i_high + 1]

            if self.interpolation[k] == 1:
                # Histogram
                partial_sum[i_low:i_high] = y0*(x1 - x0)

            elif self.interpolation[k] == 2:
                # Linear-linear
                m = (y1 - y0)/(x1 - x0)
                partial_sum[i_low:i_high] = (y0 - m*x0)*(x1 - x0) + \
                                            m*(x1**2 - x0**2)/2

            elif self.interpolation[k] == 3:
                # Linear-log: y = y0 + m*log(x/x0), so the constant term
                # integrates to y0*(x1 - x0)
                logx = np.log(x1/x0)
                m = (y1 - y0)/logx
                partial_sum[i_low:i_high] = y0*(x1 - x0) + \
                                            m*(x1*(logx - 1) + x0)

            elif self.interpolation[k] == 4:
                # Log-linear
                m = np.log(y1/y0)/(x1 - x0)
                partial_sum[i_low:i_high] = y0/m*(np.exp(m*(x1 - x0)) - 1)

            elif self.interpolation[k] == 5:
                # Log-log
                m = np.log(y1/y0)/np.log(x1/x0)
                partial_sum[i_low:i_high] = y0/((m + 1)*x0**m)*(
                    x1**(m + 1) - x0**(m + 1))

            i_low = i_high

        return np.concatenate(([0.], np.cumsum(partial_sum)))
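
# A minimal sketch (illustrative, not from the original file) of the running
# integral for a simple linear-linear table, assuming Tabulated1D is
# importable from openmc.data:
#
#     >>> from openmc.data import Tabulated1D
#     >>> f = Tabulated1D([0., 1., 2.], [0., 1., 2.])
#     >>> f.integral().tolist()
#     [0.0, 0.5, 2.0]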

    def to_hdf5(self, group, name='xy'):
        """Write tabulated function to an HDF5 group

        Parameters
        ----------
        group : h5py.Group
            HDF5 group to write to
        name : str
            Name of the dataset to create

        """
        dataset = group.create_dataset(name, data=np.vstack(
            [self.x, self.y]))
        dataset.attrs['type'] = np.bytes_(type(self).__name__)
        dataset.attrs['breakpoints'] = self.breakpoints
        dataset.attrs['interpolation'] = self.interpolation

    @classmethod
    def from_hdf5(cls, dataset):
        """Generate tabulated function from an HDF5 dataset

        Parameters
        ----------
        dataset : h5py.Dataset
            Dataset to read from

        Returns
        -------
        openmc.data.Tabulated1D
            Function read from dataset

        """
        if dataset.attrs['type'].decode() != cls.__name__:
            raise ValueError("Expected an HDF5 attribute 'type' equal to '"
                             + cls.__name__ + "'")

        x = dataset[0, :]
        y = dataset[1, :]
        breakpoints = dataset.attrs['breakpoints']
        interpolation = dataset.attrs['interpolation']
        return cls(x, y, breakpoints, interpolation)

    @classmethod
    def from_ace(cls, ace, idx=0, convert_units=True):
        """Create a Tabulated1D object from an ACE table.

        Parameters
        ----------
        ace : openmc.data.ace.Table
            An ACE table
        idx : int
            Offset to read from in XSS array (default of zero)
        convert_units : bool
            If the abscissa represents energy, indicate whether to convert MeV
            to eV.

        Returns
        -------
        openmc.data.Tabulated1D
            Tabulated data object

        """

        # Get number of regions and pairs
        n_regions = int(ace.xss[idx])
        n_pairs = int(ace.xss[idx + 1 + 2*n_regions])

        # Get interpolation information
        idx += 1
        if n_regions > 0:
            breakpoints = ace.xss[idx:idx + n_regions].astype(int)
            interpolation = ace.xss[idx + n_regions:idx + 2*n_regions].astype(int)
        else:
            # 0 regions implies linear-linear interpolation by default
            breakpoints = np.array([n_pairs])
            interpolation = np.array([2])

        # Get (x,y) pairs
        idx += 2*n_regions + 1
        x = ace.xss[idx:idx + n_pairs].copy()
        y = ace.xss[idx + n_pairs:idx + 2*n_pairs].copy()

        if convert_units:
            x *= EV_PER_MEV

        return Tabulated1D(x, y, breakpoints, interpolation)


class Polynomial(np.polynomial.Polynomial, Function1D):
    """A power series class.

    Parameters
    ----------
    coef : Iterable of float
        Polynomial coefficients in order of increasing degree

    """
    def to_hdf5(self, group, name='xy'):
        """Write polynomial function to an HDF5 group

        Parameters
        ----------
        group : h5py.Group
            HDF5 group to write to
        name : str
            Name of the dataset to create

        """
        dataset = group.create_dataset(name, data=self.coef)
        dataset.attrs['type'] = np.bytes_(type(self).__name__)

    @classmethod
    def from_hdf5(cls, dataset):
        """Generate function from an HDF5 dataset

        Parameters
        ----------
        dataset : h5py.Dataset
            Dataset to read from

        Returns
        -------
        openmc.data.Function1D
            Function read from dataset

        """
        if dataset.attrs['type'].decode() != cls.__name__:
            raise ValueError("Expected an HDF5 attribute 'type' equal to '"
                             + cls.__name__ + "'")
        return cls(dataset[()])
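
# A minimal sketch (illustrative, not from the original file) of writing a
# Polynomial to HDF5 and reading it back through the Function1D dispatcher;
# it assumes h5py is installed and that Polynomial and Function1D are
# importable from openmc.data (the file name and in-memory driver are
# arbitrary choices for the example):
#
#     >>> import h5py
#     >>> from openmc.data import Polynomial, Function1D
#     >>> p = Polynomial((3.0, 2.0, 1.0))      # 3 + 2x + x^2
#     >>> with h5py.File('funcs.h5', 'w', driver='core', backing_store=False) as fh:
#     ...     p.to_hdf5(fh, 'my_poly')
#     ...     q = Function1D.from_hdf5(fh['my_poly'])
#     >>> float(q(2.0))
#     11.0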

class Combination(EqualityMixin):
    """Combination of multiple functions with a user-defined operator

    This class allows you to create a callable object which represents the
    combination of other callable objects by way of a series of user-defined
    operators connecting each of the callable objects.

    Parameters
    ----------
    functions : Iterable of Callable
        Functions to combine according to operations
    operations : Iterable of numpy.ufunc
        Operations to perform between functions; note that the standard order
        of operations will not be followed, but can be simulated by
        combinations of Combination objects. The operations parameter must have
        a length one less than the number of functions.

    Attributes
    ----------
    functions : Iterable of Callable
        Functions to combine according to operations
    operations : Iterable of numpy.ufunc
        Operations to perform between functions; note that the standard order
        of operations will not be followed, but can be simulated by
        combinations of Combination objects. The operations parameter must have
        a length one less than the number of functions.

    """

    def __init__(self, functions, operations):
        self.functions = functions
        self.operations = operations

    def __call__(self, x):
        ans = self.functions[0](x)
        for i, operation in enumerate(self.operations):
            ans = operation(ans, self.functions[i + 1](x))
        return ans

    @property
    def functions(self):
        return self._functions

    @functions.setter
    def functions(self, functions):
        cv.check_type('functions', functions, Iterable, Callable)
        self._functions = functions

    @property
    def operations(self):
        return self._operations

    @operations.setter
    def operations(self, operations):
        cv.check_type('operations', operations, Iterable, np.ufunc)
        length = len(self.functions) - 1
        cv.check_length('operations', operations, length, length_max=length)
        self._operations = operations
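
# A minimal sketch (illustrative, not from the original file) of combining two
# functions with a numpy ufunc, assuming Polynomial and Combination are
# importable from openmc.data:
#
#     >>> import numpy as np
#     >>> from openmc.data import Polynomial, Combination
#     >>> product = Combination([Polynomial((0.0, 1.0)), Polynomial((2.0,))],
#     ...                       [np.multiply])
#     >>> float(product(3.0))                  # x * 2 evaluated at x = 3
#     6.0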

class Sum(Function1D):
    """Sum of multiple functions.

    This class allows you to create a callable object which represents the sum
    of other callable objects. This is used for redundant reactions whereby the
    cross section is defined as the sum of other cross sections.

    Parameters
    ----------
    functions : Iterable of Callable
        Functions which are to be added together

    Attributes
    ----------
    functions : Iterable of Callable
        Functions which are to be added together

    """

    def __init__(self, functions):
        self.functions = list(functions)

    def __call__(self, x):
        return sum(f(x) for f in self.functions)

    @property
    def functions(self):
        return self._functions

    @functions.setter
    def functions(self, functions):
        cv.check_type('functions', functions, Iterable, Callable)
        self._functions = functions

    def to_hdf5(self, group, name='xy'):
        """Write sum of functions to an HDF5 group

        .. versionadded:: 0.13.1

        Parameters
        ----------
        group : h5py.Group
            HDF5 group to write to
        name : str
            Name of the dataset to create

        """
        sum_group = group.create_group(name)
        sum_group.attrs['type'] = np.bytes_(type(self).__name__)
        sum_group.attrs['n'] = len(self.functions)
        for i, f in enumerate(self.functions):
            f.to_hdf5(sum_group, f'func_{i+1}')

    @classmethod
    def from_hdf5(cls, group):
        """Generate sum of functions from an HDF5 group

        .. versionadded:: 0.13.1

        Parameters
        ----------
        group : h5py.Group
            Group to read from

        Returns
        -------
        openmc.data.Sum
            Functions read from the group

        """
        n = group.attrs['n']
        functions = [
            Function1D.from_hdf5(group[f'func_{i+1}'])
            for i in range(n)
        ]
        return cls(functions)
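
# A minimal sketch (illustrative, not from the original file) of a redundant
# quantity built as the sum of two component functions, assuming Tabulated1D,
# Polynomial, and Sum are importable from openmc.data:
#
#     >>> from openmc.data import Tabulated1D, Polynomial, Sum
#     >>> total = Sum([Tabulated1D([0., 10.], [4., 5.]), Polynomial((1.0,))])
#     >>> float(total(10.0))
#     6.0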

class Regions1D(EqualityMixin):
    r"""Piecewise composition of multiple functions.

    This class allows you to create a callable object which is composed
    of multiple other callable objects, each applying to a specific interval.

    Parameters
    ----------
    functions : Iterable of Callable
        Functions which are to be combined in a piecewise fashion
    breakpoints : Iterable of float
        The values of the independent variable that define the domain of
        each function. The `i`\ th and `(i+1)`\ th values are the limits of the
        domain of the `i`\ th function. Values must be monotonically increasing.

    Attributes
    ----------
    functions : Iterable of Callable
        Functions which are to be combined in a piecewise fashion
    breakpoints : Iterable of float
        The breakpoints between each function

    """

    def __init__(self, functions, breakpoints):
        self.functions = functions
        self.breakpoints = breakpoints

    def __call__(self, x):
        i = np.searchsorted(self.breakpoints, x)
        if isinstance(x, Iterable):
            ans = np.empty_like(x)
            for j in range(len(i)):
                ans[j] = self.functions[i[j]](x[j])
            return ans
        else:
            return self.functions[i](x)

    @property
    def functions(self):
        return self._functions

    @functions.setter
    def functions(self, functions):
        cv.check_type('functions', functions, Iterable, Callable)
        self._functions = functions

    @property
    def breakpoints(self):
        return self._breakpoints

    @breakpoints.setter
    def breakpoints(self, breakpoints):
        cv.check_iterable_type('breakpoints', breakpoints, Real)
        self._breakpoints = breakpoints
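
# A minimal sketch (illustrative, not from the original file) of a piecewise
# composition that switches functions at x = 2, assuming Polynomial and
# Regions1D are importable from openmc.data:
#
#     >>> from openmc.data import Polynomial, Regions1D
#     >>> piecewise = Regions1D([Polynomial((0.0, 1.0)), Polynomial((2.0,))], [2.0])
#     >>> float(piecewise(1.0)), float(piecewise(3.0))
#     (1.0, 2.0)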

class ResonancesWithBackground(EqualityMixin):
    """Cross section in resolved resonance region.

    Parameters
    ----------
    resonances : openmc.data.Resonances
        Resolved resonance parameter data
    background : Callable
        Background cross section as a function of energy
    mt : int
        MT value of the reaction

    Attributes
    ----------
    resonances : openmc.data.Resonances
        Resolved resonance parameter data
    background : Callable
        Background cross section as a function of energy
    mt : int
        MT value of the reaction

    """

    def __init__(self, resonances, background, mt):
        self.resonances = resonances
        self.background = background
        self.mt = mt

    @property
    def background(self):
        return self._background

    @background.setter
    def background(self, background):
        cv.check_type('background cross section', background, Callable)
        self._background = background

    @property
    def mt(self):
        return self._mt

    @mt.setter
    def mt(self, mt):
        cv.check_type('MT value', mt, Integral)
        self._mt = mt

    @property
    def resonances(self):
        return self._resonances

    @resonances.setter
    def resonances(self, resonances):
        cv.check_type('resolved resonance parameters', resonances,
                      openmc.data.Resonances)
        self._resonances = resonances