• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

openmc-dev / openmc / 25588358527

09 May 2026 01:53AM UTC coverage: 81.388% (+0.1%) from 81.266%
25588358527

push

github

web-flow
Implement DecaySpectrum distribution type and utilize in R2S (#3930)

Co-authored-by: Copilot <copilot@github.com>

17751 of 25650 branches covered (69.2%)

Branch coverage included in aggregate %.

235 of 276 new or added lines in 7 files covered. (85.14%)

1 existing line in 1 file now uncovered.

58747 of 68342 relevant lines covered (85.96%)

47204330.4 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

71.34
/src/distribution.cpp
1
#include "openmc/distribution.h"
2

3
#include <algorithm> // for copy
4
#include <array>
5
#include <cmath>     // for sqrt, floor, max
6
#include <iterator>  // for back_inserter
7
#include <numeric>   // for accumulate
8
#include <stdexcept> // for runtime_error
9
#include <string>    // for string, stod
10
#include <unordered_set>
11

12
#include "openmc/chain.h"
13
#include "openmc/constants.h"
14
#include "openmc/error.h"
15
#include "openmc/math_functions.h"
16
#include "openmc/random_dist.h"
17
#include "openmc/random_lcg.h"
18
#include "openmc/xml_interface.h"
19

20
namespace {
21
std::unordered_set<std::string> decay_spectrum_missing_chain_nuclides;
22
}
23

24
namespace openmc {
25

26
//==============================================================================
27
// Helper function for computing importance weights from biased sampling
28
//==============================================================================
29

30
// Compute per-entry importance weights for biased sampling.
//
// Both the original probabilities p and the bias probabilities b are
// normalized to sum to one; the weight for entry i is then
// p_norm[i] / b_norm[i]. A zero bias probability yields an infinite weight
// (INFTY), flagging an entry that can never be sampled under the bias.
vector<double> compute_importance_weights(
  const vector<double>& p, const vector<double>& b)
{
  const std::size_t n = p.size();

  // Normalization constants for both distributions
  const double sum_p = std::accumulate(p.begin(), p.end(), 0.0);
  const double sum_b = std::accumulate(b.begin(), b.end(), 0.0);

  // Single pass: normalize each entry and form the ratio
  vector<double> weights(n);
  for (std::size_t i = 0; i < n; ++i) {
    const double p_norm = p[i] / sum_p;
    const double b_norm = b[i] / sum_b;
    weights[i] = (b_norm == 0.0) ? INFTY : p_norm / b_norm;
  }
  return weights;
}
56

57
std::pair<double, double> Distribution::sample(uint64_t* seed) const
1,050,059,150✔
58
{
59
  if (bias_) {
1,050,059,150✔
60
    // Sample from the bias distribution and compute importance weight
61
    double val = bias_->sample_unbiased(seed);
714,871✔
62
    double wgt = this->evaluate(val) / bias_->evaluate(val);
714,871✔
63
    return {val, wgt};
714,871✔
64
  } else {
65
    // Unbiased sampling: return sampled value with weight 1.0
66
    double val = sample_unbiased(seed);
1,049,344,279✔
67
    return {val, 1.0};
1,049,344,279✔
68
  }
69
}
70

71
// PDF evaluation not supported for all distribution types
72
// Base-class fallback for PDF evaluation. Derived classes that support
// evaluation (needed for bias weighting) override this; any distribution
// that reaches this implementation cannot be used as/with a bias.
// PDF evaluation not supported for all distribution types
double Distribution::evaluate(double x) const
{
  throw std::runtime_error(
    "PDF evaluation not implemented for this distribution type.");
}
77

78
// Read an optional <bias> child element and attach the resulting
// distribution to this one. Nested biases (a bias with its own bias) are
// rejected as a fatal error.
void Distribution::read_bias_from_xml(pugi::xml_node node)
{
  // Nothing to do when no bias element is present
  if (!check_for_node(node, "bias"))
    return;

  pugi::xml_node bias_node = node.child("bias");

  // A bias distribution may not itself be biased
  if (check_for_node(bias_node, "bias")) {
    openmc::fatal_error(
      "Distribution has a bias distribution with its own bias distribution. "
      "Please ensure bias distributions do not have their own bias.");
  }

  this->set_bias(distribution_from_xml(bias_node));
}
93

94
//==============================================================================
95
// DiscreteIndex implementation
96
//==============================================================================
97

98
// Construct from an XML node whose "parameters" array contains x values in
// the first half and probabilities in the second half; only the probability
// half is used here.
DiscreteIndex::DiscreteIndex(pugi::xml_node node)
{
  auto params = get_node_array<double>(node, "parameters");
  const std::size_t n = params.size() / 2;

  // The second half of the array holds the probabilities
  span<const double> probs {params.data() + n, n};
  assign(probs);
}
105

106
// Construct directly from a span of (possibly unnormalized) probabilities.
DiscreteIndex::DiscreteIndex(span<const double> p)
{
  this->assign(p);
}
110

111
// Replace the stored probabilities and rebuild the alias table used for
// O(1) sampling.
void DiscreteIndex::assign(span<const double> p)
{
  prob_.assign(p.begin(), p.end());
  init_alias();
}
116

117
// Build the alias table enabling constant-time sampling of the discrete
// distribution. After normalize(), each prob_[i] is scaled by n so that
// entries above/below 1 are classified as "large"/"small"; Vose's pairing
// then fills each small column to exactly 1 using probability mass donated
// by a large column, recording the donor in alias_.
void DiscreteIndex::init_alias()
{
  normalize();

  // The initialization and sampling method is based on Vose
  // (DOI: 10.1109/32.92917)
  // Vectors for large and small probabilities based on 1/n
  vector<size_t> large;
  vector<size_t> small;

  size_t n = prob_.size();

  // Set and allocate memory
  alias_.assign(n, 0);

  // Fill large and small vectors based on 1/n
  for (size_t i = 0; i < n; i++) {
    prob_[i] *= n;
    if (prob_[i] > 1.0) {
      large.push_back(i);
    } else {
      small.push_back(i);
    }
  }

  // Pair each small column j with a large column k; small shrinks by one
  // each iteration, so the loop terminates.
  while (!large.empty() && !small.empty()) {
    int j = small.back();
    int k = large.back();

    // Remove last element of small
    small.pop_back();

    // Update probability and alias based on Vose's algorithm
    // (k donates 1 - prob_[j] of its mass to fill column j)
    prob_[k] += prob_[j] - 1.0;
    alias_[j] = k;

    // Move large index to small vector, if it is no longer large
    if (prob_[k] < 1.0) {
      small.push_back(k);
      large.pop_back();
    }
  }
  // Any columns remaining in either list have prob_ ~= 1 (up to rounding)
  // and keep their default alias of 0, which is never selected for them.
}
160

161
size_t DiscreteIndex::sample(uint64_t* seed) const
71,242,201✔
162
{
163
  // Alias sampling of discrete distribution
164
  size_t n = prob_.size();
71,242,201✔
165
  if (n > 1) {
71,242,201✔
166
    size_t u = prn(seed) * n;
18,462,918✔
167
    if (prn(seed) < prob_[u]) {
18,462,918✔
168
      return u;
169
    } else {
170
      return alias_[u];
7,630,547✔
171
    }
172
  } else {
173
    return 0;
174
  }
175
}
176

177
void DiscreteIndex::normalize()
86,313✔
178
{
179
  // Renormalize density function so that it sums to unity. Note that we save
180
  // the integral of the distribution so that if it is used as part of another
181
  // distribution (e.g., Mixture), we know its relative strength.
182
  integral_ = std::accumulate(prob_.begin(), prob_.end(), 0.0);
86,313✔
183
  for (auto& p_i : prob_) {
1,703,951✔
184
    p_i /= integral_;
1,617,638✔
185
  }
186
}
86,313✔
187

188
//==============================================================================
189
// Discrete implementation
190
//==============================================================================
191

192
// Construct a discrete distribution from XML. The "parameters" array holds
// n x-values followed by n probabilities. An optional "bias" array of the
// same length n replaces the sampling probabilities; in that case weight_
// stores the per-entry importance weights that restore the original
// distribution in expectation.
Discrete::Discrete(pugi::xml_node node)
{
  auto params = get_node_array<double>(node, "parameters");
  std::size_t n = params.size() / 2;

  // First half is x values, second half is probabilities
  x_.assign(params.begin(), params.begin() + n);
  const double* p = params.data() + n;

  // Check for bias
  if (check_for_node(node, "bias")) {
    // Get bias probabilities
    auto bias_params = get_node_array<double>(node, "bias");
    if (bias_params.size() != n) {
      openmc::fatal_error(
        "Size mismatch: Attempted to bias Discrete distribution with " +
        std::to_string(n) + " probability entries using a bias with " +
        std::to_string(bias_params.size()) +
        " entries. Please ensure distributions have the same size.");
    }

    // Compute importance weights
    vector<double> p_vec(p, p + n);
    weight_ = compute_importance_weights(p_vec, bias_params);

    // Initialize DiscreteIndex with bias probabilities for sampling
    di_.assign(bias_params);
  } else {
    // Unbiased case: weight_ stays empty
    di_.assign({p, n});
  }
}
224

225
// Construct from parallel arrays of n values (x) and probabilities (p);
// the DiscreteIndex member handles normalization and alias setup.
Discrete::Discrete(const double* x, const double* p, size_t n) : di_({p, n})
{
  x_.clear();
  std::copy(x, x + n, std::back_inserter(x_));
}
229

230
std::pair<double, double> Discrete::sample(uint64_t* seed) const
64,420,824✔
231
{
232
  size_t idx = di_.sample(seed);
64,420,824✔
233
  double wgt = weight_.empty() ? 1.0 : weight_[idx];
64,420,824✔
234
  return {x_[idx], wgt};
64,420,824✔
235
}
236

237
double Discrete::sample_unbiased(uint64_t* seed) const
×
238
{
239
  size_t idx = di_.sample(seed);
×
240
  return x_[idx];
×
241
}
242

243
//==============================================================================
244
// Uniform implementation
245
//==============================================================================
246

247
// Construct a uniform distribution on [a, b] from XML; exactly two
// parameters (the bounds) are required. An optional <bias> element is
// honored via read_bias_from_xml.
Uniform::Uniform(pugi::xml_node node)
{
  auto params = get_node_array<double>(node, "parameters");
  if (params.size() != 2) {
    fatal_error("Uniform distribution must have two "
                "parameters specified.");
  }

  // Size was checked above, so unchecked indexing is safe here
  a_ = params[0];
  b_ = params[1];

  read_bias_from_xml(node);
}
260

261
double Uniform::sample_unbiased(uint64_t* seed) const
786,504✔
262
{
263
  return a_ + prn(seed) * (b_ - a_);
786,504✔
264
}
265

266
double Uniform::evaluate(double x) const
384,871✔
267
{
268
  if (x <= a()) {
384,871!
269
    return 0.0;
270
  } else if (x >= b()) {
384,871!
271
    return 0.0;
272
  } else {
273
    return 1 / (b() - a());
384,871✔
274
  }
275
}
276

277
//==============================================================================
278
// PowerLaw implementation
279
//==============================================================================
280

281
// Construct a power-law distribution p(x) ~ x^n on [a, b] from XML; exactly
// three parameters (a, b, n) are required. Precomputes the quantities used
// for inverse-CDF sampling: offset_ = a^(n+1), span_ = b^(n+1) - a^(n+1),
// and ninv_ = 1/(n+1).
PowerLaw::PowerLaw(pugi::xml_node node)
{
  auto params = get_node_array<double>(node, "parameters");
  if (params.size() != 3) {
    fatal_error("PowerLaw distribution must have three "
                "parameters specified.");
  }

  const double a = params.at(0);
  const double b = params.at(1);
  const double n = params.at(2);

  ninv_ = 1 / (n + 1);
  offset_ = std::pow(a, n + 1);
  span_ = std::pow(b, n + 1) - offset_;

  read_bias_from_xml(node);
}
299

300
double PowerLaw::evaluate(double x) const
274,871✔
301
{
302
  if (x <= a()) {
274,871!
303
    return 0.0;
304
  } else if (x >= b()) {
274,871!
305
    return 0.0;
306
  } else {
307
    int pwr = n() + 1;
274,871✔
308
    double norm = pwr / span_;
274,871✔
309
    return norm * std::pow(std::fabs(x), n());
274,871✔
310
  }
311
}
312

313
double PowerLaw::sample_unbiased(uint64_t* seed) const
277,093✔
314
{
315
  return std::pow(offset_ + prn(seed) * span_, ninv_);
277,093✔
316
}
317

318
//==============================================================================
319
// Maxwell implementation
320
//==============================================================================
321

322
// Construct a Maxwellian spectrum from XML; the single parameter is the
// temperature theta. An optional <bias> element is honored.
Maxwell::Maxwell(pugi::xml_node node)
{
  theta_ = std::stod(get_node_value(node, "parameters"));
  read_bias_from_xml(node);
}
328

329
double Maxwell::sample_unbiased(uint64_t* seed) const
223,872✔
330
{
331
  return maxwell_spectrum(theta_, seed);
223,872✔
332
}
333

334
double Maxwell::evaluate(double x) const
220,000✔
335
{
336
  double c = (2.0 / SQRT_PI) * std::pow(theta_, -1.5);
220,000✔
337
  return c * std::sqrt(x) * std::exp(-x / theta_);
220,000✔
338
}
339

340
//==============================================================================
341
// Watt implementation
342
//==============================================================================
343

344
// Construct a Watt fission spectrum from XML; exactly two parameters
// (a, b) are required. An optional <bias> element is honored.
Watt::Watt(pugi::xml_node node)
{
  auto params = get_node_array<double>(node, "parameters");
  if (params.size() != 2) {
    openmc::fatal_error("Watt energy distribution must have two "
                        "parameters specified.");
  }

  a_ = params.at(0);
  b_ = params.at(1);

  read_bias_from_xml(node);
}
356

357
double Watt::sample_unbiased(uint64_t* seed) const
14,375,176✔
358
{
359
  return watt_spectrum(a_, b_, seed);
14,375,176✔
360
}
361

362
double Watt::evaluate(double x) const
220,000✔
363
{
364
  double c =
220,000✔
365
    2.0 / (std::sqrt(PI * b_) * std::pow(a_, 1.5) * std::exp(a_ * b_ / 4.0));
220,000✔
366
  return c * std::exp(-x / a_) * std::sinh(std::sqrt(b_ * x));
220,000✔
367
}
368

369
//==============================================================================
370
// Normal implementation
371
//==============================================================================
372

373
// Construct a (possibly truncated) normal distribution directly from its
// mean, standard deviation, and truncation bounds. Infinite bounds
// indicate no truncation; compute_normalization() validates the bounds and
// sets the truncation correction factor.
Normal::Normal(double mean_value, double std_dev, double lower, double upper)
  : mean_value_ {mean_value}, std_dev_ {std_dev}, lower_ {lower}, upper_ {upper}
{
  compute_normalization();
}
378

379
// Construct a normal distribution from XML. Accepts either two parameters
// (mean, std_dev) for an untruncated distribution or four parameters
// (mean, std_dev, lower, upper) for a truncated one. An optional <bias>
// element is honored.
Normal::Normal(pugi::xml_node node)
{
  auto params = get_node_array<double>(node, "parameters");
  if (params.size() != 2 && params.size() != 4) {
    openmc::fatal_error("Normal energy distribution must have two "
                        "parameters (mean, std_dev) or four parameters "
                        "(mean, std_dev, lower, upper) specified.");
  }

  mean_value_ = params.at(0);
  std_dev_ = params.at(1);

  // Optional truncation bounds
  if (params.size() == 4) {
    lower_ = params.at(2);
    upper_ = params.at(3);
  } else {
    // Infinite bounds mark the distribution as untruncated
    lower_ = -INFTY;
    upper_ = INFTY;
  }

  compute_normalization();
  read_bias_from_xml(node);
}
403

404
// Validate the truncation bounds and precompute norm_factor_, the factor
// that rescales the Gaussian PDF so a truncated distribution integrates to
// one over [lower_, upper_]. For an untruncated distribution the factor is
// 1. Fatal errors are raised for inverted bounds or bounds that exclude
// essentially all probability mass.
void Normal::compute_normalization()
{
  // Validate bounds
  if (lower_ >= upper_) {
    openmc::fatal_error(
      "Normal distribution lower bound must be less than upper bound.");
  }

  // Check if truncation bounds are finite
  is_truncated_ = (lower_ > -INFTY || upper_ < INFTY);

  if (is_truncated_) {
    // Standardize the bounds and compute the enclosed probability mass
    double alpha = (lower_ - mean_value_) / std_dev_;
    double beta = (upper_ - mean_value_) / std_dev_;
    double cdf_diff = standard_normal_cdf(beta) - standard_normal_cdf(alpha);

    if (cdf_diff <= 0.0) {
      openmc::fatal_error(
        "Normal distribution truncation bounds exclude entire distribution.");
    }
    norm_factor_ = 1.0 / cdf_diff;
  } else {
    norm_factor_ = 1.0;
  }
}
429

430
// Sample from the (possibly truncated) normal distribution. The truncated
// case uses simple rejection against the full Gaussian.
double Normal::sample_unbiased(uint64_t* seed) const
{
  if (!is_truncated_) {
    return normal_variate(mean_value_, std_dev_, seed);
  }

  // Rejection sampling for truncated normal
  // NOTE(review): acceptance probability equals the mass inside
  // [lower_, upper_]; compute_normalization() guarantees it is positive, so
  // the loop terminates with probability one, but efficiency degrades for
  // narrow or far-tail bounds — confirm this is acceptable for expected use.
  double x;
  do {
    x = normal_variate(mean_value_, std_dev_, seed);
  } while (x < lower_ || x > upper_);
  return x;
}
443

444
// Evaluate the PDF of the (possibly truncated) normal distribution at x.
// Outside the truncation bounds the density is zero; inside, the Gaussian
// PDF is scaled by norm_factor_ (1 for an untruncated distribution).
double Normal::evaluate(double x) const
{
  // Return 0 outside truncation bounds
  if (x < lower_ || x > upper_) {
    return 0.0;
  }

  // Standard normal PDF value
  double pdf = (1.0 / (std::sqrt(2.0 * PI) * std_dev_)) *
               std::exp(-std::pow((x - mean_value_), 2.0) /
                        (2.0 * std::pow(std_dev_, 2.0)));

  // Apply normalization for truncation
  return pdf * norm_factor_;
}
459

460
//==============================================================================
461
// Tabular implementation
462
//==============================================================================
463

464
// Construct a tabular distribution from XML: reads the interpolation
// scheme (defaulting to histogram when absent), the packed x/p parameter
// array, and an optional <bias> distribution.
Tabular::Tabular(pugi::xml_node node)
{
  if (check_for_node(node, "interpolation")) {
    std::string temp = get_node_value(node, "interpolation");
    if (temp == "histogram") {
      interp_ = Interpolation::histogram;
    } else if (temp == "linear-linear") {
      interp_ = Interpolation::lin_lin;
    } else if (temp == "log-linear") {
      interp_ = Interpolation::log_lin;
    } else if (temp == "log-log") {
      interp_ = Interpolation::log_log;
    } else {
      openmc::fatal_error(
        "Unsupported interpolation type for distribution: " + temp);
    }
  } else {
    // No interpolation element: fall back to histogram
    interp_ = Interpolation::histogram;
  }

  // Read and initialize tabular distribution. If number of parameters is odd,
  // add an extra zero for the 'p' array.
  auto params = get_node_array<double>(node, "parameters");
  if (params.size() % 2 != 0) {
    params.push_back(0.0);
  }
  std::size_t n = params.size() / 2;
  const double* x = params.data();
  const double* p = x + n;
  init(x, p, n);

  read_bias_from_xml(node);
}
497

498
// Construct from raw arrays of n values (x) and densities (p) with the
// given interpolation scheme; c, when non-null, supplies a precomputed CDF
// so init() can skip the integration step.
Tabular::Tabular(const double* x, const double* p, int n, Interpolation interp,
  const double* c)
  : interp_ {interp}
{
  init(x, p, n, c);
}
504

505
// Initialize the tabulated density from raw arrays. When no precomputed
// CDF is supplied, the CDF is built by integrating p between consecutive
// x points using the rule matching the interpolation scheme. Both p_ and
// c_ are then normalized by the total integral, which is preserved in
// integral_ for use by composite distributions.
void Tabular::init(
  const double* x, const double* p, std::size_t n, const double* c)
{
  // Copy x/p arrays into vectors
  std::copy(x, x + n, std::back_inserter(x_));
  std::copy(p, p + n, std::back_inserter(p_));

  // Calculate cumulative distribution function
  if (c) {
    std::copy(c, c + n, std::back_inserter(c_));
  } else {
    c_.resize(n);
    c_[0] = 0.0;
    for (int i = 1; i < n; ++i) {
      if (interp_ == Interpolation::histogram) {
        // Constant density across the bin
        c_[i] = c_[i - 1] + p_[i - 1] * (x_[i] - x_[i - 1]);
      } else if (interp_ == Interpolation::lin_lin) {
        // Trapezoid rule, exact for a linearly varying density
        c_[i] = c_[i - 1] + 0.5 * (p_[i - 1] + p_[i]) * (x_[i] - x_[i - 1]);
      } else if (interp_ == Interpolation::log_lin) {
        // Exponential density between points; exprel(z) = (e^z - 1)/z keeps
        // the expression well-behaved as the slope m tends to zero
        double m = std::log(p_[i] / p_[i - 1]) / (x_[i] - x_[i - 1]);
        c_[i] = c_[i - 1] + p_[i - 1] * (x_[i] - x_[i - 1]) *
                              exprel(m * (x_[i] - x_[i - 1]));
      } else if (interp_ == Interpolation::log_log) {
        // Power-law density between points
        double m = std::log((x_[i] * p_[i]) / (x_[i - 1] * p_[i - 1])) /
                   std::log(x_[i] / x_[i - 1]);
        c_[i] = c_[i - 1] + x_[i - 1] * p_[i - 1] *
                              std::log(x_[i] / x_[i - 1]) *
                              exprel(m * std::log(x_[i] / x_[i - 1]));
      } else {
        UNREACHABLE();
      }
    }
  }

  // Normalize density and distribution functions. Note that we save the
  // integral of the distribution so that if it is used as part of another
  // distribution (e.g., Mixture), we know its relative strength.
  integral_ = c_[n - 1];
  for (int i = 0; i < n; ++i) {
    p_[i] = p_[i] / integral_;
    c_[i] = c_[i] / integral_;
  }
}
548

549
// Sample by inverting the tabulated CDF: draw c uniformly on [0,1), locate
// the CDF interval containing it with a linear scan, then invert the
// interpolation rule analytically within that interval.
double Tabular::sample_unbiased(uint64_t* seed) const
{
  // Sample value of CDF
  double c = prn(seed);

  // Find first CDF bin which is above the sampled value
  double c_i = c_[0];
  int i;
  std::size_t n = c_.size();
  for (i = 0; i < n - 1; ++i) {
    if (c <= c_[i + 1])
      break;
    c_i = c_[i + 1];
  }

  // Determine bounding PDF values
  double x_i = x_[i];
  double p_i = p_[i];

  if (interp_ == Interpolation::histogram) {
    // Histogram interpolation: constant density -> linear CDF inversion.
    // A zero-density bin returns the left edge.
    if (p_i > 0.0) {
      return x_i + (c - c_i) / p_i;
    } else {
      return x_i;
    }
  } else if (interp_ == Interpolation::lin_lin) {
    // Linear-linear interpolation: quadratic CDF, inverted in closed form;
    // the max(0, ...) guards against a slightly negative discriminant from
    // floating-point rounding
    double x_i1 = x_[i + 1];
    double p_i1 = p_[i + 1];

    double m = (p_i1 - p_i) / (x_i1 - x_i);
    if (m == 0.0) {
      return x_i + (c - c_i) / p_i;
    } else {
      return x_i +
             (std::sqrt(std::max(0.0, p_i * p_i + 2 * m * (c - c_i))) - p_i) /
               m;
    }
  } else if (interp_ == Interpolation::log_lin) {
    // Log-linear interpolation: exponential density; log1prel(z) =
    // log(1+z)/z stabilizes the inversion as the slope m tends to zero
    double x_i1 = x_[i + 1];
    double p_i1 = p_[i + 1];

    double m = std::log(p_i1 / p_i) / (x_i1 - x_i);
    double f = (c - c_i) / p_i;
    return x_i + f * log1prel(m * f);
  } else if (interp_ == Interpolation::log_log) {
    // Log-Log interpolation: power-law density inverted via logs
    double x_i1 = x_[i + 1];
    double p_i1 = p_[i + 1];

    double m = std::log((x_i1 * p_i1) / (x_i * p_i)) / std::log(x_i1 / x_i);
    double f = (c - c_i) / (p_i * x_i);
    return x_i * std::exp(f * log1prel(m * f));
  } else {
    UNREACHABLE();
  }
}
608

609
// Evaluate the tabulated (normalized) density at x. Histogram tables
// return the constant bin density; all other schemes interpolate p
// linearly between the bracketing table points. Returns 0 outside the
// tabulated range.
double Tabular::evaluate(double x) const
{
  int i;

  if (interp_ == Interpolation::histogram) {
    // Bin containing x (index of last table point <= x)
    i = std::upper_bound(x_.begin(), x_.end(), x) - x_.begin() - 1;
    if (i < 0 || i >= static_cast<int>(p_.size())) {
      return 0.0;
    } else {
      return p_[i];
    }
  } else {
    // NOTE(review): linear interpolation is applied here for every
    // non-histogram scheme, including log_lin and log_log — only an
    // approximation of those densities. Confirm this is acceptable where
    // evaluate() feeds bias weights.
    i = std::lower_bound(x_.begin(), x_.end(), x) - x_.begin() - 1;

    if (i < 0 || i >= static_cast<int>(p_.size()) - 1) {
      return 0.0;
    } else {
      double x0 = x_[i];
      double x1 = x_[i + 1];
      double p0 = p_[i];
      double p1 = p_[i + 1];

      double t = (x - x0) / (x1 - x0);
      return (1 - t) * p0 + t * p1;
    }
  }
}
636

637
//==============================================================================
638
// Equiprobable implementation
639
//==============================================================================
640

641
double Equiprobable::sample_unbiased(uint64_t* seed) const
×
642
{
643
  std::size_t n = x_.size();
×
644

645
  double r = prn(seed);
×
646
  int i = std::floor((n - 1) * r);
×
647

648
  double xl = x_[i];
×
649
  double xr = x_[i + i];
×
650
  return xl + ((n - 1) * r - i) * (xr - xl);
×
651
}
652

653
double Equiprobable::evaluate(double x) const
×
654
{
655
  double x_min = *std::min_element(x_.begin(), x_.end());
×
656
  double x_max = *std::max_element(x_.begin(), x_.end());
×
657

658
  if (x < x_min || x > x_max) {
×
659
    return 0.0;
660
  } else {
661
    return 1.0 / (x_max - x_min);
×
662
  }
663
}
664

665
//==============================================================================
666
// Mixture implementation
667
//==============================================================================
668

669
// Construct a mixture distribution from XML. Each <pair> child supplies a
// probability attribute and a <dist> element; the effective component
// probability is the XML probability times the component's own integral.
// An optional <bias> array (one entry per component) rebiases component
// selection, with importance weights stored in weight_.
Mixture::Mixture(pugi::xml_node node)
{
  vector<double> probabilities;

  // First pass: collect distributions and their probabilities
  for (pugi::xml_node pair : node.children("pair")) {
    // Check that required data exists
    if (!pair.attribute("probability"))
      fatal_error("Mixture pair element does not have probability.");
    if (!pair.child("dist"))
      fatal_error("Mixture pair element does not have a distribution.");

    // Get probability and distribution
    double p = std::stod(pair.attribute("probability").value());
    auto dist = distribution_from_xml(pair.child("dist"));

    // Weight probability by the distribution's integral
    double weighted_prob = p * dist->integral();
    probabilities.push_back(weighted_prob);
    distribution_.push_back(std::move(dist));
  }

  // Save sum of weighted probabilities
  integral_ = std::accumulate(probabilities.begin(), probabilities.end(), 0.0);

  std::size_t n = probabilities.size();

  // Check for bias
  if (check_for_node(node, "bias")) {
    // Get bias probabilities
    auto bias_params = get_node_array<double>(node, "bias");
    if (bias_params.size() != n) {
      openmc::fatal_error(
        "Size mismatch: Attempted to bias Mixture distribution with " +
        std::to_string(n) + " components using a bias with " +
        std::to_string(bias_params.size()) +
        " entries. Please ensure distributions have the same size.");
    }

    // Compute importance weights
    weight_ = compute_importance_weights(probabilities, bias_params);

    // Initialize DiscreteIndex with bias probabilities for sampling
    di_.assign(bias_params);
  } else {
    // Unbiased case: weight_ stays empty
    di_.assign(probabilities);
  }
}
718

719
std::pair<double, double> Mixture::sample(uint64_t* seed) const
223,564✔
720
{
721
  size_t idx = di_.sample(seed);
223,564✔
722

723
  // Sample the chosen distribution
724
  auto [val, sub_wgt] = distribution_[idx]->sample(seed);
223,564✔
725

726
  // Multiply by component selection weight
727
  double mix_wgt = weight_.empty() ? 1.0 : weight_[idx];
223,564!
728
  return {val, mix_wgt * sub_wgt};
223,564✔
729
}
730

731
// Sample a component and return only the sampled value.
// NOTE(review): this calls the component's sample(), which applies the
// component's own bias (if any) and then discards the weight — so for a
// mixture with biased components the value is drawn from the biased
// density. Confirm whether distribution_[idx]->sample_unbiased(seed) was
// intended instead.
double Mixture::sample_unbiased(uint64_t* seed) const
{
  size_t idx = di_.sample(seed);
  return distribution_[idx]->sample(seed).first;
}
736

737
//==============================================================================
738
// Helper function
739
//==============================================================================
740

741
// Factory: build a concrete Distribution from an XML node based on its
// "type" attribute. Unknown or retired types ("muir") are fatal errors.
UPtrDist distribution_from_xml(pugi::xml_node node)
{
  if (!check_for_node(node, "type"))
    openmc::fatal_error("Distribution type must be specified.");

  // Determine type of distribution
  std::string type = get_node_value(node, "type", true, true);

  // Allocate extension of Distribution
  UPtrDist dist;
  if (type == "uniform") {
    dist = UPtrDist {new Uniform(node)};
  } else if (type == "powerlaw") {
    dist = UPtrDist {new PowerLaw(node)};
  } else if (type == "maxwell") {
    dist = UPtrDist {new Maxwell(node)};
  } else if (type == "watt") {
    dist = UPtrDist {new Watt(node)};
  } else if (type == "normal") {
    dist = UPtrDist {new Normal(node)};
  } else if (type == "discrete") {
    dist = UPtrDist {new Discrete(node)};
  } else if (type == "tabular") {
    dist = UPtrDist {new Tabular(node)};
  } else if (type == "mixture") {
    dist = UPtrDist {new Mixture(node)};
  } else if (type == "decay_spectrum") {
    dist = UPtrDist {new DecaySpectrum(node)};
  } else if (type == "muir") {
    // Retired type: kept only to produce a helpful migration message
    openmc::fatal_error(
      "'muir' distributions are now specified using the openmc.stats.muir() "
      "function in Python. Please regenerate your XML files.");
  } else {
    openmc::fatal_error("Invalid distribution type: " + type);
  }
  return dist;
}
778

779
//==============================================================================
780
// DecaySpectrum implementation
781
//==============================================================================
782

783
// Construct a decay photon spectrum source from XML. Reads the region
// volume, parallel "nuclides"/"parameters" (name/atom-density) arrays,
// resolves each nuclide against the depletion chain, converts densities to
// absolute atom counts, and defers the emission-rate setup to init().
// Nuclides missing from the chain, lacking a photon spectrum, or with
// non-positive density are skipped (with at-most-once warnings).
DecaySpectrum::DecaySpectrum(pugi::xml_node node)
{
  // Read the region volume [cm^3] needed for absolute emission rate
  if (!check_for_node(node, "volume"))
    fatal_error("DecaySpectrum: 'volume' attribute is required.");
  double volume = std::stod(get_node_value(node, "volume"));

  // Read nuclide names and atom densities from XML
  vector<int> nuclide_indices;
  vector<double> atoms;
  auto names = get_node_array<std::string>(node, "nuclides");
  auto densities = get_node_array<double>(node, "parameters");
  if (names.size() != densities.size()) {
    fatal_error("DecaySpectrum nuclides and parameters must have the same "
                "length.");
  }

  for (size_t i = 0; i < names.size(); ++i) {
    const auto& name = names[i];
    double density = densities[i];

    // Look up nuclide in the depletion chain
    auto it = data::chain_nuclide_map.find(name);
    if (it == data::chain_nuclide_map.end()) {
      // Warn only the first time each missing nuclide is encountered
      // (file-scope set tracks the names already reported)
      if (decay_spectrum_missing_chain_nuclides.insert(name).second) {
        warning("Nuclide '" + name +
                "' appears in a DecaySpectrum source but is not present in "
                "the depletion chain; it will be ignored.");
      }
      continue;
    }

    int nuclide_index = it->second;
    const auto& chain_nuc = data::chain_nuclides[nuclide_index];
    const Distribution* photon_dist = chain_nuc->photon_energy();
    // Skip nuclides that have no photon decay spectrum
    if (!photon_dist)
      continue;

    // Skip non-positive densities and warn if negative
    if (density <= 0.0) {
      if (density < 0.0) {
        warning("Nuclide '" + name +
                "' has a negative density in a DecaySpectrum source; it will "
                "be ignored.");
      }
      continue;
    }

    // atoms = density [atom/b-cm] * 1e24 [b/cm^2] * volume [cm^3]
    double atoms_i = density * 1.0e24 * volume;

    nuclide_indices.push_back(nuclide_index);
    atoms.push_back(atoms_i);
  }

  init(std::move(nuclide_indices), atoms);
}
840

841
// Finish construction from resolved chain-nuclide indices and absolute
// atom counts: each nuclide's selection probability is its total photon
// emission rate (atoms * spectrum integral [photons/s/atom]); integral_
// becomes the total source rate [photons/s]. Fails fatally if nothing
// usable remains after the constructor's filtering.
void DecaySpectrum::init(
  vector<int> nuclide_indices, const vector<double>& atoms)
{
  if (nuclide_indices.size() != atoms.size()) {
    fatal_error("DecaySpectrum nuclide index and atoms arrays must have "
                "the same length.");
  }

  vector<double> probs;
  probs.reserve(nuclide_indices.size());
  for (size_t i = 0; i < nuclide_indices.size(); ++i) {
    // Distribution integral is in [photons/s/atom]; multiplying by atoms gives
    // the total emission rate [photons/s] for this nuclide.
    const auto* dist =
      data::chain_nuclides[nuclide_indices[i]]->photon_energy();
    probs.push_back(atoms[i] * dist->integral());
  }

  nuclide_indices_ = std::move(nuclide_indices);
  integral_ = std::accumulate(probs.begin(), probs.end(), 0.0);
  if (nuclide_indices_.empty() || integral_ <= 0.0) {
    fatal_error("DecaySpectrum source did not resolve any nuclides with decay "
                "photon spectra and positive atom densities. Ensure "
                "OPENMC_CHAIN_FILE is set and matches the nuclides in the "
                "source definition.");
  }
  di_.assign(probs);
}
869

870
// Sample a photon energy together with the parent nuclide that emitted it.
// The parent is chosen with probability proportional to its total photon
// emission rate; its decay spectrum then supplies the energy and weight.
DecaySpectrum::Sample DecaySpectrum::sample_with_parent(uint64_t* seed) const
{
  const size_t idx = di_.sample(seed);
  const int parent_nuclide = nuclide_indices_[idx];
  const auto* dist = data::chain_nuclides[parent_nuclide]->photon_energy();
  const auto sampled = dist->sample(seed);
  return {sampled.first, sampled.second, parent_nuclide};
}
878

NEW
879
std::pair<double, double> DecaySpectrum::sample(uint64_t* seed) const
×
880
{
NEW
881
  auto sample = sample_with_parent(seed);
×
NEW
882
  return {sample.energy, sample.weight};
×
883
}
884

885
// Total photon emission rate [photons/s] summed over all resolved
// nuclides, as computed in init().
double DecaySpectrum::integral() const
{
  return integral_;
}
889

NEW
890
// Sample only a photon energy, discarding the weight and parent nuclide.
double DecaySpectrum::sample_unbiased(uint64_t* seed) const
{
  return sample_with_parent(seed).energy;
}
894

895
} // namespace openmc
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc