
openmc-dev / openmc · build 21097634619

17 Jan 2026 04:47PM UTC · coverage: 81.972% (-0.03% from 81.998%)

Pull Request #3413: More interpolation types in Tabular.
github · web-flow · Merge a47c54889 into 5847b0de2

17226 of 23984 branches covered (71.82%); branch coverage is included in the aggregate %.
59 of 107 new or added lines in 3 files covered (55.14%).
42 existing lines in 1 file are now uncovered.
55713 of 64996 relevant lines covered (85.72%).
43338800.0 hits per line.

Source file: /src/distribution.cpp (70.48% covered)

#include "openmc/distribution.h"

#include <algorithm> // for copy
#include <array>
#include <cmath>     // for sqrt, floor, max
#include <iterator>  // for back_inserter
#include <numeric>   // for accumulate
#include <stdexcept> // for runtime_error
#include <string>    // for string, stod

#include "openmc/constants.h"
#include "openmc/error.h"
#include "openmc/math_functions.h"
#include "openmc/random_dist.h"
#include "openmc/random_lcg.h"
#include "openmc/xml_interface.h"

namespace openmc {

//==============================================================================
// Helper function for computing importance weights from biased sampling
//==============================================================================

vector<double> compute_importance_weights(
  const vector<double>& p, const vector<double>& b)
{
  std::size_t n = p.size();

  // Normalize original probabilities
  double sum_p = std::accumulate(p.begin(), p.end(), 0.0);
  vector<double> p_norm(n);
  for (std::size_t i = 0; i < n; ++i) {
    p_norm[i] = p[i] / sum_p;
  }

  // Normalize bias probabilities
  double sum_b = std::accumulate(b.begin(), b.end(), 0.0);
  vector<double> b_norm(n);
  for (std::size_t i = 0; i < n; ++i) {
    b_norm[i] = b[i] / sum_b;
  }

  // Compute importance weights
  vector<double> weights(n);
  for (std::size_t i = 0; i < n; ++i) {
    weights[i] = (b_norm[i] == 0.0) ? INFTY : p_norm[i] / b_norm[i];
  }
  return weights;
}
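
// Worked example (illustrative values, not taken from the code base): for true
// probabilities p = {0.2, 0.3, 0.5} and a flat bias b = {1, 1, 1}, both are
// normalized (p already sums to 1, b_norm = {1/3, 1/3, 1/3}), giving
// weights = {0.6, 0.9, 1.5}. A sample drawn from the bias distribution then
// carries the corresponding weight so that weighted tallies stay unbiased.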

std::pair<double, double> Distribution::sample(uint64_t* seed) const
{
  if (bias_) {
    // Sample from the bias distribution and compute importance weight
    double val = bias_->sample_unbiased(seed);
    double wgt = this->evaluate(val) / bias_->evaluate(val);
    return {val, wgt};
  } else {
    // Unbiased sampling: return sampled value with weight 1.0
    double val = sample_unbiased(seed);
    return {val, 1.0};
  }
}
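
// Note: for a biased sample x drawn from density b, the statistical weight
// w(x) = p(x)/b(x) preserves expected values, since E_b[w(x) g(x)] = E_p[g(x)]
// for any tally score g. This is why evaluate() must be implemented both for
// the distribution being biased and for the distribution used as its bias.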

// PDF evaluation not supported for all distribution types
double Distribution::evaluate(double x) const
{
  throw std::runtime_error(
    "PDF evaluation not implemented for this distribution type.");
}

void Distribution::read_bias_from_xml(pugi::xml_node node)
{
  if (check_for_node(node, "bias")) {
    pugi::xml_node bias_node = node.child("bias");

    if (check_for_node(bias_node, "bias")) {
      openmc::fatal_error(
        "Distribution has a bias distribution with its own bias distribution. "
        "Please ensure bias distributions do not have their own bias.");
    }

    UPtrDist bias = distribution_from_xml(bias_node);
    this->set_bias(std::move(bias));
  }
}
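
// Sketch of the expected XML layout (the surrounding element name and the
// attribute-vs-child form of "type" and "parameters" are illustrative; this
// function only requires that a <bias> child node exist and itself parse as a
// distribution via distribution_from_xml()):
//
//   <energy type="maxwell" parameters="1.2895e6">
//     <bias type="uniform" parameters="1.0e6 2.0e6"/>
//   </energy>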

//==============================================================================
// DiscreteIndex implementation
//==============================================================================

DiscreteIndex::DiscreteIndex(pugi::xml_node node)
{
  auto params = get_node_array<double>(node, "parameters");
  std::size_t n = params.size() / 2;

  assign({params.data() + n, n});
}

DiscreteIndex::DiscreteIndex(span<const double> p)
{
  assign(p);
}

void DiscreteIndex::assign(span<const double> p)
{
  prob_.assign(p.begin(), p.end());
  this->init_alias();
}

void DiscreteIndex::init_alias()
{
  normalize();

  // The initialization and sampling method is based on Vose
  // (DOI: 10.1109/32.92917)
  // Vectors for large and small probabilities based on 1/n
  vector<size_t> large;
  vector<size_t> small;

  size_t n = prob_.size();

  // Set and allocate memory
  alias_.assign(n, 0);

  // Fill large and small vectors based on 1/n
  for (size_t i = 0; i < n; i++) {
    prob_[i] *= n;
    if (prob_[i] > 1.0) {
      large.push_back(i);
    } else {
      small.push_back(i);
    }
  }

  while (!large.empty() && !small.empty()) {
    int j = small.back();
    int k = large.back();

    // Remove last element of small
    small.pop_back();

    // Update probability and alias based on Vose's algorithm
    prob_[k] += prob_[j] - 1.0;
    alias_[j] = k;

    // Move large index to small vector, if it is no longer large
    if (prob_[k] < 1.0) {
      small.push_back(k);
      large.pop_back();
    }
  }
}
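
// Worked example of the alias table (illustrative, not from the code base):
// for p = {0.5, 0.3, 0.2} and n = 3, the scaled values are {1.5, 0.9, 0.6}.
// The loop pairs each "small" bin with a "large" donor, yielding
// prob_ = {1.0, 0.9, 0.6} and alias_ = {0, 0, 0}. Sampling then picks a bin u
// uniformly and returns u with probability prob_[u], otherwise alias_[u]:
// P(0) = (1.0 + 0.1 + 0.4)/3 = 0.5, P(1) = 0.9/3 = 0.3, P(2) = 0.6/3 = 0.2,
// which reproduces the original distribution in O(1) time per sample.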

size_t DiscreteIndex::sample(uint64_t* seed) const
{
  // Alias sampling of discrete distribution
  size_t n = prob_.size();
  if (n > 1) {
    size_t u = prn(seed) * n;
    if (prn(seed) < prob_[u]) {
      return u;
    } else {
      return alias_[u];
    }
  } else {
    return 0;
  }
}

void DiscreteIndex::normalize()
{
  // Renormalize density function so that it sums to unity. Note that we save
  // the integral of the distribution so that if it is used as part of another
  // distribution (e.g., Mixture), we know its relative strength.
  integral_ = std::accumulate(prob_.begin(), prob_.end(), 0.0);
  for (auto& p_i : prob_) {
    p_i /= integral_;
  }
}

//==============================================================================
// Discrete implementation
//==============================================================================

Discrete::Discrete(pugi::xml_node node)
{
  auto params = get_node_array<double>(node, "parameters");
  std::size_t n = params.size() / 2;

  // First half is x values, second half is probabilities
  x_.assign(params.begin(), params.begin() + n);
  const double* p = params.data() + n;

  // Check for bias
  if (check_for_node(node, "bias")) {
    // Get bias probabilities
    auto bias_params = get_node_array<double>(node, "bias");
    if (bias_params.size() != n) {
      openmc::fatal_error(
        "Size mismatch: Attempted to bias Discrete distribution with " +
        std::to_string(n) + " probability entries using a bias with " +
        std::to_string(bias_params.size()) +
        " entries. Please ensure distributions have the same size.");
    }

    // Compute importance weights
    vector<double> p_vec(p, p + n);
    weight_ = compute_importance_weights(p_vec, bias_params);

    // Initialize DiscreteIndex with bias probabilities for sampling
    di_.assign(bias_params);
  } else {
    // Unbiased case: weight_ stays empty
    di_.assign({p, n});
  }
}
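
// Sketch of the corresponding XML (values and the attribute form are
// illustrative): the first half of "parameters" gives the x values and the
// second half their probabilities; an optional "bias" array of the same length
// replaces the sampling probabilities, while the ratio p/b becomes the
// statistical weight:
//
//   <energy type="discrete" parameters="1.0e6 2.0e6 3.0e6 0.2 0.3 0.5"
//           bias="0.4 0.3 0.3"/>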

Discrete::Discrete(const double* x, const double* p, size_t n) : di_({p, n})
{
  x_.assign(x, x + n);
}

std::pair<double, double> Discrete::sample(uint64_t* seed) const
{
  size_t idx = di_.sample(seed);
  double wgt = weight_.empty() ? 1.0 : weight_[idx];
  return {x_[idx], wgt};
}

double Discrete::sample_unbiased(uint64_t* seed) const
{
  size_t idx = di_.sample(seed);
  return x_[idx];
}

//==============================================================================
// Uniform implementation
//==============================================================================

Uniform::Uniform(pugi::xml_node node)
{
  auto params = get_node_array<double>(node, "parameters");
  if (params.size() != 2) {
    fatal_error("Uniform distribution must have two "
                "parameters specified.");
  }

  a_ = params.at(0);
  b_ = params.at(1);

  read_bias_from_xml(node);
}

double Uniform::sample_unbiased(uint64_t* seed) const
{
  return a_ + prn(seed) * (b_ - a_);
}

double Uniform::evaluate(double x) const
{
  if (x <= a()) {
    return 0.0;
  } else if (x >= b()) {
    return 0.0;
  } else {
    return 1 / (b() - a());
  }
}

//==============================================================================
// PowerLaw implementation
//==============================================================================

PowerLaw::PowerLaw(pugi::xml_node node)
{
  auto params = get_node_array<double>(node, "parameters");
  if (params.size() != 3) {
    fatal_error("PowerLaw distribution must have three "
                "parameters specified.");
  }

  const double a = params.at(0);
  const double b = params.at(1);
  const double n = params.at(2);

  offset_ = std::pow(a, n + 1);
  span_ = std::pow(b, n + 1) - offset_;
  ninv_ = 1 / (n + 1);

  read_bias_from_xml(node);
}

double PowerLaw::evaluate(double x) const
{
  if (x <= a()) {
    return 0.0;
  } else if (x >= b()) {
    return 0.0;
  } else {
    // Keep n + 1 as a double so non-integer exponents are not truncated
    double pwr = n() + 1;
    double norm = pwr / span_;
    return norm * std::pow(std::fabs(x), n());
  }
}

double PowerLaw::sample_unbiased(uint64_t* seed) const
{
  return std::pow(offset_ + prn(seed) * span_, ninv_);
}
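
// Sampling derivation: on [a, b] the CDF is
// F(x) = (x^(n+1) - a^(n+1)) / (b^(n+1) - a^(n+1)) = (x^(n+1) - offset_) / span_,
// so setting F(x) = u with u ~ U(0, 1) and inverting gives
// x = (offset_ + u * span_)^(1/(n+1)), which is exactly what sample_unbiased()
// returns (ninv_ = 1/(n+1)).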
311

312
//==============================================================================
313
// Maxwell implementation
314
//==============================================================================
315

316
Maxwell::Maxwell(pugi::xml_node node)
97✔
317
{
318
  theta_ = std::stod(get_node_value(node, "parameters"));
97✔
319

320
  read_bias_from_xml(node);
97✔
321
}
97✔
322

323
double Maxwell::sample_unbiased(uint64_t* seed) const
223,872✔
324
{
325
  return maxwell_spectrum(theta_, seed);
223,872✔
326
}
327

328
double Maxwell::evaluate(double x) const
220,000✔
329
{
330
  double c = (2.0 / SQRT_PI) * std::pow(theta_, -1.5);
220,000✔
331
  return c * std::sqrt(x) * std::exp(-x / theta_);
220,000✔
332
}
333


//==============================================================================
// Watt implementation
//==============================================================================

Watt::Watt(pugi::xml_node node)
{
  auto params = get_node_array<double>(node, "parameters");
  if (params.size() != 2)
    openmc::fatal_error("Watt energy distribution must have two "
                        "parameters specified.");

  a_ = params.at(0);
  b_ = params.at(1);

  read_bias_from_xml(node);
}

double Watt::sample_unbiased(uint64_t* seed) const
{
  return watt_spectrum(a_, b_, seed);
}

double Watt::evaluate(double x) const
{
  double c =
    2.0 / (std::sqrt(PI * b_) * std::pow(a_, 1.5) * std::exp(a_ * b_ / 4.0));
  return c * std::exp(-x / a_) * std::sinh(std::sqrt(b_ * x));
}

//==============================================================================
// Normal implementation
//==============================================================================
Normal::Normal(pugi::xml_node node)
{
  auto params = get_node_array<double>(node, "parameters");
  if (params.size() != 2) {
    openmc::fatal_error("Normal energy distribution must have two "
                        "parameters specified.");
  }

  mean_value_ = params.at(0);
  std_dev_ = params.at(1);

  read_bias_from_xml(node);
}

double Normal::sample_unbiased(uint64_t* seed) const
{
  return normal_variate(mean_value_, std_dev_, seed);
}

double Normal::evaluate(double x) const
{
  // Gaussian PDF: 1/(sigma*sqrt(2*pi)) * exp(-(x - mu)^2 / (2*sigma^2))
  return (1.0 / (std::sqrt(2.0 * PI) * std_dev_)) *
         std::exp(-(std::pow((x - mean_value_), 2.0)) /
                  (2.0 * std::pow(std_dev_, 2.0)));
}

//==============================================================================
// Tabular implementation
//==============================================================================

Tabular::Tabular(pugi::xml_node node)
{
  if (check_for_node(node, "interpolation")) {
    std::string temp = get_node_value(node, "interpolation");
    if (temp == "histogram") {
      interp_ = Interpolation::histogram;
    } else if (temp == "linear-linear") {
      interp_ = Interpolation::lin_lin;
    } else if (temp == "log-linear") {
      interp_ = Interpolation::log_lin;
    } else if (temp == "log-log") {
      interp_ = Interpolation::log_log;
    } else {
      openmc::fatal_error(
        "Unsupported interpolation type for distribution: " + temp);
    }
  } else {
    interp_ = Interpolation::histogram;
  }

  // Read and initialize tabular distribution. If number of parameters is odd,
  // add an extra zero for the 'p' array.
  auto params = get_node_array<double>(node, "parameters");
  if (params.size() % 2 != 0) {
    params.push_back(0.0);
  }
  std::size_t n = params.size() / 2;
  const double* x = params.data();
  const double* p = x + n;
  init(x, p, n);

  read_bias_from_xml(node);
}

Tabular::Tabular(const double* x, const double* p, int n, Interpolation interp,
  const double* c)
  : interp_ {interp}
{
  init(x, p, n, c);
}

void Tabular::init(
  const double* x, const double* p, std::size_t n, const double* c)
{
  // Copy x/p arrays into vectors
  std::copy(x, x + n, std::back_inserter(x_));
  std::copy(p, p + n, std::back_inserter(p_));

  // Calculate cumulative distribution function
  if (c) {
    std::copy(c, c + n, std::back_inserter(c_));
  } else {
    c_.resize(n);
    c_[0] = 0.0;
    for (int i = 1; i < n; ++i) {
      if (interp_ == Interpolation::histogram) {
        c_[i] = c_[i - 1] + p_[i - 1] * (x_[i] - x_[i - 1]);
      } else if (interp_ == Interpolation::lin_lin) {
        c_[i] = c_[i - 1] + 0.5 * (p_[i - 1] + p_[i]) * (x_[i] - x_[i - 1]);
      } else if (interp_ == Interpolation::log_lin) {
        double m = std::log(p_[i] / p_[i - 1]) / (x_[i] - x_[i - 1]);
        c_[i] = c_[i - 1] + p_[i - 1] * (x_[i] - x_[i - 1]) *
                              exprel(m * (x_[i] - x_[i - 1]));
      } else if (interp_ == Interpolation::log_log) {
        double m = std::log((x_[i] * p_[i]) / (x_[i - 1] * p_[i - 1])) /
                   std::log(x_[i] / x_[i - 1]);
        c_[i] = c_[i - 1] + x_[i - 1] * p_[i - 1] *
                              std::log(x_[i] / x_[i - 1]) *
                              exprel(m * std::log(x_[i] / x_[i - 1]));
      } else {
        UNREACHABLE();
      }
    }
  }

  // Normalize density and distribution functions. Note that we save the
  // integral of the distribution so that if it is used as part of another
  // distribution (e.g., Mixture), we know its relative strength.
  integral_ = c_[n - 1];
  for (int i = 0; i < n; ++i) {
    p_[i] = p_[i] / integral_;
    c_[i] = c_[i] / integral_;
  }
}
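
// Note on the log_lin and log_log branches above (assuming exprel(z) is the
// helper (exp(z) - 1)/z from math_functions.h, with exprel(0) = 1): for
// log-linear interpolation the density within a bin is
// p(x) = p_[i-1] * exp(m * (x - x_[i-1])), whose exact integral over the bin
// is p_[i-1] * dx * exprel(m * dx). For log-log interpolation the density is
// the power law p(x) = p_[i-1] * (x / x_[i-1])^(m - 1), where m as computed
// above is one plus the power-law exponent, and the bin integral is
// x_[i-1] * p_[i-1] * log(x_[i] / x_[i-1]) * exprel(m * log(x_[i] / x_[i-1])).
// Using exprel keeps both expressions well behaved as m approaches zero.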

double Tabular::sample_unbiased(uint64_t* seed) const
{
  // Sample value of CDF
  double c = prn(seed);

  // Find first CDF bin which is above the sampled value
  double c_i = c_[0];
  int i;
  std::size_t n = c_.size();
  for (i = 0; i < n - 1; ++i) {
    if (c <= c_[i + 1])
      break;
    c_i = c_[i + 1];
  }

  // Determine bounding PDF values
  double x_i = x_[i];
  double p_i = p_[i];

  if (interp_ == Interpolation::histogram) {
    // Histogram interpolation
    if (p_i > 0.0) {
      return x_i + (c - c_i) / p_i;
    } else {
      return x_i;
    }
  } else if (interp_ == Interpolation::lin_lin) {
    // Linear-linear interpolation
    double x_i1 = x_[i + 1];
    double p_i1 = p_[i + 1];

    double m = (p_i1 - p_i) / (x_i1 - x_i);
    if (m == 0.0) {
      return x_i + (c - c_i) / p_i;
    } else {
      return x_i +
             (std::sqrt(std::max(0.0, p_i * p_i + 2 * m * (c - c_i))) - p_i) /
               m;
    }
  } else if (interp_ == Interpolation::log_lin) {
    // Log-linear interpolation
    double x_i1 = x_[i + 1];
    double p_i1 = p_[i + 1];

    double m = std::log(p_i1 / p_i) / (x_i1 - x_i);
    double f = (c - c_i) / p_i;
    return x_i + f * log1prel(m * f);
  } else if (interp_ == Interpolation::log_log) {
    // Log-log interpolation
    double x_i1 = x_[i + 1];
    double p_i1 = p_[i + 1];

    double m = std::log((x_i1 * p_i1) / (x_i * p_i)) / std::log(x_i1 / x_i);
    double f = (c - c_i) / (p_i * x_i);
    return x_i * std::exp(f * log1prel(m * f));
  } else {
    UNREACHABLE();
  }
}
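
// Note on the log_lin and log_log inversions above (assuming log1prel(z) is
// the helper log(1 + z)/z from math_functions.h, with log1prel(0) = 1): for
// log-linear interpolation the within-bin CDF is
// c_i + p_i * (exp(m * (x - x_i)) - 1) / m, so with f = (c - c_i)/p_i the
// solution is x = x_i + log(1 + m*f)/m = x_i + f * log1prel(m*f). For log-log
// interpolation the within-bin CDF is c_i + p_i * x_i * ((x/x_i)^m - 1) / m,
// so with f = (c - c_i)/(p_i * x_i) the solution is
// x = x_i * (1 + m*f)^(1/m) = x_i * exp(f * log1prel(m*f)). log1prel keeps
// both expressions well behaved as m approaches zero.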

double Tabular::evaluate(double x) const
{
  int i;

  if (interp_ == Interpolation::histogram) {
    i = std::upper_bound(x_.begin(), x_.end(), x) - x_.begin() - 1;
    if (i < 0 || i >= static_cast<int>(p_.size())) {
      return 0.0;
    } else {
      return p_[i];
    }
  } else {
    i = std::lower_bound(x_.begin(), x_.end(), x) - x_.begin() - 1;

    if (i < 0 || i >= static_cast<int>(p_.size()) - 1) {
      return 0.0;
    } else {
      double x0 = x_[i];
      double x1 = x_[i + 1];
      double p0 = p_[i];
      double p1 = p_[i + 1];

      double t = (x - x0) / (x1 - x0);
      return (1 - t) * p0 + t * p1;
    }
  }
}

//==============================================================================
// Equiprobable implementation
//==============================================================================

double Equiprobable::sample_unbiased(uint64_t* seed) const
{
  std::size_t n = x_.size();

  double r = prn(seed);
  int i = std::floor((n - 1) * r);

  // Interpolate between the bounding grid points of the sampled bin
  double xl = x_[i];
  double xr = x_[i + 1];
  return xl + ((n - 1) * r - i) * (xr - xl);
}

double Equiprobable::evaluate(double x) const
{
  double x_min = *std::min_element(x_.begin(), x_.end());
  double x_max = *std::max_element(x_.begin(), x_.end());

  if (x < x_min || x > x_max) {
    return 0.0;
  } else {
    return 1.0 / (x_max - x_min);
  }
}

//==============================================================================
// Mixture implementation
//==============================================================================

Mixture::Mixture(pugi::xml_node node)
{
  vector<double> probabilities;

  // First pass: collect distributions and their probabilities
  for (pugi::xml_node pair : node.children("pair")) {
    // Check that required data exists
    if (!pair.attribute("probability"))
      fatal_error("Mixture pair element does not have probability.");
    if (!pair.child("dist"))
      fatal_error("Mixture pair element does not have a distribution.");

    // Get probability and distribution
    double p = std::stod(pair.attribute("probability").value());
    auto dist = distribution_from_xml(pair.child("dist"));

    // Weight probability by the distribution's integral
    double weighted_prob = p * dist->integral();
    probabilities.push_back(weighted_prob);
    distribution_.push_back(std::move(dist));
  }

  // Save sum of weighted probabilities
  integral_ = std::accumulate(probabilities.begin(), probabilities.end(), 0.0);

  std::size_t n = probabilities.size();

  // Check for bias
  if (check_for_node(node, "bias")) {
    // Get bias probabilities
    auto bias_params = get_node_array<double>(node, "bias");
    if (bias_params.size() != n) {
      openmc::fatal_error(
        "Size mismatch: Attempted to bias Mixture distribution with " +
        std::to_string(n) + " components using a bias with " +
        std::to_string(bias_params.size()) +
        " entries. Please ensure distributions have the same size.");
    }

    // Compute importance weights
    weight_ = compute_importance_weights(probabilities, bias_params);

    // Initialize DiscreteIndex with bias probabilities for sampling
    di_.assign(bias_params);
  } else {
    // Unbiased case: weight_ stays empty
    di_.assign(probabilities);
  }
}

std::pair<double, double> Mixture::sample(uint64_t* seed) const
{
  size_t idx = di_.sample(seed);

  // Sample the chosen distribution
  auto [val, sub_wgt] = distribution_[idx]->sample(seed);

  // Multiply by component selection weight
  double mix_wgt = weight_.empty() ? 1.0 : weight_[idx];
  return {val, mix_wgt * sub_wgt};
}

double Mixture::sample_unbiased(uint64_t* seed) const
{
  size_t idx = di_.sample(seed);
  return distribution_[idx]->sample(seed).first;
}
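
// Sketch of the corresponding XML (values and the outer element name are
// illustrative): each <pair> carries a "probability" attribute and a <dist>
// child that is parsed recursively by distribution_from_xml(); an optional
// "bias" array with one entry per pair changes the component-selection
// probabilities, and the ratio p/b is folded into the statistical weight:
//
//   <source_energy type="mixture">
//     <pair probability="0.7"><dist type="watt" parameters="0.988e6 2.249e-6"/></pair>
//     <pair probability="0.3"><dist type="maxwell" parameters="1.2895e6"/></pair>
//   </source_energy>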

//==============================================================================
// Helper function
//==============================================================================

UPtrDist distribution_from_xml(pugi::xml_node node)
{
  if (!check_for_node(node, "type"))
    openmc::fatal_error("Distribution type must be specified.");

  // Determine type of distribution
  std::string type = get_node_value(node, "type", true, true);

  // Allocate extension of Distribution
  UPtrDist dist;
  if (type == "uniform") {
    dist = UPtrDist {new Uniform(node)};
  } else if (type == "powerlaw") {
    dist = UPtrDist {new PowerLaw(node)};
  } else if (type == "maxwell") {
    dist = UPtrDist {new Maxwell(node)};
  } else if (type == "watt") {
    dist = UPtrDist {new Watt(node)};
  } else if (type == "normal") {
    dist = UPtrDist {new Normal(node)};
  } else if (type == "discrete") {
    dist = UPtrDist {new Discrete(node)};
  } else if (type == "tabular") {
    dist = UPtrDist {new Tabular(node)};
  } else if (type == "mixture") {
    dist = UPtrDist {new Mixture(node)};
  } else if (type == "muir") {
    openmc::fatal_error(
      "'muir' distributions are now specified using the openmc.stats.muir() "
      "function in Python. Please regenerate your XML files.");
  } else {
    openmc::fatal_error("Invalid distribution type: " + type);
  }
  return dist;
}

} // namespace openmc