sandialabs / pyttb / 15946078391

28 Jun 2025 04:29PM UTC coverage: 98.142% (-0.02%) from 98.163%

Pull Request #442: Nick/create problem
Merge f7ef55a3a into ae358d57e (via github / web-flow)

223 of 228 new or added lines in 3 files covered (97.81%).
4 existing lines in 1 file now uncovered.
5123 of 5220 relevant lines covered (98.14%).
0.98 hits per line.

Source File

/pyttb/create_problem.py: 98.19% covered

"""Create test problems for tensor factorizations."""

import logging
import math
from dataclasses import dataclass, field
from typing import Callable, Optional, Tuple, Union, cast, overload

import numpy as np
from numpy_groupies import aggregate as accumarray

import pyttb as ttb
from pyttb.pyttb_utils import Shape, parse_shape

solution_generator = Callable[[Tuple[int, ...]], np.ndarray]
core_generator_t = Callable[
    [Tuple[int, ...]], Union[ttb.tensor, ttb.sptensor, np.ndarray]
]


def randn(shape: Tuple[int, ...]) -> np.ndarray:
    """Stub for MATLAB randn.

    TODO move somewhere shareable.
    """
    return np.random.normal(0, 1, size=shape)


@dataclass
class BaseProblem:
    """Parameters general to all solutions.

    Attributes
    ----------
    shape:
        Tensor shape for generated problem.
    factor_generator:
        Method to generate factor matrices.
    symmetric:
        List of modes that should be symmetric.
        For instance, `[(1,2), (3,4)]` specifies that
        modes 1 and 2 have identical factor matrices, and modes 3 and 4
        also have identical factor matrices.
    num_factors:
        Number of factors.
    noise:
        Amount of Gaussian noise to add to solution.
        If data is sparse, noise is only added to nonzero entries.
    """

    shape: Shape = field(metadata={"doc": "A shape"})
    factor_generator: solution_generator = randn
    symmetric: Optional[list[Tuple[int, int]]] = None
    num_factors: Union[int, list[int], None] = None
    noise: float = 0.10

    def __post_init__(self):
        self.shape = ttb.pyttb_utils.parse_shape(self.shape)
        if not 0.0 <= self.noise <= 1.0:
            raise ValueError(f"Noise must be in [0,1] but got {self.noise}")
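
# Illustrative usage (editor's sketch, not part of the original module): the
# `symmetric` groups are indices into the list of factor matrices, so the tied
# modes must have matching dimensions. Using only names defined in this module:
#
#     params = CPProblem(shape=(4, 4, 3), num_factors=2, symmetric=[(0, 1)])
#     solution, data = create_problem(params)
#     # modes 0 and 1 of `solution` are generated from the same factor matrix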


@dataclass
class CPProblem(BaseProblem):
    """Parameters specifying CP Solutions.

    Attributes
    ----------
    shape:
        Tensor shape for generated problem.
    factor_generator:
        Method to generate factor matrices.
    symmetric:
        List of modes that should be symmetric.
        For instance, `[(1,2), (3,4)]` specifies that
        modes 1 and 2 have identical factor matrices, and modes 3 and 4
        also have identical factor matrices.
    num_factors:
        Number of factors.
    noise:
        Amount of Gaussian noise to add to solution.
        If data is sparse, noise is only added to nonzero entries.
    weight_generator:
        Method to generate weights for ktensor solution.
    sparse_generation:
        Generate a sparse tensor from the solution. Provide a number
        of nonzeros to be inserted. A value in range [0,1) will be
        interpreted as a ratio.
    """

    # NOTE inherited attributes are manually copy pasted, keep aligned between problems

    num_factors: int = 2
    weight_generator: solution_generator = np.random.random
    # TODO: This is in DataParams in MATLAB, but only works for CP problems so
    # feels more reasonable here
    sparse_generation: Optional[float] = None
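
# Illustrative usage (editor's sketch, not part of the original module):
# `sparse_generation` below 1 is treated as a fraction of the total number of
# entries, while a value of 1 or more is an absolute number of samples. Sparse
# generation requires nonnegative factors, so a nonnegative generator is used:
#
#     params = CPProblem(
#         shape=(50, 40, 30),
#         num_factors=3,
#         factor_generator=np.random.random,
#         sparse_generation=0.05,  # sample about 5% of the entries
#     )
#     solution, data = create_problem(params)  # data is a ttb.sptensor of counts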


@dataclass
class TuckerProblem(BaseProblem):
    """Parameters specifying Tucker Solutions.

    Attributes
    ----------
    shape:
        Tensor shape for generated problem.
    factor_generator:
        Method to generate factor matrices.
    symmetric:
        List of modes that should be symmetric.
        For instance, `[(1,2), (3,4)]` specifies that
        modes 1 and 2 have identical factor matrices, and modes 3 and 4
        also have identical factor matrices.
    num_factors:
        Number of factors.
    noise:
        Amount of Gaussian noise to add to solution.
        If data is sparse, noise is only added to nonzero entries.
    core_generator:
        Method to generate the core for ttensor solution.
    """

    # TODO post_init set to [2, 2, 2]
    num_factors: Optional[list[int]] = None
    core_generator: core_generator_t = randn

    def __post_init__(self):
        super().__post_init__()
        self.num_factors = self.num_factors or [2, 2, 2]
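
# Illustrative usage (editor's sketch, not part of the original module):
# `core_generator` is called with tuple(num_factors) and may return a numpy
# array (wrapped into a ttb.tensor) or a ttb.tensor/sptensor directly:
#
#     params = TuckerProblem(
#         shape=(5, 4, 3), num_factors=[3, 3, 2], core_generator=np.random.random
#     )
#     solution, data = create_problem(params)  # solution is a ttb.ttensor
#     # solution.core has shape (3, 3, 2)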


@dataclass
class ExistingSolution:
    """Parameters for using an existing tensor solution.

    Attributes
    ----------
    solution:
        Pre-existing tensor solution (ktensor or ttensor).
    noise:
        Amount of Gaussian noise to add to solution.
        If data is sparse, noise is only added to nonzero entries.
    """

    solution: Union[ttb.ktensor, ttb.ttensor]
    noise: float = 0.10

    def __post_init__(self):
        if not 0.0 <= self.noise <= 1.0:
            raise ValueError(f"Noise must be in [0,1] but got {self.noise}")

    @property
    def symmetric(self) -> None:
        """Get the symmetric modes from the solution."""
        # ExistingSolution doesn't support symmetry constraints
        return None


@dataclass
class ExistingTuckerSolution(ExistingSolution):
    """Parameters for using an existing Tucker (ttensor) solution.

    Attributes
    ----------
    solution:
        Pre-existing ttensor solution.
    noise:
        Amount of Gaussian noise to add to solution.
        If data is sparse, noise is only added to nonzero entries.
    """

    solution: ttb.ttensor


@dataclass
class ExistingCPSolution(ExistingSolution):
    """Parameters for using an existing CP (ktensor) solution.

    Attributes
    ----------
    solution:
        Pre-existing ktensor solution.
    noise:
        Amount of Gaussian noise to add to solution.
        If data is sparse, noise is only added to nonzero entries.
    sparse_generation:
        Generate a sparse tensor that can be scaled so that the
        column factors and weights are stochastic. Provide a number
        of nonzeros to be inserted. A value in range [0,1) will be
        interpreted as a ratio.
    """

    solution: ttb.ktensor
    sparse_generation: Optional[float] = None


@dataclass
class MissingData:
    """Parameters to control missing data.

    Attributes
    ----------
    missing_ratio:
        Proportion of missing data.
    missing_pattern:
        An explicit tensor representing missing data locations.
    sparse_model:
        Whether to generate sparse rather than dense missing data pattern.
        Only useful for large tensors that don't easily fit in memory and
        when missing ratio > 0.8.
    """

    missing_ratio: float = 0.0
    missing_pattern: Optional[Union[ttb.sptensor, ttb.tensor]] = None
    sparse_model: bool = False

    def __post_init__(self):
        if not 0.0 <= self.missing_ratio <= 1.0:
            raise ValueError(
                f"Missing ratio must be in [0,1] but got {self.missing_ratio}"
            )
        if self.missing_ratio > 0.0 and self.missing_pattern is not None:
            raise ValueError(
                "Can't set ratio and explicit pattern to specify missing data. "
                "Select one or the other."
            )

    def has_missing(self) -> bool:
        """Check if any form of missing data is requested."""
        return self.missing_ratio > 0.0 or self.missing_pattern is not None

    def raise_symmetric(self):
        """Raise for unsupported symmetry request."""
        if self.missing_ratio:
            raise ValueError("Can't generate a symmetric problem with missing data.")
        if self.sparse_model:
            raise ValueError("Can't generate sparse symmetric problem.")

    def get_pattern(self, shape: Shape) -> Union[None, ttb.tensor, ttb.sptensor]:
        """Generate a tensor pattern of missing data."""
        if self.missing_pattern is not None:
            if self.missing_pattern.shape != shape:
                raise ValueError(
                    "Missing pattern and problem shapes are not compatible."
                )
            return self.missing_pattern

        if self.missing_ratio == 0.0:
            # All usages of this are internal, should we just rule out this situation?
            return None
        if self.missing_ratio < 0.8 and self.sparse_model:
            logging.warning(
                "Setting sparse to false because there are"
                " fewer than 80% missing elements."
            )
        return _create_missing_data_pattern(
            shape, self.missing_ratio, self.sparse_model
        )
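
# Illustrative usage (editor's sketch, not part of the original module): missing
# data is specified either as a ratio or as an explicit indicator pattern, never
# both:
#
#     missing = MissingData(missing_ratio=0.25)
#     missing.has_missing()                     # True
#     pattern = missing.get_pattern((5, 4, 3))  # dense 0/1 ttb.tensor, 1 = observed
#     # MissingData(missing_ratio=0.25, missing_pattern=pattern) raises ValueError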


def _create_missing_data_pattern(
    shape: Shape, missing_ratio: float, sparse_model: bool = False
) -> Union[ttb.tensor, ttb.sptensor]:
    """Create a randomly missing element indicator tensor.

    Creates a binary tensor of specified size with 0's indicating missing data
    and 1's indicating valid data. Will only return a tensor that has at least
    one entry per (N-1)-dimensional slice.
    """
    shape = parse_shape(shape)
    ndim = len(shape)
    P = math.prod(shape)
    Q = math.ceil((1 - missing_ratio) * P)
    W: Union[ttb.tensor, ttb.sptensor]

    # Create tensor
    ## Keep iterating until tensor is created or we give up.
    # TODO: make range configurable?
    num_tries = 20
    for _ in range(num_tries):
        if sparse_model:
            # Start with 50% more than Q random subs
            # Note in original matlab to work out expected value of a*Q to guarantee
            # Q unique entries
            subs = np.unique(
                np.floor(
                    np.random.random((int(np.ceil(1.5 * Q)), len(shape))).dot(
                        np.diag(shape)
                    )
                ),
                axis=0,
            ).astype(int)
            # Check if there are too many unique subs
            if len(subs) > Q:
                # TODO: check if note from matlab still relevant
                # Note in original matlab: unique orders the subs and would bias toward
                # first subs with lower values, so we sample to cut back
                idx = np.random.permutation(subs.shape[0])
                subs = subs[idx[:Q]]
            elif subs.shape[0] < Q:
                logging.warning(
                    f"Only generated {subs.shape[0]} of {Q} desired subscripts"
                )
            W = ttb.sptensor(
                subs,
                np.ones(
                    (len(subs), 1),
                ),
                shape=shape,
            )
        else:
            # Compute the linear indices of the observed entries.
            idx = np.random.permutation(P)
            idx = idx[:Q]
            W = ttb.tenzeros(shape)
            W[idx] = 1

        # Check if W has any empty slices
        isokay = True
        for n in range(ndim):
            all_but_n = np.arange(W.ndims)
            all_but_n = np.delete(all_but_n, n)
            collapse_W = W.collapse(all_but_n)
            if isinstance(collapse_W, np.ndarray):
                isokay &= bool(np.all(collapse_W))
            else:
                isokay &= bool(np.all(collapse_W.double()))

        # Quit if okay
        if isokay:
            break

    if not isokay:
        raise ValueError(
            f"After {num_tries} iterations, cannot produce a tensor with "
            f"{missing_ratio * 100}% missing data without an empty slice."
        )
    return W
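
# Editor's note (not part of the original module): for the dense model above the
# number of observed (1-valued) entries is Q = ceil((1 - missing_ratio) * P), and
# the retry loop keeps resampling until every (N-1)-dimensional slice has at
# least one observed entry, raising after num_tries failed attempts. For example:
#
#     W = _create_missing_data_pattern((5, 4, 3), 0.25)  # dense ttb.tensor, 45 ones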


@overload
def create_problem(
    problem_params: CPProblem, missing_params: Optional[MissingData] = None
) -> Tuple[
    ttb.ktensor, Union[ttb.tensor, ttb.sptensor]
]: ...  # pragma: no cover see coveragepy/issues/970


@overload
def create_problem(
    problem_params: TuckerProblem,
    missing_params: Optional[MissingData] = None,
) -> Tuple[ttb.ttensor, ttb.tensor]: ...  # pragma: no cover see coveragepy/issues/970


@overload
def create_problem(
    problem_params: ExistingSolution,
    missing_params: Optional[MissingData] = None,
) -> Tuple[
    Union[ttb.ktensor, ttb.ttensor], Union[ttb.tensor, ttb.sptensor]
]: ...  # pragma: no cover see coveragepy/issues/970


def create_problem(
    problem_params: Union[CPProblem, TuckerProblem, ExistingSolution],
    missing_params: Optional[MissingData] = None,
) -> Tuple[Union[ttb.ktensor, ttb.ttensor], Union[ttb.tensor, ttb.sptensor]]:
    """Generate a problem and solution.

    Arguments
    ---------
    problem_params:
        Parameters related to the problem to generate, or an existing solution.
    missing_params:
        Parameters to control missing data in the generated data/solution.

    Examples
    --------
    Base example params

    >>> shape = (5, 4, 3)

    Generate a CP problem

    >>> cp_specific_params = CPProblem(shape=shape, num_factors=3, noise=0.1)
    >>> no_missing_data = MissingData()
    >>> solution, data = create_problem(cp_specific_params, no_missing_data)
    >>> diff = (solution.full() - data).norm() / solution.full().norm()
    >>> bool(np.isclose(diff, 0.1))
    True

    Generate a Tucker problem

    >>> tucker_specific_params = TuckerProblem(shape, num_factors=[3, 3, 2], noise=0.1)
    >>> solution, data = create_problem(tucker_specific_params, no_missing_data)
    >>> diff = (solution.full() - data).norm() / solution.full().norm()
    >>> bool(np.isclose(diff, 0.1))
    True

    Use an existing solution

    >>> factor_matrices = [np.random.random((dim, 3)) for dim in shape]
    >>> weights = np.random.random(3)
    >>> existing_ktensor = ttb.ktensor(factor_matrices, weights)
    >>> existing_params = ExistingSolution(existing_ktensor, noise=0.1)
    >>> solution, data = create_problem(existing_params, no_missing_data)
    >>> assert solution is existing_ktensor
    """
    if missing_params is None:
        missing_params = MissingData()

    if problem_params.symmetric is not None:
        missing_params.raise_symmetric()

    solution = generate_solution(problem_params)

    data: Union[ttb.tensor, ttb.sptensor]
    if (
        isinstance(problem_params, (CPProblem, ExistingCPSolution))
        and problem_params.sparse_generation is not None
    ):
        if missing_params.has_missing():
            raise ValueError(
                f"Can't combine missing data {MissingData.__name__} and "
                f"sparse generation {CPProblem.__name__}."
            )
        solution = cast(ttb.ktensor, solution)
        solution, data = generate_data_sparse(solution, problem_params)
    elif missing_params.has_missing():
        pattern = missing_params.get_pattern(solution.shape)
        data = generate_data(solution, problem_params, pattern)
    else:
        data = generate_data(solution, problem_params)
    return solution, data
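
# Illustrative usage (editor's sketch, not part of the original module):
# combining a generated CP problem with randomly missing data; entries flagged
# as missing by the pattern are zeroed in both the noise and the data:
#
#     cp_params = CPProblem(shape=(5, 4, 3), num_factors=3, noise=0.1)
#     missing = MissingData(missing_ratio=0.25)
#     solution, data = create_problem(cp_params, missing)
#     # data is a dense ttb.tensor with about 25% of its entries exactly zero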


def generate_solution_factors(base_params: BaseProblem) -> list[np.ndarray]:
    """Generate the factor matrices for either type of solution."""
    # Get shape of final tensor
    shape = cast(Tuple[int, ...], base_params.shape)

    # Get shape of factors
    if isinstance(base_params.num_factors, int):
        nfactors = [base_params.num_factors] * len(shape)
    elif base_params.num_factors is not None:
        nfactors = base_params.num_factors
    else:
        raise ValueError("Num_factors shouldn't be None.")
    if len(nfactors) != len(shape):
        raise ValueError(
            "Num_factors should have the same length as shape but got "
            f"{nfactors} and {shape}"
        )
    factor_matrices = []
    for shape_i, nfactors_i in zip(shape, nfactors):
        factor_matrices.append(base_params.factor_generator((shape_i, nfactors_i)))

    if base_params.symmetric is not None:
        for grp in base_params.symmetric:
            for j in range(1, len(grp)):
                factor_matrices[grp[j]] = factor_matrices[grp[0]]

    return factor_matrices


@overload
def generate_solution(
    problem_params: TuckerProblem,
) -> ttb.ttensor: ...


@overload
def generate_solution(
    problem_params: CPProblem,
) -> ttb.ktensor: ...


@overload
def generate_solution(
    problem_params: ExistingSolution,
) -> Union[ttb.ktensor, ttb.ttensor]: ...


def generate_solution(
    problem_params: Union[CPProblem, TuckerProblem, ExistingSolution],
) -> Union[ttb.ktensor, ttb.ttensor]:
    """Generate problem solution."""
    if isinstance(problem_params, ExistingSolution):
        return problem_params.solution
    factor_matrices = generate_solution_factors(problem_params)
    # Create final model
    if isinstance(problem_params, TuckerProblem):
        nfactors = cast(list[int], problem_params.num_factors)
        generated_core = problem_params.core_generator(tuple(nfactors))
        if isinstance(generated_core, (ttb.tensor, ttb.sptensor)):
            core = generated_core
        else:
            core = ttb.tensor(generated_core)
        return ttb.ttensor(core, factor_matrices)
    elif isinstance(problem_params, CPProblem):
        weights = problem_params.weight_generator((problem_params.num_factors,))
        return ttb.ktensor(factor_matrices, weights)
    raise ValueError(f"Unsupported problem parameter type: {type(problem_params)=}")


@overload
def generate_data(
    solution: Union[ttb.ktensor, ttb.ttensor],
    problem_params: Union[BaseProblem, ExistingSolution],
    pattern: Optional[ttb.tensor] = None,
) -> ttb.tensor: ...  # pragma: no cover see coveragepy/issues/970


@overload
def generate_data(
    solution: Union[ttb.ktensor, ttb.ttensor],
    problem_params: Union[BaseProblem, ExistingSolution],
    pattern: ttb.sptensor,
) -> ttb.sptensor: ...  # pragma: no cover see coveragepy/issues/970


def generate_data(
    solution: Union[ttb.ktensor, ttb.ttensor],
    problem_params: Union[BaseProblem, ExistingSolution],
    pattern: Optional[Union[ttb.tensor, ttb.sptensor]] = None,
) -> Union[ttb.tensor, ttb.sptensor]:
    """Generate problem data."""
    shape = solution.shape
    Rdm: Union[ttb.tensor, ttb.sptensor]
    if pattern is not None:
        if isinstance(pattern, ttb.sptensor):
            Rdm = ttb.sptensor(pattern.subs, randn((pattern.nnz, 1)), pattern.shape)
            Z = pattern * solution
        elif isinstance(pattern, ttb.tensor):
            Rdm = pattern * ttb.tensor(randn(shape))
            Z = pattern * solution.full()
        else:
            raise ValueError(f"Unsupported sparsity pattern of type {type(pattern)}")
    else:
        # TODO don't we already have a randn tensor method?
        Rdm = ttb.tensor(randn(shape))
        Z = solution.full()
        if problem_params.symmetric is not None:
            # TODO Note in MATLAB code to follow up
            Rdm = Rdm.symmetrize(np.array(problem_params.symmetric))

    D = Z + problem_params.noise * Z.norm() * Rdm / Rdm.norm()
    # Make sure the final result is definitely symmetric
    if problem_params.symmetric is not None:
        D = D.symmetrize(np.array(problem_params.symmetric))
    return D
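
# Editor's note (not part of the original module): the noise model above is
# relative, D = Z + noise * ||Z|| * R / ||R||, so in the dense unsymmetrized case
# (D - Z).norm() / Z.norm() equals problem_params.noise exactly, which is what
# the doctests in create_problem check with np.isclose.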


def prosample(nsamples: int, prob: np.ndarray) -> np.ndarray:
    """Proportional Sampling."""
    bins = np.minimum(np.cumsum(np.array([0, *prob])), 1)
    bins[-1] = 1
    indices = np.digitize(np.random.random(nsamples), bins=bins)
    return indices - 1
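
# Illustrative behaviour (editor's sketch, not part of the original module):
# `prosample` draws `nsamples` indices distributed according to the probability
# vector `prob`:
#
#     draws = prosample(10000, np.array([0.2, 0.3, 0.5]))
#     np.bincount(draws, minlength=3)  # roughly array([2000, 3000, 5000])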


def generate_data_sparse(
    solution: ttb.ktensor,
    problem_params: Union[CPProblem, ExistingCPSolution],
) -> Tuple[ttb.ktensor, ttb.sptensor]:
    """Generate sparse CP data from a given solution."""
    # Error check on solution
    if np.any(solution.weights < 0):
        raise ValueError("All weights must be nonnegative.")
    if any(np.any(factor < 0) for factor in solution.factor_matrices):
        raise ValueError("All factor matrices must be nonnegative.")
    if problem_params.symmetric is not None:
        logging.warning("Symmetric constraints have been ignored.")
    if problem_params.sparse_generation is None:
        raise ValueError("Cannot generate sparse data without sparse_generation set.")

    # Convert solution to probability tensor
    # NOTE: Make copy since normalize modifies in place
    P = solution.copy().normalize(mode=0)
    eta = np.sum(P.weights)
    P.weights /= eta

    # Determine how many samples per component
    nedges = problem_params.sparse_generation
    if nedges < 1:
        nedges = np.round(nedges * math.prod(P.shape)).astype(int)
    nedges = int(nedges)
    nd = P.ndims
    nc = P.ncomponents
    csample = prosample(nedges, P.weights)
    # TODO check this
    csums = accumarray(csample, 1, size=nc)

    # Determine the subscripts for each randomly sampled entry
    shape = solution.shape
    subs: list[np.ndarray] = []
    for c in range(nc):
        nsample = csums[c]
        if nsample == 0:
            continue
        subs.append(np.zeros((nsample, nd), dtype=int))
        for d in range(nd):
            subs[-1][:, d] = prosample(nsample, P.factor_matrices[d][:, c])
    # TODO could sum csums and allocate in place with slicing
    allsubs = np.vstack(subs)
    # Assemble final tensor. Note that duplicates are summed.
    # TODO should we have sptenones for purposes like this?
    Z = ttb.sptensor.from_aggregator(
        allsubs,
        np.ones(
            (len(allsubs), 1),
        ),
        shape=shape,
    )

    # Rescale the solution so that it is proportional to the number of edges inserted
    solution = P
    solution.weights *= nedges

    # TODO no noise introduced in this special case in MATLAB

    return solution, Z
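
# Illustrative usage (editor's sketch, not part of the original module): sparse
# generation from an existing nonnegative ktensor; the returned solution is the
# probability-normalized ktensor rescaled by the number of sampled entries, and
# the data is a count sptensor whose values sum to that number:
#
#     factors = [np.random.random((dim, 2)) for dim in (20, 15, 10)]
#     model = ttb.ktensor(factors, np.random.random(2))
#     params = ExistingCPSolution(model, noise=0.0, sparse_generation=500)
#     solution, data = create_problem(params)
#     # data.vals.sum() == 500 (duplicate samples are accumulated)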