• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

LearnLib / learnlib / 19556545386

21 Nov 2025 01:10AM UTC coverage: 94.471%. First build
19556545386

Pull #155

github

web-flow
Merge d1e6e0721 into 7c236fec9
Pull Request #155: Bump Java Version to 17/25

59 of 83 new or added lines in 25 files covered. (71.08%)

12611 of 13349 relevant lines covered (94.47%)

1.73 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

88.66
/algorithms/active/ttt-vpa/src/main/java/de/learnlib/algorithm/ttt/vpa/TTTLearnerVPA.java
1
/* Copyright (C) 2013-2025 TU Dortmund University
2
 * This file is part of LearnLib <https://learnlib.de>.
3
 *
4
 * Licensed under the Apache License, Version 2.0 (the "License");
5
 * you may not use this file except in compliance with the License.
6
 * You may obtain a copy of the License at
7
 *
8
 *     http://www.apache.org/licenses/LICENSE-2.0
9
 *
10
 * Unless required by applicable law or agreed to in writing, software
11
 * distributed under the License is distributed on an "AS IS" BASIS,
12
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
 * See the License for the specific language governing permissions and
14
 * limitations under the License.
15
 */
16
package de.learnlib.algorithm.ttt.vpa;
17

18
import java.util.ArrayDeque;
19
import java.util.ArrayList;
20
import java.util.Collections;
21
import java.util.Deque;
22
import java.util.HashMap;
23
import java.util.HashSet;
24
import java.util.List;
25
import java.util.Map;
26
import java.util.Objects;
27
import java.util.Set;
28

29
import de.learnlib.acex.AcexAnalyzer;
30
import de.learnlib.algorithm.observationpack.vpa.OPLearnerVPA;
31
import de.learnlib.algorithm.observationpack.vpa.hypothesis.AbstractHypTrans;
32
import de.learnlib.algorithm.observationpack.vpa.hypothesis.ContextPair;
33
import de.learnlib.algorithm.observationpack.vpa.hypothesis.DTNode;
34
import de.learnlib.algorithm.observationpack.vpa.hypothesis.HypLoc;
35
import de.learnlib.algorithm.observationpack.vpa.hypothesis.TransList;
36
import de.learnlib.datastructure.discriminationtree.SplitData;
37
import de.learnlib.datastructure.list.IntrusiveList;
38
import de.learnlib.oracle.MembershipOracle;
39
import de.learnlib.query.DefaultQuery;
40
import de.learnlib.query.Query;
41
import de.learnlib.tooling.annotation.builder.GenerateBuilder;
42
import net.automatalib.alphabet.VPAlphabet;
43
import net.automatalib.automaton.vpa.SEVPA;
44
import net.automatalib.automaton.vpa.StackContents;
45
import net.automatalib.automaton.vpa.State;
46
import net.automatalib.common.util.collection.CollectionUtil;
47
import net.automatalib.word.Word;
48
import org.checkerframework.checker.nullness.qual.Nullable;
49

50
/**
51
 * A {@link SEVPA}-based adoption of the "TTT" algorithm.
52
 *
53
 * @param <I>
54
 *         input symbol type
55
 */
56
public class TTTLearnerVPA<I> extends OPLearnerVPA<I> {
2✔
57

58
    // Roots of discrimination-tree subtrees whose discriminators are still temporary
    // (set via setTemp(true) in splitState) and await finalization via finalizeAny().
    private final IntrusiveList<DTNode<I>> blockList = new IntrusiveList<>();

    /**
     * Constructor.
     *
     * @param alphabet
     *         the visibly push-down alphabet of the target language
     * @param oracle
     *         the membership oracle answering acceptance queries
     * @param analyzer
     *         the counterexample analyzer used to locate breakpoints in abstract counterexamples
     */
    @GenerateBuilder(defaults = BuilderDefaults.class)
    public TTTLearnerVPA(VPAlphabet<I> alphabet, MembershipOracle<I, Boolean> oracle, AcexAnalyzer analyzer) {
        super(alphabet, oracle, analyzer);
    }
64

65
    @Override
    protected State<HypLoc<I>> getDefinitiveSuccessor(State<HypLoc<I>> baseState, Word<I> suffix) {
        // Track the set of possible locations while reading 'suffix': a non-tree transition whose
        // discrimination-tree target is an inner node may point to several candidate locations,
        // making the run nondeterministic. Remember the last position where the run was still
        // deterministic so the remainder can be determinized afterwards.
        NonDetState<HypLoc<I>> curr = NonDetState.fromDet(baseState);
        int lastDet = 0;
        NonDetState<HypLoc<I>> lastDetState = curr;
        int i = 0;
        for (I sym : suffix) {
            if (alphabet.isCallSymbol(sym)) {
                // Calls: collect the encoded stack symbols of all candidate locations and
                // restart from the (unique) initial location with the extended stack.
                Set<Integer> stackSyms = new HashSet<>();
                for (HypLoc<I> loc : curr.getLocations()) {
                    int stackSym = hypothesis.encodeStackSym(loc, sym);
                    stackSyms.add(stackSym);
                }
                NondetStackContents nsc = NondetStackContents.push(stackSyms, curr.getStack());
                curr = new NonDetState<>(Collections.singleton(hypothesis.getInitialLocation()), nsc);
            } else if (alphabet.isReturnSymbol(sym)) {
                // Returns: combine every candidate location with every candidate stack symbol
                // and gather all possible successor locations.
                Set<HypLoc<I>> succs = new HashSet<>();
                for (HypLoc<I> loc : curr.getLocations()) {
                    for (int stackSym : curr.getStack().peek()) {
                        AbstractHypTrans<I> trans = hypothesis.getReturnTransition(loc, sym, stackSym);
                        if (trans.isTree()) {
                            succs.add(trans.getTreeTarget());
                        } else {
                            // Non-tree transition: all locations in the DT target's subtree are possible.
                            CollectionUtil.add(succs, trans.getNonTreeTarget().subtreeLocsIterator());
                        }
                    }
                }
                curr = new NonDetState<>(succs, curr.getStack().pop());
            } else {
                // Internal symbols: stack stays unchanged, only the location set evolves.
                Set<HypLoc<I>> succs = new HashSet<>();
                for (HypLoc<I> loc : curr.getLocations()) {
                    AbstractHypTrans<I> trans = hypothesis.getInternalTransition(loc, sym);
                    if (trans.isTree()) {
                        succs.add(trans.getTreeTarget());
                    } else {
                        CollectionUtil.add(succs, trans.getNonTreeTarget().subtreeLocsIterator());
                    }
                }
                curr = new NonDetState<>(succs, curr.getStack());
            }
            i++;
            if (!curr.isNonDet()) {
                // Still (or again) deterministic after i symbols; remember this checkpoint.
                lastDet = i;
                lastDetState = curr;
            }
        }

        if (lastDet < suffix.length()) {
            // The tail of the run was nondeterministic; refine transition targets along it.
            determinize(lastDetState.determinize(), suffix.subWord(lastDet));
        }
        return hypothesis.getSuccessor(baseState, suffix);
    }
117

118
    @Override
    protected boolean refineHypothesisSingle(DefaultQuery<I, Boolean> ceQuery) {
        Word<I> ceWord = ceQuery.getInput();

        Boolean out = computeHypothesisOutput(ceWord);

        // Not a counterexample for the current hypothesis: nothing to refine.
        if (Objects.equals(out, ceQuery.getOutput())) {
            return false;
        }

        // Wrap the counterexample as an output inconsistency rooted at the initial location,
        // with an empty left context and the full counterexample word as the suffix.
        OutputInconsistency<I> outIncons = new OutputInconsistency<>(hypothesis.getInitialLocation(),
                                                                     new ContextPair<>(Word.epsilon(), ceWord),
                                                                     ceQuery.getOutput());

        // TTT refinement loop: split a state, re-close the hypothesis, finalize temporary
        // discriminators, and repeat while any output inconsistency remains.
        do {
            splitState(outIncons);
            do {
                closeTransitions();
            } while (finalizeAny());

            outIncons = findOutputInconsistency();
        } while (outIncons != null);

        return true;
    }
143

144
    protected boolean computeHypothesisOutput(Word<I> word) {
145
        State<HypLoc<I>> curr = hypothesis.getInitialState();
2✔
146
        for (I sym : word) {
2✔
147
            curr = getAnySuccessor(curr, sym);
2✔
148
        }
2✔
149
        return hypothesis.isAccepting(curr);
2✔
150
    }
151

152
    /**
     * Splits a hypothesis state based on the given output inconsistency: analyzes the derived
     * abstract counterexample, turns the offending transition into a tree transition (creating a
     * new location), and splits the corresponding discrimination-tree leaf with a temporary
     * discriminator.
     *
     * @param outIncons
     *         the output inconsistency to resolve
     */
    private void splitState(OutputInconsistency<I> outIncons) {
        PrefixTransformAcex acex = deriveAcex(outIncons);
        // Breakpoint: index at which the abstract counterexample's effect changes.
        int breakpoint = analyzer.analyzeAbstractCounterexample(acex);

        Word<I> acexSuffix = acex.getSuffix();
        Word<I> prefix = acexSuffix.prefix(breakpoint);
        I act = acexSuffix.getSymbol(breakpoint);
        Word<I> suffix = acexSuffix.subWord(breakpoint + 1);

        State<HypLoc<I>> state = hypothesis.getSuccessor(acex.getBaseState(), prefix);
        assert state != null;
        State<HypLoc<I>> succState = hypothesis.getSuccessor(state, act);
        assert succState != null;

        // New (temporary) discriminator: transformed access sequence of the successor's stack
        // contents as left context, remainder of the counterexample as suffix.
        ContextPair<I> context = new ContextPair<>(transformAccessSequence(succState.getStackContents()), suffix);

        AbstractHypTrans<I> trans = hypothesis.getInternalTransition(state, act);
        assert trans != null;

        // Promote the transition to a tree transition, yielding a fresh location.
        HypLoc<I> newLoc = makeTree(trans);
        DTNode<I> oldDtNode = succState.getLocation().getLeaf();
        // All transitions previously pointing at the old leaf must be re-sifted.
        openTransitions.concat(oldDtNode.getIncoming());
        DTNode<I>.SplitResult children = oldDtNode.split(context, acex.effect(breakpoint), acex.effect(breakpoint + 1));
        oldDtNode.setTemp(true);
        // Only register as a block root if the parent is already final; otherwise the node is
        // part of an existing block.
        if (!oldDtNode.getParent().isTemp()) {
            blockList.add(oldDtNode);
        }
        link(children.nodeOld, newLoc);
        link(children.nodeNew, succState.getLocation());
        initializeLocation(newLoc);
    }
183

184
    protected boolean finalizeAny() {
185
        assert openTransitions.isEmpty();
2✔
186

187
        GlobalSplitter<I> splitter = findSplitterGlobal();
2✔
188
        if (splitter != null) {
2✔
189
            finalizeDiscriminator(splitter.blockRoot, splitter.localSplitter);
2✔
190
            return true;
2✔
191
        }
192
        return false;
2✔
193
    }
194

195
    /**
     * Searches for an output inconsistency: a location whose hypothesis output for some ancestor
     * discriminator disagrees with the outcome recorded in the discrimination tree. Among all
     * inconsistencies, one with a minimal total length (access sequence plus discriminator) is
     * preferred.
     *
     * @return a shortest-found output inconsistency, or {@code null} if none exists
     */
    private @Nullable OutputInconsistency<I> findOutputInconsistency() {
        OutputInconsistency<I> best = null;

        for (HypLoc<I> loc : hypothesis.getLocations()) {
            int locAsLen = loc.getAccessSequence().length();
            DTNode<I> node = loc.getLeaf();
            // Walk from the location's leaf up to the DT root, checking each ancestor discriminator.
            while (!node.isRoot()) {
                boolean expectedOut = node.getParentOutcome();
                node = node.getParent();
                ContextPair<I> discr = node.getDiscriminator();
                // Only evaluate candidates that could improve on the current best (query-saving).
                if (best == null || discr.getLength() + locAsLen < best.totalLength()) {
                    boolean hypOut = computeHypothesisOutput(discr.getPrefix()
                                                                  .concat(loc.getAccessSequence(), discr.getSuffix()));
                    if (hypOut != expectedOut) {
                        best = new OutputInconsistency<>(loc, discr, expectedOut);
                    }
                }
            }
        }
        return best;
    }
216

217
    protected State<HypLoc<I>> getAnySuccessor(State<HypLoc<I>> state, I sym) {
218
        final VPAlphabet.SymbolType type = alphabet.getSymbolType(sym);
2✔
219
        final StackContents stackContents = state.getStackContents();
2✔
220

221
        return switch (type) {
2✔
222
            case INTERNAL: {
223
                AbstractHypTrans<I> trans = hypothesis.getInternalTransition(state.getLocation(), sym);
2✔
224
                HypLoc<I> succLoc;
225
                if (trans.isTree()) {
2✔
226
                    succLoc = trans.getTreeTarget();
2✔
227
                } else {
228
                    succLoc = trans.getNonTreeTarget().subtreeLocsIterator().next();
2✔
229
                }
230
                yield new State<>(succLoc, stackContents);
2✔
231
            }
232
            case CALL: {
233
                int stackSym = hypothesis.encodeStackSym(state.getLocation(), sym);
2✔
234
                yield new State<>(hypothesis.getInitialLocation(), StackContents.push(stackSym, stackContents));
2✔
235
            }
236
            case RETURN: {
237
                assert stackContents != null;
2✔
238
                AbstractHypTrans<I> trans =
2✔
239
                        hypothesis.getReturnTransition(state.getLocation(), sym, stackContents.peek());
2✔
240
                HypLoc<I> succLoc;
241
                if (trans.isTree()) {
2✔
242
                    succLoc = trans.getTreeTarget();
2✔
243
                } else {
244
                    succLoc = trans.getNonTreeTarget().subtreeLocsIterator().next();
2✔
245
                }
246
                yield new State<>(succLoc, stackContents.pop());
2✔
247
            }
248
        };
249
    }
250

251
    private PrefixTransformAcex deriveAcex(OutputInconsistency<I> outIncons) {
252
        PrefixTransformAcex acex =
2✔
253
                new PrefixTransformAcex(outIncons.location.getAccessSequence(), outIncons.discriminator);
2✔
254
        acex.setEffect(0, outIncons.expectedOut);
2✔
255
        acex.setEffect(acex.getLength() - 1, !outIncons.expectedOut);
2✔
256

257
        return acex;
2✔
258
    }
259

260
    /**
261
     * Determines a global splitter, i.e., a splitter for any block. This method may (but is not required to) employ
262
     * heuristics to obtain a splitter with a relatively short suffix length.
263
     *
264
     * @return a splitter for any of the blocks
265
     */
266
    private @Nullable GlobalSplitter<I> findSplitterGlobal() {
267
        DTNode<I> bestBlockRoot = null;
2✔
268
        Splitter<I> bestSplitter = null;
2✔
269

270
        for (DTNode<I> blockRoot : blockList) {
2✔
271
            Splitter<I> splitter = findSplitter(blockRoot);
2✔
272

273
            if (splitter != null && (bestSplitter == null ||
2✔
274
                                     splitter.getNewDiscriminatorLength() < bestSplitter.getNewDiscriminatorLength())) {
×
275
                bestSplitter = splitter;
2✔
276
                bestBlockRoot = blockRoot;
2✔
277
            }
278
        }
2✔
279

280
        if (bestSplitter == null) {
2✔
281
            return null;
2✔
282
        }
283

284
        return new GlobalSplitter<>(bestBlockRoot, bestSplitter);
2✔
285
    }
286

287
    /**
288
     * Finalize a discriminator. Given a block root and a {@link Splitter}, replace the discriminator at the block root
289
     * by the one derived from the splitter, and update the discrimination tree accordingly.
290
     *
291
     * @param blockRoot
292
     *         the block root whose discriminator to finalize
293
     * @param splitter
294
     *         the splitter to use for finalization
295
     */
296
    private void finalizeDiscriminator(DTNode<I> blockRoot, Splitter<I> splitter) {
297
        assert blockRoot.isBlockRoot();
2✔
298

299
        ContextPair<I> newDiscr = splitter.getNewDiscriminator();
2✔
300

301
        assert !blockRoot.getDiscriminator().equals(newDiscr);
2✔
302

303
        ContextPair<I> finalDiscriminator = prepareSplit(blockRoot, splitter);
2✔
304
        Map<Boolean, DTNode<I>> repChildren = new HashMap<>();
2✔
305
        for (Boolean label : blockRoot.getSplitData().getLabels()) {
2✔
306
            repChildren.put(label, extractSubtree(blockRoot, label));
2✔
307
        }
2✔
308
        blockRoot.replaceChildren(repChildren);
2✔
309

310
        blockRoot.setDiscriminator(finalDiscriminator);
2✔
311

312
        declareFinal(blockRoot);
2✔
313
    }
2✔
314

315
    /**
316
     * Determines a (local) splitter for a given block. This method may (but is not required to) employ heuristics to
317
     * obtain a splitter with a relatively short suffix.
318
     *
319
     * @param blockRoot
320
     *         the root of the block
321
     *
322
     * @return a splitter for this block, or {@code null} if no such splitter could be found.
323
     */
324
    private @Nullable Splitter<I> findSplitter(DTNode<I> blockRoot) {
325
        int alphabetSize =
2✔
326
                alphabet.getNumInternals() + alphabet.getNumCalls() * alphabet.getNumReturns() * hypothesis.size() * 2;
2✔
327

328
        @SuppressWarnings("unchecked")
329
        DTNode<I>[] lcas = new DTNode[alphabetSize];
2✔
330

331
        for (HypLoc<I> loc : blockRoot.subtreeLocations()) {
2✔
332
            int i = 0;
2✔
333
            for (I intSym : alphabet.getInternalAlphabet()) {
2✔
334
                DTNode<I> currLca = lcas[i];
2✔
335
                AbstractHypTrans<I> trans = hypothesis.getInternalTransition(loc, intSym);
2✔
336
                assert trans.getTargetNode() != null;
2✔
337
                if (currLca == null) {
2✔
338
                    lcas[i] = trans.getTargetNode();
2✔
339
                } else {
340
                    lcas[i] = dtree.leastCommonAncestor(currLca, trans.getTargetNode());
2✔
341
                }
342
                i++;
2✔
343
            }
2✔
344
            for (I retSym : alphabet.getReturnAlphabet()) {
2✔
345
                for (I callSym : alphabet.getCallAlphabet()) {
2✔
346
                    for (HypLoc<I> stackLoc : hypothesis.getLocations()) {
2✔
347
                        AbstractHypTrans<I> trans = hypothesis.getReturnTransition(loc, retSym, stackLoc, callSym);
2✔
348
                        DTNode<I> currLca = lcas[i];
2✔
349
                        assert trans.getTargetNode() != null;
2✔
350
                        if (currLca == null) {
2✔
351
                            lcas[i] = trans.getTargetNode();
2✔
352
                        } else {
353
                            lcas[i] = dtree.leastCommonAncestor(currLca, trans.getTargetNode());
2✔
354
                        }
355
                        i++;
2✔
356

357
                        trans = hypothesis.getReturnTransition(stackLoc, retSym, loc, callSym);
2✔
358
                        currLca = lcas[i];
2✔
359
                        if (currLca == null) {
2✔
360
                            lcas[i] = trans.getTargetNode();
2✔
361
                        } else {
362
                            lcas[i] = dtree.leastCommonAncestor(currLca, trans.getTargetNode());
2✔
363
                        }
364
                        i++;
2✔
365
                    }
2✔
366
                }
2✔
367
            }
2✔
368
        }
2✔
369

370
        int shortestLen = Integer.MAX_VALUE;
2✔
371
        Splitter<I> shortestSplitter = null;
2✔
372

373
        int i = 0;
2✔
374
        for (I intSym : alphabet.getInternalAlphabet()) {
2✔
375
            DTNode<I> currLca = lcas[i];
2✔
376
            if (!currLca.isLeaf() && !currLca.isTemp()) {
2✔
377
                Splitter<I> splitter = new Splitter<>(intSym, currLca);
2✔
378
                int newLen = splitter.getNewDiscriminatorLength();
2✔
379
                if (shortestSplitter == null || shortestLen > newLen) {
2✔
380
                    shortestSplitter = splitter;
2✔
381
                    shortestLen = newLen;
2✔
382
                }
383
            }
384
            i++;
2✔
385
        }
2✔
386
        for (I retSym : alphabet.getReturnAlphabet()) {
2✔
387
            for (I callSym : alphabet.getCallAlphabet()) {
2✔
388
                for (HypLoc<I> stackLoc : hypothesis.getLocations()) {
2✔
389
                    DTNode<I> currLca = lcas[i];
2✔
390
                    assert currLca != null;
2✔
391
                    if (!currLca.isLeaf() && !currLca.isTemp()) {
2✔
392
                        Splitter<I> splitter = new Splitter<>(retSym, stackLoc, callSym, false, currLca);
2✔
393
                        int newLen = splitter.getNewDiscriminatorLength();
2✔
394
                        if (shortestSplitter == null || shortestLen > newLen) {
2✔
395
                            shortestSplitter = splitter;
×
396
                            shortestLen = newLen;
×
397
                        }
398
                    }
399
                    i++;
2✔
400

401
                    currLca = lcas[i];
2✔
402
                    assert currLca != null;
2✔
403
                    if (!currLca.isLeaf() && !currLca.isTemp()) {
2✔
404
                        Splitter<I> splitter = new Splitter<>(callSym, stackLoc, retSym, true, currLca);
2✔
405
                        int newLen = splitter.getNewDiscriminatorLength();
2✔
406
                        if (shortestSplitter == null || shortestLen > newLen) {
2✔
407
                            shortestSplitter = splitter;
×
408
                            shortestLen = newLen;
×
409
                        }
410
                    }
411
                    i++;
2✔
412
                }
2✔
413
            }
2✔
414
        }
2✔
415

416
        return shortestSplitter;
2✔
417
    }
418

419
    /**
420
     * Prepare a split operation on a block, by marking all the nodes and transitions in the subtree (and annotating
421
     * them with {@link SplitData} objects).
422
     *
423
     * @param node
424
     *         the block root to be split
425
     * @param splitter
426
     *         the splitter to use for splitting the block
427
     *
428
     * @return the discriminator to use for splitting
429
     */
430
    private ContextPair<I> prepareSplit(DTNode<I> node, Splitter<I> splitter) {
431
        ContextPair<I> discriminator = splitter.getNewDiscriminator();
2✔
432

433
        Deque<DTNode<I>> dfsStack = new ArrayDeque<>();
2✔
434
        List<SplitQuery<I>> queries = new ArrayList<>();
2✔
435

436
        DTNode<I> succSeparator = splitter.succSeparator;
2✔
437

438
        dfsStack.push(node);
2✔
439
        assert node.getSplitData() == null;
2✔
440

441
        while (!dfsStack.isEmpty()) {
2✔
442
            DTNode<I> curr = dfsStack.pop();
2✔
443
            assert curr.getSplitData() == null;
2✔
444

445
            curr.setSplitData(new SplitData<>(TransList::new));
2✔
446

447
            for (AbstractHypTrans<I> trans : curr.getIncoming()) {
2✔
448
                queries.add(new SplitQuery<>(trans, discriminator));
2✔
449
            }
2✔
450

451
            if (!queries.isEmpty()) {
2✔
452
                oracle.processQueries(queries);
2✔
453

454
                for (SplitQuery<I> query : queries) {
2✔
455
                    curr.getSplitData().getIncoming(query.output).add(query.transition);
2✔
456
                    markAndPropagate(curr, query.output);
2✔
457
                }
2✔
458

459
                queries.clear();
2✔
460
            }
461

462
            if (curr.isInner()) {
2✔
463
                for (DTNode<I> child : curr.getChildren()) {
2✔
464
                    dfsStack.push(child);
2✔
465
                }
2✔
466
            } else {
467
                HypLoc<I> loc = curr.getData();
2✔
468
                assert loc != null;
2✔
469

470
                // Try to deduct the outcome from the DT target of
471
                // the respective transition
472
                AbstractHypTrans<I> trans = getSplitterTrans(loc, splitter);
2✔
473
                Boolean outcome = succSeparator.subtreeLabel(trans.getTargetNode());
2✔
474
                assert outcome != null;
2✔
475
                curr.getSplitData().setStateLabel(outcome);
2✔
476
                markAndPropagate(curr, outcome);
2✔
477
            }
478

479
        }
2✔
480

481
        return discriminator;
2✔
482
    }
483

484
    /**
     * Extract a (reduced) subtree containing all nodes with the given label from the subtree given by its root.
     * "Reduced" here refers to the fact that the resulting subtree will contain no inner nodes with only one child. The
     * tree returned by this method (represented by its root) will have as a parent node the root that was passed to
     * this method.
     *
     * @param root
     *         the root of the subtree from which to extract
     * @param label
     *         the label of the nodes to extract
     *
     * @return the extracted subtree
     */
    private DTNode<I> extractSubtree(DTNode<I> root, Boolean label) {
        assert root.getSplitData() != null;
        assert root.getSplitData().isMarked(label);

        Deque<ExtractRecord<I>> stack = new ArrayDeque<>();

        DTNode<I> firstExtracted = new DTNode<>(root, label);

        stack.push(new ExtractRecord<>(root, firstExtracted));
        while (!stack.isEmpty()) {
            ExtractRecord<I> curr = stack.pop();

            DTNode<I> original = curr.original;
            DTNode<I> extracted = curr.extracted;

            // Transfer the incoming transitions with this label to the extracted node.
            moveIncoming(extracted, original, label);

            if (original.isLeaf()) {
                if (Objects.equals(original.getSplitData().getStateLabel(), label)) {
                    // Leaf's location belongs to this label: keep the association.
                    link(extracted, original.getData());
                } else {
                    // Leaf has incoming transitions with this label but a different state label:
                    // a new location must be created on the fly.
                    createNewState(extracted);
                }
                extracted.updateIncoming();
            } else {
                List<DTNode<I>> markedChildren = new ArrayList<>();

                for (DTNode<I> child : original.getChildren()) {
                    if (child.getSplitData().isMarked(label)) {
                        markedChildren.add(child);
                    }
                }

                if (markedChildren.size() > 1) {
                    // Several marked children: the inner node survives (still temporary).
                    Map<Boolean, DTNode<I>> childMap = new HashMap<>();
                    for (DTNode<I> c : markedChildren) {
                        Boolean childLabel = c.getParentOutcome();
                        DTNode<I> extractedChild = new DTNode<>(extracted, childLabel);
                        childMap.put(childLabel, extractedChild);
                        stack.push(new ExtractRecord<>(c, extractedChild));
                    }
                    extracted.split(original.getDiscriminator(), childMap);
                    extracted.updateIncoming();
                    extracted.setTemp(true);
                } else if (markedChildren.size() == 1) {
                    // Exactly one marked child: elide this inner node (reduction step).
                    stack.push(new ExtractRecord<>(markedChildren.get(0), extracted));
                } else { // markedChildren.isEmpty()
                    createNewState(extracted);
                    extracted.updateIncoming();
                }
            }

            assert extracted.getSplitData() == null;
        }

        return firstExtracted;
    }
554

555
    protected void declareFinal(DTNode<I> blockRoot) {
556
        blockRoot.setTemp(false);
2✔
557
        blockRoot.setSplitData(null);
2✔
558

559
        blockRoot.removeFromList();
2✔
560

561
        for (DTNode<I> subtree : blockRoot.getChildren()) {
2✔
562
            assert subtree.getSplitData() == null;
2✔
563
            //blockRoot.setChild(subtree.getParentLabel(), subtree);
564
            // Register as blocks, if they are non-trivial subtrees
565
            if (subtree.isInner()) {
2✔
566
                blockList.add(subtree);
×
567
            }
568
        }
2✔
569

570
        openTransitions.concat(blockRoot.getIncoming());
2✔
571
    }
2✔
572

573
    /**
574
     * Marks a node, and propagates the label up to all nodes on the path from the block root to this node.
575
     *
576
     * @param node
577
     *         the node to mark
578
     * @param label
579
     *         the label to mark the node with
580
     */
581
    private static <I> void markAndPropagate(DTNode<I> node, Boolean label) {
582
        DTNode<I> curr = node;
2✔
583

584
        while (curr != null && curr.getSplitData() != null) {
2✔
585
            if (!curr.getSplitData().mark(label)) {
2✔
586
                return;
2✔
587
            }
588
            curr = curr.getParent();
2✔
589
        }
590
    }
2✔
591

592
    /**
     * Retrieves, for a location, the transition that the given splitter discriminates on.
     *
     * @param loc
     *         the location whose transition to look up
     * @param splitter
     *         the splitter describing the transition
     *
     * @return the corresponding hypothesis transition
     */
    public AbstractHypTrans<I> getSplitterTrans(HypLoc<I> loc, Splitter<I> splitter) {
        return switch (splitter.type) {
            case INTERNAL -> hypothesis.getInternalTransition(loc, splitter.symbol);
            // RETURN: 'loc' is the return-transition source, splitter.location the stack location.
            case RETURN -> hypothesis.getReturnTransition(loc, splitter.symbol, splitter.location, splitter.otherSymbol);
            // CALL: roles swapped — 'loc' acts as the stack location of the return transition.
            case CALL -> hypothesis.getReturnTransition(splitter.location, splitter.otherSymbol, loc, splitter.symbol);
        };
    }
599

600
    private static <I> void moveIncoming(DTNode<I> newNode, DTNode<I> oldNode, Boolean label) {
601
        newNode.getIncoming().concat(oldNode.getSplitData().getIncoming(label));
2✔
602
    }
2✔
603

604
    /**
605
     * Create a new state during extraction on-the-fly. This is required if a node in the DT has an incoming transition
606
     * with a certain label, but in its subtree there are no leaves with this label as their state label.
607
     *
608
     * @param newNode
609
     *         the extracted node
610
     */
611
    private void createNewState(DTNode<I> newNode) {
612
        AbstractHypTrans<I> newTreeTrans = newNode.getIncoming().chooseMinimal();
×
613
        assert newTreeTrans != null;
×
614

615
        HypLoc<I> newLoc = makeTree(newTreeTrans);
×
616
        link(newNode, newLoc);
×
617
        initializeLocation(newLoc);
×
618
    }
×
619

620
    /**
     * Determinizes the run of the hypothesis along {@code suffix} starting from {@code state}, by
     * forcing the discrimination-tree targets of ambiguous (non-tree, non-leaf) transitions down
     * to leaves.
     *
     * @param state
     *         the (deterministic) state to start from
     * @param suffix
     *         the word along which to determinize
     */
    protected void determinize(State<HypLoc<I>> state, Word<I> suffix) {
        State<HypLoc<I>> curr = state;
        for (I sym : suffix) {
            // NOTE(review): this branch covers both internal AND return symbols, yet performs an
            // internal-transition lookup — presumably getInternalTransition(State, I) dispatches
            // on the symbol type internally; confirm.
            if (!alphabet.isCallSymbol(sym)) {
                AbstractHypTrans<I> trans = hypothesis.getInternalTransition(curr, sym);
                assert trans != null;
                // Only ambiguous transitions (DT target is an inner node) need refinement.
                if (!trans.isTree() && !trans.getNonTreeTarget().isLeaf()) {
                    updateDTTargets(Collections.singletonList(trans), true);
                }
            }
            curr = hypothesis.getSuccessor(curr, sym);
        }
    }
633

634
    /**
     * A membership query that evaluates a hypothesis transition against a (candidate) discriminator:
     * the query word is {@code discriminator.prefix · transition.accessSequence · discriminator.suffix}.
     * The answer is stored in {@link #output} for later retrieval (fields are read directly by the
     * enclosing class).
     *
     * @param <I>
     *         input symbol type
     */
    private static final class SplitQuery<I> extends Query<I, Boolean> {

        // Transition being classified; read by prepareSplit after the oracle answered.
        private final AbstractHypTrans<I> transition;
        // Discriminator the transition is evaluated against.
        private final ContextPair<I> discriminator;
        // Oracle's answer; null until answer() has been called.
        private Boolean output;

        SplitQuery(AbstractHypTrans<I> transition, ContextPair<I> discriminator) {
            this.transition = transition;
            this.discriminator = discriminator;
        }

        @Override
        public void answer(Boolean output) {
            this.output = output;
        }

        @Override
        public Word<I> getPrefix() {
            return discriminator.getPrefix().concat(transition.getAccessSequence());
        }

        @Override
        public Word<I> getSuffix() {
            return discriminator.getSuffix();
        }
    }
660
}
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc