
LearnLib / learnlib / build 6433387082 (push via github)
06 Oct 2023 03:10PM UTC coverage: 92.296% (-0.007% from 92.303%)
Commit by mtf90: update Falk's developer id
11573 of 12539 relevant lines covered (92.3%), 1.67 hits per line

Source file: /algorithms/active/ttt-vpa/src/main/java/de/learnlib/algorithms/ttt/vpa/TTTLearnerVPA.java
File coverage: 86.94%
/* Copyright (C) 2013-2023 TU Dortmund
 * This file is part of LearnLib, http://www.learnlib.de/.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package de.learnlib.algorithms.ttt.vpa;

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Deque;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;

import com.github.misberner.buildergen.annotations.GenerateBuilder;
import com.google.common.collect.Iterables;
import de.learnlib.acex.AcexAnalyzer;
import de.learnlib.algorithms.discriminationtree.hypothesis.vpa.AbstractHypTrans;
import de.learnlib.algorithms.discriminationtree.hypothesis.vpa.BlockList;
import de.learnlib.algorithms.discriminationtree.hypothesis.vpa.ContextPair;
import de.learnlib.algorithms.discriminationtree.hypothesis.vpa.DTNode;
import de.learnlib.algorithms.discriminationtree.hypothesis.vpa.HypLoc;
import de.learnlib.algorithms.discriminationtree.hypothesis.vpa.TransList;
import de.learnlib.algorithms.discriminationtree.vpa.DTLearnerVPA;
import de.learnlib.api.oracle.MembershipOracle;
import de.learnlib.api.query.DefaultQuery;
import de.learnlib.datastructure.discriminationtree.SplitData;
import net.automatalib.automata.vpa.StackContents;
import net.automatalib.automata.vpa.State;
import net.automatalib.words.VPAlphabet;
import net.automatalib.words.Word;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * @param <I>
 *         input symbol type
 */
public class TTTLearnerVPA<I> extends DTLearnerVPA<I> {

    private static final Logger LOGGER = LoggerFactory.getLogger(TTTLearnerVPA.class);

    private final BlockList<I> blockList = new BlockList<>();

    @GenerateBuilder
    public TTTLearnerVPA(VPAlphabet<I> alphabet, MembershipOracle<I, Boolean> oracle, AcexAnalyzer analyzer) {
        super(alphabet, oracle, analyzer);
    }

    @Override
    protected State<HypLoc<I>> getDefinitiveSuccessor(State<HypLoc<I>> baseState, Word<I> suffix) {
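        // Simulate the suffix on the current hypothesis while tracking the set of locations the
        // run may be in: a transition that is not yet definitive (non-tree) may lead to any
        // location in the subtree below its discrimination-tree target. We remember the last
        // position at which the run was still deterministic; if nondeterminism remains at the
        // end, the affected transitions are determinized before the definitive successor is
        // computed.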
        NonDetState<HypLoc<I>> curr = NonDetState.fromDet(baseState);
        int lastDet = 0;
        NonDetState<HypLoc<I>> lastDetState = curr;
        int i = 0;
        for (I sym : suffix) {
            if (alphabet.isCallSymbol(sym)) {
                Set<Integer> stackSyms = new HashSet<>();
                for (HypLoc<I> loc : curr.getLocations()) {
                    int stackSym = hypothesis.encodeStackSym(loc, sym);
                    stackSyms.add(stackSym);
                }
                NondetStackContents nsc = NondetStackContents.push(stackSyms, curr.getStack());
                curr = new NonDetState<>(Collections.singleton(hypothesis.getInitialLocation()), nsc);
            } else if (alphabet.isReturnSymbol(sym)) {
                Set<HypLoc<I>> succs = new HashSet<>();
                for (HypLoc<I> loc : curr.getLocations()) {
                    for (int stackSym : curr.getStack().peek()) {
                        AbstractHypTrans<I> trans = hypothesis.getReturnTransition(loc, sym, stackSym);
                        if (trans.isTree()) {
                            succs.add(trans.getTreeTarget());
                        } else {
                            Iterables.addAll(succs, trans.getNonTreeTarget().subtreeLocations());
                        }
                    }
                }
                curr = new NonDetState<>(succs, curr.getStack().pop());
            } else {
                Set<HypLoc<I>> succs = new HashSet<>();
                for (HypLoc<I> loc : curr.getLocations()) {
                    AbstractHypTrans<I> trans = hypothesis.getInternalTransition(loc, sym);
                    if (trans.isTree()) {
                        succs.add(trans.getTreeTarget());
                    } else {
                        Iterables.addAll(succs, trans.getNonTreeTarget().subtreeLocations());
                    }
                }
                curr = new NonDetState<>(succs, curr.getStack());
            }
            i++;
            if (!curr.isNonDet()) {
                lastDet = i;
                lastDetState = curr;
            }
        }

        if (lastDet < suffix.length()) {
            LOGGER.debug("last det: {}", lastDet);
            determinize(lastDetState.determinize(), suffix.subWord(lastDet));
        }
        return hypothesis.getSuccessor(baseState, suffix);
    }

    @Override
    protected boolean refineHypothesisSingle(DefaultQuery<I, Boolean> ceQuery) {
        Word<I> ceWord = ceQuery.getInput();

        Boolean out = computeHypothesisOutput(ceWord);

        if (Objects.equals(out, ceQuery.getOutput())) {
            return false;
        }

        OutputInconsistency<I> outIncons = new OutputInconsistency<>(hypothesis.getInitialLocation(),
                                                                     new ContextPair<>(Word.epsilon(), ceWord),
                                                                     ceQuery.getOutput());

        do {
            splitState(outIncons);
            closeTransitions();
            while (finalizeAny()) {
                closeTransitions();
            }

            outIncons = findOutputInconsistency();
        } while (outIncons != null);

        return true;
    }

    protected boolean computeHypothesisOutput(Word<I> word) {
        State<HypLoc<I>> curr = hypothesis.getInitialState();
        for (I sym : word) {
            curr = getAnySuccessor(curr, sym);
        }
        return hypothesis.isAccepting(curr);
    }

    private void splitState(OutputInconsistency<I> outIncons) {
        PrefixTransformAcex acex = deriveAcex(outIncons);
        int breakpoint = analyzer.analyzeAbstractCounterexample(acex);

        Word<I> acexSuffix = acex.getSuffix();
        Word<I> prefix = acexSuffix.prefix(breakpoint);
        I act = acexSuffix.getSymbol(breakpoint);
        Word<I> suffix = acexSuffix.subWord(breakpoint + 1);

        State<HypLoc<I>> state = hypothesis.getSuccessor(acex.getBaseState(), prefix);
        assert state != null;
        State<HypLoc<I>> succState = hypothesis.getSuccessor(state, act);
        assert succState != null;

        ContextPair<I> context = new ContextPair<>(transformAccessSequence(succState.getStackContents()), suffix);

        AbstractHypTrans<I> trans = hypothesis.getInternalTransition(state, act);
        assert trans != null;

        HypLoc<I> newLoc = makeTree(trans);
        DTNode<I> oldDtNode = succState.getLocation().getLeaf();
        openTransitions.addAll(oldDtNode.getIncoming());
        DTNode<I>.SplitResult children = oldDtNode.split(context, acex.effect(breakpoint), acex.effect(breakpoint + 1));
        oldDtNode.setTemp(true);
        if (!oldDtNode.getParent().isTemp()) {
            blockList.add(oldDtNode);
        }
        link(children.nodeOld, newLoc);
        link(children.nodeNew, succState.getLocation());
        initializeLocation(newLoc);
    }

    protected boolean finalizeAny() {
        assert openTransitions.isEmpty();

        GlobalSplitter<I> splitter = findSplitterGlobal();
        if (splitter != null) {
            finalizeDiscriminator(splitter.blockRoot, splitter.localSplitter);
            return true;
        }
        return false;
    }

    private @Nullable OutputInconsistency<I> findOutputInconsistency() {
        OutputInconsistency<I> best = null;

        for (HypLoc<I> loc : hypothesis.getLocations()) {
            int locAsLen = loc.getAccessSequence().length();
            DTNode<I> node = loc.getLeaf();
            while (!node.isRoot()) {
                boolean expectedOut = node.getParentOutcome();
                node = node.getParent();
                ContextPair<I> discr = node.getDiscriminator();
                if (best == null || discr.getLength() + locAsLen < best.totalLength()) {
                    boolean hypOut = computeHypothesisOutput(discr.getPrefix()
                                                                  .concat(loc.getAccessSequence(), discr.getSuffix()));
                    if (hypOut != expectedOut) {
                        best = new OutputInconsistency<>(loc, discr, expectedOut);
                    }
                }
            }
        }
        return best;
    }

    protected State<HypLoc<I>> getAnySuccessor(State<HypLoc<I>> state, I sym) {
        final VPAlphabet.SymbolType type = alphabet.getSymbolType(sym);
        final StackContents stackContents = state.getStackContents();

        switch (type) {
            case INTERNAL: {
                AbstractHypTrans<I> trans = hypothesis.getInternalTransition(state.getLocation(), sym);
                HypLoc<I> succLoc;
                if (trans.isTree()) {
                    succLoc = trans.getTreeTarget();
                } else {
                    succLoc = trans.getNonTreeTarget().subtreeLocsIterator().next();
                }
                return new State<>(succLoc, stackContents);
            }
            case CALL: {
                int stackSym = hypothesis.encodeStackSym(state.getLocation(), sym);
                return new State<>(hypothesis.getInitialLocation(), StackContents.push(stackSym, stackContents));
            }
            case RETURN: {
                assert stackContents != null;
                AbstractHypTrans<I> trans =
                        hypothesis.getReturnTransition(state.getLocation(), sym, stackContents.peek());
                HypLoc<I> succLoc;
                if (trans.isTree()) {
                    succLoc = trans.getTreeTarget();
                } else {
                    succLoc = trans.getNonTreeTarget().subtreeLocsIterator().next();
                }
                return new State<>(succLoc, stackContents.pop());
            }
            default:
                throw new IllegalStateException("Unhandled type " + type);
        }
    }

    protected PrefixTransformAcex deriveAcex(OutputInconsistency<I> outIncons) {
        PrefixTransformAcex acex =
                new PrefixTransformAcex(outIncons.location.getAccessSequence(), outIncons.discriminator);
        acex.setEffect(0, outIncons.expectedOut);
        acex.setEffect(acex.getLength() - 1, !outIncons.expectedOut);

        return acex;
    }

    /**
     * Determines a global splitter, i.e., a splitter for any block. This method may (but is not required to) employ
     * heuristics to obtain a splitter with a relatively short suffix length.
     *
     * @return a splitter for any of the blocks
     */
    private @Nullable GlobalSplitter<I> findSplitterGlobal() {
        DTNode<I> bestBlockRoot = null;
        Splitter<I> bestSplitter = null;

        for (DTNode<I> blockRoot : blockList) {
            Splitter<I> splitter = findSplitter(blockRoot);

            if (splitter != null && (bestSplitter == null ||
                                     splitter.getNewDiscriminatorLength() < bestSplitter.getNewDiscriminatorLength())) {
                bestSplitter = splitter;
                bestBlockRoot = blockRoot;
            }
        }

        if (bestSplitter == null) {
            return null;
        }

        return new GlobalSplitter<>(bestBlockRoot, bestSplitter);
    }

    /**
     * Finalize a discriminator. Given a block root and a {@link Splitter}, replace the discriminator at the block root
     * by the one derived from the splitter, and update the discrimination tree accordingly.
     *
     * @param blockRoot
     *         the block root whose discriminator to finalize
     * @param splitter
     *         the splitter to use for finalization
     */
    private void finalizeDiscriminator(DTNode<I> blockRoot, Splitter<I> splitter) {
        assert blockRoot.isBlockRoot();

        ContextPair<I> newDiscr = splitter.getNewDiscriminator();

        if (!blockRoot.getDiscriminator().equals(newDiscr)) {
            ContextPair<I> finalDiscriminator = prepareSplit(blockRoot, splitter);
            Map<Boolean, DTNode<I>> repChildren = new HashMap<>();
            for (Boolean label : blockRoot.getSplitData().getLabels()) {
                repChildren.put(label, extractSubtree(blockRoot, label));
            }
            blockRoot.replaceChildren(repChildren);

            blockRoot.setDiscriminator(finalDiscriminator);
        } else {
            LOGGER.debug("Weird..");
        }

        declareFinal(blockRoot);
    }

    /**
     * Determines a (local) splitter for a given block. This method may (but is not required to) employ heuristics to
     * obtain a splitter with a relatively short suffix.
     *
     * @param blockRoot
     *         the root of the block
     *
     * @return a splitter for this block, or {@code null} if no such splitter could be found.
     */
    private @Nullable Splitter<I> findSplitter(DTNode<I> blockRoot) {
        int alphabetSize =
                alphabet.getNumInternals() + alphabet.getNumCalls() * alphabet.getNumReturns() * hypothesis.size() * 2;

        @SuppressWarnings("unchecked")
        DTNode<I>[] lcas = new DTNode[alphabetSize];

        // One LCA slot per transition "column": the first getNumInternals() entries belong to the
        // internal symbols; after that, every (return symbol, call symbol, stack location) triple
        // occupies two consecutive slots, one for the return transition leaving loc and one for the
        // return transition whose stack symbol was pushed at loc.
        for (HypLoc<I> loc : blockRoot.subtreeLocations()) {
            int i = 0;
            for (I intSym : alphabet.getInternalAlphabet()) {
                DTNode<I> currLca = lcas[i];
                AbstractHypTrans<I> trans = hypothesis.getInternalTransition(loc, intSym);
                assert trans.getTargetNode() != null;
                if (currLca == null) {
                    lcas[i] = trans.getTargetNode();
                } else {
                    lcas[i] = dtree.leastCommonAncestor(currLca, trans.getTargetNode());
                }
                i++;
            }
            for (I retSym : alphabet.getReturnAlphabet()) {
                for (I callSym : alphabet.getCallAlphabet()) {
                    for (HypLoc<I> stackLoc : hypothesis.getLocations()) {
                        AbstractHypTrans<I> trans = hypothesis.getReturnTransition(loc, retSym, stackLoc, callSym);
                        DTNode<I> currLca = lcas[i];
                        assert trans.getTargetNode() != null;
                        if (currLca == null) {
                            lcas[i] = trans.getTargetNode();
                        } else {
                            lcas[i] = dtree.leastCommonAncestor(currLca, trans.getTargetNode());
                        }
                        i++;

                        trans = hypothesis.getReturnTransition(stackLoc, retSym, loc, callSym);
                        currLca = lcas[i];
                        if (currLca == null) {
                            lcas[i] = trans.getTargetNode();
                        } else {
                            lcas[i] = dtree.leastCommonAncestor(currLca, trans.getTargetNode());
                        }
                        i++;
                    }
                }
            }
        }

        int shortestLen = Integer.MAX_VALUE;
        Splitter<I> shortestSplitter = null;

        int i = 0;
        for (I intSym : alphabet.getInternalAlphabet()) {
            DTNode<I> currLca = lcas[i];
            if (!currLca.isLeaf() && !currLca.isTemp()) {
                Splitter<I> splitter = new Splitter<>(intSym, currLca);
                int newLen = splitter.getNewDiscriminatorLength();
                if (shortestSplitter == null || shortestLen > newLen) {
                    shortestSplitter = splitter;
                    shortestLen = newLen;
                }
            }
            i++;
        }
        for (I retSym : alphabet.getReturnAlphabet()) {
            for (I callSym : alphabet.getCallAlphabet()) {
                for (HypLoc<I> stackLoc : hypothesis.getLocations()) {
                    DTNode<I> currLca = lcas[i];
                    assert currLca != null;
                    if (!currLca.isLeaf() && !currLca.isTemp()) {
                        Splitter<I> splitter = new Splitter<>(retSym, stackLoc, callSym, false, currLca);
                        int newLen = splitter.getNewDiscriminatorLength();
                        if (shortestSplitter == null || shortestLen > newLen) {
                            shortestSplitter = splitter;
                            shortestLen = newLen;
                        }
                    }
                    i++;

                    currLca = lcas[i];
                    assert currLca != null;
                    if (!currLca.isLeaf() && !currLca.isTemp()) {
                        Splitter<I> splitter = new Splitter<>(callSym, stackLoc, retSym, true, currLca);
                        int newLen = splitter.getNewDiscriminatorLength();
                        if (shortestSplitter == null || shortestLen > newLen) {
                            shortestSplitter = splitter;
                            shortestLen = newLen;
                        }
                    }
                    i++;
                }
            }
        }

        return shortestSplitter;
    }

    /**
     * Prepare a split operation on a block, by marking all the nodes and transitions in the subtree (and annotating
     * them with {@link SplitData} objects).
     *
     * @param node
     *         the block root to be split
     * @param splitter
     *         the splitter to use for splitting the block
     *
     * @return the discriminator to use for splitting
     */
    private ContextPair<I> prepareSplit(DTNode<I> node, Splitter<I> splitter) {
        ContextPair<I> discriminator = splitter.getNewDiscriminator();

        Deque<DTNode<I>> dfsStack = new ArrayDeque<>();

        DTNode<I> succSeparator = splitter.succSeparator;

        dfsStack.push(node);
        assert node.getSplitData() == null;

        while (!dfsStack.isEmpty()) {
            DTNode<I> curr = dfsStack.pop();
            assert curr.getSplitData() == null;

            curr.setSplitData(new SplitData<>(TransList::new));

            for (AbstractHypTrans<I> trans : curr.getIncoming()) {
                Boolean outcome = query(trans, discriminator);
                curr.getSplitData().getIncoming(outcome).add(trans);
                markAndPropagate(curr, outcome);
            }

            if (curr.isInner()) {
                for (DTNode<I> child : curr.getChildren()) {
                    dfsStack.push(child);
                }
            } else {
                HypLoc<I> loc = curr.getData();
                assert loc != null;

                // Try to deduce the outcome from the DT target of
                // the respective transition
                AbstractHypTrans<I> trans = getSplitterTrans(loc, splitter);
                Boolean outcome = succSeparator.subtreeLabel(trans.getTargetNode());
                assert outcome != null;
                curr.getSplitData().setStateLabel(outcome);
                markAndPropagate(curr, outcome);
            }

        }

        return discriminator;
    }

    /**
     * Extract a (reduced) subtree containing all nodes with the given label from the subtree given by its root.
     * "Reduced" here refers to the fact that the resulting subtree will contain no inner nodes with only one child. The
     * tree returned by this method (represented by its root) will have as a parent node the root that was passed to
     * this method.
     *
     * @param root
     *         the root of the subtree from which to extract
     * @param label
     *         the label of the nodes to extract
     *
     * @return the extracted subtree
     */
    private DTNode<I> extractSubtree(DTNode<I> root, Boolean label) {
        assert root.getSplitData() != null;
        assert root.getSplitData().isMarked(label);

        Deque<ExtractRecord<I>> stack = new ArrayDeque<>();

        DTNode<I> firstExtracted = new DTNode<>(root, label);

        stack.push(new ExtractRecord<>(root, firstExtracted));
        while (!stack.isEmpty()) {
            ExtractRecord<I> curr = stack.pop();

            DTNode<I> original = curr.original;
            DTNode<I> extracted = curr.extracted;

            moveIncoming(extracted, original, label);

            if (original.isLeaf()) {
                if (Objects.equals(original.getSplitData().getStateLabel(), label)) {
                    link(extracted, original.getData());
                } else {
                    createNewState(extracted);
                }
                extracted.updateIncoming();
            } else {
                List<DTNode<I>> markedChildren = new ArrayList<>();

                for (DTNode<I> child : original.getChildren()) {
                    if (child.getSplitData().isMarked(label)) {
                        markedChildren.add(child);
                    }
                }

                if (markedChildren.size() > 1) {
                    Map<Boolean, DTNode<I>> childMap = new HashMap<>();
                    for (DTNode<I> c : markedChildren) {
                        Boolean childLabel = c.getParentOutcome();
                        DTNode<I> extractedChild = new DTNode<>(extracted, childLabel);
                        childMap.put(childLabel, extractedChild);
                        stack.push(new ExtractRecord<>(c, extractedChild));
                    }
                    extracted.split(original.getDiscriminator(), childMap);
                    extracted.updateIncoming();
                    extracted.setTemp(true);
                } else if (markedChildren.size() == 1) {
                    stack.push(new ExtractRecord<>(markedChildren.get(0), extracted));
                } else { // markedChildren.isEmpty()
                    createNewState(extracted);
                    extracted.updateIncoming();
                }
            }

            assert extracted.getSplitData() == null;
        }

        return firstExtracted;
    }

    protected void declareFinal(DTNode<I> blockRoot) {
        blockRoot.setTemp(false);
        blockRoot.setSplitData(null);

        blockRoot.removeFromBlockList();

        for (DTNode<I> subtree : blockRoot.getChildren()) {
            assert subtree.getSplitData() == null;
            //blockRoot.setChild(subtree.getParentLabel(), subtree);
            // Register as blocks, if they are non-trivial subtrees
            if (subtree.isInner()) {
                blockList.add(subtree);
            }
        }

        openTransitions.addAll(blockRoot.getIncoming());
    }

    /**
     * Marks a node, and propagates the label up to all nodes on the path from the block root to this node.
     *
     * @param node
     *         the node to mark
     * @param label
     *         the label to mark the node with
     */
    private static <I> void markAndPropagate(DTNode<I> node, Boolean label) {
        DTNode<I> curr = node;

        while (curr != null && curr.getSplitData() != null) {
            if (!curr.getSplitData().mark(label)) {
                return;
            }
            curr = curr.getParent();
        }
    }

    public AbstractHypTrans<I> getSplitterTrans(HypLoc<I> loc, Splitter<I> splitter) {
        switch (splitter.type) {
            case INTERNAL:
                return hypothesis.getInternalTransition(loc, splitter.symbol);
            case RETURN:
                return hypothesis.getReturnTransition(loc, splitter.symbol, splitter.location, splitter.otherSymbol);
            case CALL:
                return hypothesis.getReturnTransition(splitter.location, splitter.otherSymbol, loc, splitter.symbol);
            default:
                throw new IllegalStateException("Unhandled type " + splitter.type);
        }
    }

    private static <I> void moveIncoming(DTNode<I> newNode, DTNode<I> oldNode, Boolean label) {
        newNode.getIncoming().addAll(oldNode.getSplitData().getIncoming(label));
    }

    /**
     * Create a new state during extraction on-the-fly. This is required if a node in the DT has an incoming transition
     * with a certain label, but in its subtree there are no leaves with this label as their state label.
     *
     * @param newNode
     *         the extracted node
     */
    private void createNewState(DTNode<I> newNode) {
        LOGGER.debug("Create new state");
        AbstractHypTrans<I> newTreeTrans = newNode.getIncoming().chooseMinimal();
        assert newTreeTrans != null;

        HypLoc<I> newLoc = makeTree(newTreeTrans);
        link(newNode, newLoc);
        initializeLocation(newLoc);
    }

    protected void determinize(State<HypLoc<I>> state, Word<I> suffix) {
        State<HypLoc<I>> curr = state;
        for (I sym : suffix) {
            if (!alphabet.isCallSymbol(sym)) {
                AbstractHypTrans<I> trans = hypothesis.getInternalTransition(curr, sym);
                assert trans != null;
                if (!trans.isTree() && !trans.getNonTreeTarget().isLeaf()) {
                    updateDTTargets(Collections.singletonList(trans), true);
                }
            }
            curr = hypothesis.getSuccessor(curr, sym);
        }
    }

}
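For orientation, the sketch below shows how this learner is typically driven from client code. It is not part of the file above: the alphabet, membership oracle, counterexample analyzer, and the source of counterexamples (normally an equivalence oracle) are assumed to be supplied by the surrounding application, and the driver class and method names are illustrative only.

import de.learnlib.acex.AcexAnalyzer;
import de.learnlib.algorithms.ttt.vpa.TTTLearnerVPA;
import de.learnlib.api.oracle.MembershipOracle;
import de.learnlib.api.query.DefaultQuery;
import net.automatalib.words.VPAlphabet;
import net.automatalib.words.Word;

// Hypothetical driver class; only TTTLearnerVPA and the imported LearnLib/AutomataLib types
// are taken from the library itself.
final class TTTLearnerVPADriver<I> {

    private final TTTLearnerVPA<I> learner;

    TTTLearnerVPADriver(VPAlphabet<I> alphabet, MembershipOracle<I, Boolean> oracle, AcexAnalyzer analyzer) {
        // Same constructor signature as in the source file above (the @GenerateBuilder annotation
        // additionally generates a builder, which is not used here).
        this.learner = new TTTLearnerVPA<>(alphabet, oracle, analyzer);
        this.learner.startLearning();
    }

    // Feeds one counterexample (an input word together with its correct acceptance value) back
    // into the learner and reports whether the hypothesis changed as a result.
    boolean refineWith(Word<I> input, boolean correctOutput) {
        return learner.refineHypothesis(new DefaultQuery<>(input, correctOutput));
    }
}

In a complete setup, refineWith would be called in a loop with counterexamples obtained from an equivalence check against the current hypothesis, until no further counterexample is found.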