• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

Alan-Jowett / ebpf-verifier / 21974328264

13 Feb 2026 04:10AM UTC coverage: 87.318% (+0.5%) from 86.783%
21974328264

Pull #161

github

web-flow
Merge bec52d039 into f1f1d42d7
Pull Request #161: Failure slice

531 of 680 new or added lines in 8 files covered. (78.09%)

1 existing line in 1 file now uncovered.

10025 of 11481 relevant lines covered (87.32%)

2944655.11 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

77.17
/src/result.cpp
1
// Copyright (c) Prevail Verifier contributors.
2
// SPDX-License-Identifier: MIT
3
#include <algorithm>
4
#include <map>
5
#include <ranges>
6
#include <regex>
7
#include <sstream>
8

9
#include "crab/ebpf_domain.hpp"
10
#include "ir/program.hpp"
11
#include "result.hpp"
12
#include "spec/ebpf_base.h"
13
#include "spec/vm_isa.hpp"
14

15
namespace prevail {
16

17
/// Lazily allocates (exactly once, thread-safely via static-local init) and
/// returns the stream private-storage slot index used to stash the invariant
/// filter on an ostream.
static int invariant_filter_index() {
    static const int slot = std::ios_base::xalloc();
    return slot;
}
23
/// Retrieves the RelevantState filter previously attached to `os` via the
/// invariant_filter stream manipulator, or nullptr if none was set.
const RelevantState* get_invariant_filter(std::ostream& os) {
    void* const stored = os.pword(invariant_filter_index());
    return static_cast<const RelevantState*>(stored);
}
27
std::ostream& operator<<(std::ostream& os, const invariant_filter& filter) {
12✔
28
    os.pword(invariant_filter_index()) = const_cast<void*>(static_cast<const void*>(filter.state));
12✔
29
    return os;
12✔
30
}
31

32
/// Decides whether a single textual constraint (one element of a printed
/// StringInvariant) mentions any register or stack slot tracked by this
/// RelevantState, and therefore should be shown in a filtered invariant dump.
/// Unparseable constraints are conservatively treated as relevant.
bool RelevantState::is_relevant_constraint(const std::string& constraint) const {
    // Extract the register or stack reference from the constraint.
    // Constraints look like: "r1.type=number", "r2.svalue=[0, 100]", "s[4088...4095].type=ctx"
    // Relational constraints: "r1.svalue-r8.svalue<=-100", "r0.svalue=r3.svalue+2"
    // Also handle: "packet_size=54", "meta_offset=[-4098, 0]"

    // Track whether we matched any known pattern.  When at least one pattern
    // fires but nothing is relevant we return false; the conservative
    // `return true` at the end only applies to truly unparseable constraints.
    bool parsed_any_pattern = false;

    // Check for ANY register mentioned in the constraint (not just at start).
    // This handles relational constraints like "r1.svalue-r8.svalue<=100".
    static const std::regex reg_pattern(R"(r(\d+)\.)");
    std::sregex_iterator it(constraint.begin(), constraint.end(), reg_pattern);
    std::sregex_iterator end;

    while (it != end) {
        parsed_any_pattern = true;
        const uint8_t reg_num = static_cast<uint8_t>(std::stoi((*it)[1].str()));
        if (registers.contains(Reg{reg_num})) {
            return true; // At least one relevant register is mentioned
        }
        ++it;
    }

    // Precompute absolute stack offsets once to avoid repeated conversion in loops
    // (tracked offsets are R10-relative; printed constraints use absolute positions).
    std::vector<int64_t> abs_stack_offsets;
    abs_stack_offsets.reserve(stack_offsets.size());
    for (const auto& rel_offset : stack_offsets) {
        abs_stack_offsets.push_back(EBPF_TOTAL_STACK_SIZE + rel_offset);
    }

    // Check for stack range pattern: s[start...end]
    static const std::regex stack_range_pattern(R"(s\[(\d+)\.\.\.(\d+)\])");
    std::sregex_iterator stack_it(constraint.begin(), constraint.end(), stack_range_pattern);
    while (stack_it != end) {
        parsed_any_pattern = true;
        const int64_t abs_start = std::stoll((*stack_it)[1].str());
        const int64_t abs_end = std::stoll((*stack_it)[2].str());
        for (const auto& abs_offset : abs_stack_offsets) {
            if (abs_offset >= abs_start && abs_offset <= abs_end) {
                return true; // Overlaps within the constraint range
            }
        }
        ++stack_it;
    }

    // Check for single-offset stack pattern: s[offset]
    // (the trailing `\.` distinguishes it from the range form above)
    static const std::regex stack_single_pattern(R"(s\[(\d+)\]\.)");
    std::sregex_iterator single_it(constraint.begin(), constraint.end(), stack_single_pattern);
    while (single_it != end) {
        parsed_any_pattern = true;
        const int64_t abs_pos = std::stoll((*single_it)[1].str());
        for (const auto& abs_offset : abs_stack_offsets) {
            if (abs_offset == abs_pos) {
                return true;
            }
        }
        ++single_it;
    }

    // Global constraints (packet_size, meta_offset) - always show for context
    if (constraint.starts_with("packet_size") || constraint.starts_with("meta_offset")) {
        return true;
    }

    // A known pattern was parsed but nothing matched — the constraint is irrelevant.
    if (parsed_any_pattern) {
        return false;
    }

    // If we can't parse it at all, show it (conservative)
    return true;
}
bool AnalysisResult::is_valid_after(const Label& label, const StringInvariant& state) const {
×
109
    const EbpfDomain abstract_state =
110
        EbpfDomain::from_constraints(state.value(), thread_local_options.setup_constraints);
×
111
    return abstract_state <= invariants.at(label).post;
×
112
}
×
113

114
/// Compares a user-supplied `observation` (string constraints) against the
/// analysis invariant at `label`, at the chosen pre/post `point`.
/// Mode `entailed` requires observation ⊑ invariant; mode `consistent` only
/// requires the meet of the two domains to be non-bottom. Failures produce a
/// human-readable message explaining which check did not hold.
ObservationCheckResult AnalysisResult::check_observation_at_label(const Label& label, const InvariantPoint point,
                                                                  const StringInvariant& observation,
                                                                  const ObservationCheckMode mode) const {
    const auto it = invariants.find(label);
    if (it == invariants.end()) {
        return {.ok = false, .message = "No invariant available for label " + to_string(label)};
    }
    const auto& inv_pair = it->second;
    const EbpfDomain& abstract_state = (point == InvariantPoint::pre) ? inv_pair.pre : inv_pair.post;

    // A bottom observation is representable directly; anything else goes
    // through the constraint parser.
    const EbpfDomain observed_state =
        observation.is_bottom()
            ? EbpfDomain::bottom()
            : EbpfDomain::from_constraints(observation.value(), thread_local_options.setup_constraints);

    // Reject unsatisfiable observations before consulting the invariant.
    if (observed_state.is_bottom()) {
        return {.ok = false, .message = "Observation constraints are unsatisfiable (domain is bottom)"};
    }

    if (abstract_state.is_bottom()) {
        return {.ok = false, .message = "Invariant at label is bottom (unreachable)"};
    }

    switch (mode) {
    case ObservationCheckMode::entailed:
        if (observed_state <= abstract_state) {
            return {.ok = true, .message = ""};
        }
        return {.ok = false, .message = "Invariant does not entail the observation (C ⊑ A is false)"};

    case ObservationCheckMode::consistent:
        // Default: consistency / satisfiability.
        if ((abstract_state & observed_state).is_bottom()) {
            return {.ok = false, .message = "Observation contradicts invariant (meet is bottom)"};
        }
        return {.ok = true, .message = ""};
    }

    // Unreachable with a well-formed enum; kept for safety against bad casts.
    return {.ok = false, .message = "Unsupported observation check mode"};
}
bool AnalysisResult::is_consistent_before(const Label& label, const StringInvariant& observation) const {
×
156
    return check_observation_at_label(label, InvariantPoint::pre, observation, ObservationCheckMode::consistent).ok;
×
157
}
158

159
bool AnalysisResult::is_consistent_after(const Label& label, const StringInvariant& observation) const {
×
160
    return check_observation_at_label(label, InvariantPoint::post, observation, ObservationCheckMode::consistent).ok;
×
161
}
162

163
/// Returns the post-state invariant at `label`, rendered as a set of string
/// constraints. Throws std::out_of_range if `label` has no recorded invariant.
StringInvariant AnalysisResult::invariant_at(const Label& label) const {
    const auto& post_state = invariants.at(label).post;
    return post_state.to_set();
}
std::optional<VerificationError> AnalysisResult::find_first_error() const {
1,400✔
166
    for (const auto& inv_pair : invariants | std::views::values) {
5,952✔
167
        if (inv_pair.pre.is_bottom()) {
4,866✔
168
            continue;
330✔
169
        }
170
        if (inv_pair.error) {
4,536✔
171
            return inv_pair.error;
314✔
172
        }
173
    }
174
    return {};
1,086✔
175
}
176

177
std::map<Label, std::vector<std::string>> AnalysisResult::find_unreachable(const Program& prog) const {
1,400✔
178
    std::map<Label, std::vector<std::string>> unreachable;
1,400✔
179
    for (const auto& [label, inv_pair] : invariants) {
6,828✔
180
        if (inv_pair.pre.is_bottom()) {
5,428✔
181
            continue;
810✔
182
        }
183
        if (const auto passume = std::get_if<Assume>(&prog.instruction_at(label))) {
7,332✔
184
            if (inv_pair.post.is_bottom() && !inv_pair.error) {
556✔
185
                const auto s = to_string(*passume);
182✔
186
                unreachable[label].emplace_back(to_string(label) + ": Code becomes unreachable (" + s + ")");
455✔
187
            }
182✔
188
        }
189
    }
190
    return unreachable;
1,400✔
191
}
×
192

193
/// Extract the registers and stack offsets read/written by an instruction.
/// Used to populate InstructionDeps during forward analysis.
///
/// The pre-state is consulted only to resolve stack accesses through
/// registers other than R10: when the base register holds a known stack
/// offset, the accessed slot is converted to an R10-relative offset so all
/// stack deps share one coordinate system.
InstructionDeps extract_instruction_deps(const Instruction& ins, const EbpfDomain& pre_state) {
    InstructionDeps deps;

    std::visit(
        [&](const auto& v) {
            using T = std::decay_t<decltype(v)>;

            if constexpr (std::is_same_v<T, Bin>) {
                // dst = dst op src (or dst = src for MOV/MOVSX)
                deps.regs_written.insert(v.dst);
                if (v.op != Bin::Op::MOV && v.op != Bin::Op::MOVSX8 && v.op != Bin::Op::MOVSX16 &&
                    v.op != Bin::Op::MOVSX32) {
                    // Non-move ops also read dst
                    deps.regs_read.insert(v.dst);
                }
                if (const auto* reg = std::get_if<Reg>(&v.v)) {
                    deps.regs_read.insert(*reg);
                }
            } else if constexpr (std::is_same_v<T, Un>) {
                // dst = op(dst): read-modify-write of a single register
                deps.regs_read.insert(v.dst);
                deps.regs_written.insert(v.dst);
            } else if constexpr (std::is_same_v<T, LoadMapFd> || std::is_same_v<T, LoadMapAddress>) {
                // Loads an immediate map fd/address; only writes dst.
                deps.regs_written.insert(v.dst);
            } else if constexpr (std::is_same_v<T, Mem>) {
                deps.regs_read.insert(v.access.basereg);
                if (v.is_load) {
                    // Load: value = *(basereg + offset)
                    if (const auto* dst_reg = std::get_if<Reg>(&v.value)) {
                        deps.regs_written.insert(*dst_reg);
                    }
                    // Track stack read if base is a stack pointer (R10 or derived)
                    if (v.access.basereg.v == R10_STACK_POINTER) {
                        deps.stack_read.insert(v.access.offset);
                    } else if (const auto stack_off = pre_state.get_stack_offset(v.access.basereg)) {
                        // basereg is a stack pointer with known offset from R10
                        // Compute the actual stack slot: stack_off is absolute (e.g., 4096),
                        // convert to relative from R10: relative = offset + (stack_off - EBPF_TOTAL_STACK_SIZE)
                        deps.stack_read.insert(v.access.offset + (*stack_off - EBPF_TOTAL_STACK_SIZE));
                    }
                } else {
                    // Store: *(basereg + offset) = value
                    if (const auto* src_reg = std::get_if<Reg>(&v.value)) {
                        deps.regs_read.insert(*src_reg);
                    }
                    // Track stack write if base is a stack pointer (R10 or derived)
                    if (v.access.basereg.v == R10_STACK_POINTER) {
                        deps.stack_written.insert(v.access.offset);
                    } else if (const auto stack_off = pre_state.get_stack_offset(v.access.basereg)) {
                        deps.stack_written.insert(v.access.offset + (*stack_off - EBPF_TOTAL_STACK_SIZE));
                    }
                }
            } else if constexpr (std::is_same_v<T, Atomic>) {
                // Atomic read-modify-write on memory at basereg+offset using valreg.
                deps.regs_read.insert(v.access.basereg);
                deps.regs_read.insert(v.valreg);
                if (v.fetch) {
                    // Fetch variants also write the old memory value into valreg.
                    deps.regs_written.insert(v.valreg);
                }
                // Track stack read/write if base is a stack pointer (R10 or derived)
                if (v.access.basereg.v == R10_STACK_POINTER) {
                    deps.stack_read.insert(v.access.offset);
                    deps.stack_written.insert(v.access.offset);
                } else if (const auto stack_off = pre_state.get_stack_offset(v.access.basereg)) {
                    const auto adjusted_off = v.access.offset + (*stack_off - EBPF_TOTAL_STACK_SIZE);
                    deps.stack_read.insert(adjusted_off);
                    deps.stack_written.insert(adjusted_off);
                }
            } else if constexpr (std::is_same_v<T, Call>) {
                // Calls read R1-R5 as arguments, write R0 as return value,
                // and clobber R1-R5 (caller-saved registers are killed).
                // Separate clobbers from writes so backward slicing can:
                // - Stop propagation for post-call uses of R1-R5 (they're killed)
                // - Still trace R1-R5 as read-deps when R0 is relevant-after (args affect return)
                for (uint8_t i = 1; i <= 5; ++i) {
                    deps.regs_read.insert(Reg{i});      // Arguments feed into return value
                    deps.regs_clobbered.insert(Reg{i}); // Killed after call
                }
                deps.regs_written.insert(Reg{0}); // Return value
            } else if constexpr (std::is_same_v<T, CallLocal>) {
                // Local (macro-inlined) calls:
                // - read R1-R5 as arguments
                // - write R0 as return value
                // - clobber R1-R5 (caller-saved)
                // - adjust frame pointer R10 (save/restore callee-saved regs)
                for (uint8_t i = 1; i <= 5; ++i) {
                    deps.regs_read.insert(Reg{i});      // Arguments
                    deps.regs_clobbered.insert(Reg{i}); // Killed after call
                }
                deps.regs_written.insert(Reg{0});  // Return value
                deps.regs_read.insert(Reg{10});    // Frame pointer read
                deps.regs_written.insert(Reg{10}); // Frame pointer adjusted
            } else if constexpr (std::is_same_v<T, Callx>) {
                // Indirect call: reads the function pointer and R1-R5,
                // writes R0, and clobbers R1-R5.
                deps.regs_read.insert(v.func);
                for (uint8_t i = 1; i <= 5; ++i) {
                    deps.regs_read.insert(Reg{i});      // Arguments
                    deps.regs_clobbered.insert(Reg{i}); // Killed after call
                }
                deps.regs_written.insert(Reg{0});
            } else if constexpr (std::is_same_v<T, Exit>) {
                deps.regs_read.insert(Reg{0}); // Return value
            } else if constexpr (std::is_same_v<T, Jmp>) {
                // Unconditional jumps (no cond) have no register deps.
                if (v.cond) {
                    deps.regs_read.insert(v.cond->left);
                    if (const auto* reg = std::get_if<Reg>(&v.cond->right)) {
                        deps.regs_read.insert(*reg);
                    }
                }
            } else if constexpr (std::is_same_v<T, Assume>) {
                deps.regs_read.insert(v.cond.left);
                if (const auto* reg = std::get_if<Reg>(&v.cond.right)) {
                    deps.regs_read.insert(*reg);
                }
            } else if constexpr (std::is_same_v<T, Packet>) {
                if (v.regoffset) {
                    deps.regs_read.insert(*v.regoffset);
                }
                deps.regs_written.insert(Reg{0}); // Result in R0
                // Packet clobbers caller-saved registers R1-R5 without reading them
                for (uint8_t i = 1; i <= 5; ++i) {
                    deps.regs_clobbered.insert(Reg{i});
                }
            }
            // Undefined and IncrementLoopCounter have no register deps
        },
        ins);

    return deps;
}
326
/// Extract the registers referenced by an assertion.
/// Used to seed backward slicing from the point of failure.
///
/// Each assertion kind names the specific registers it constrains; kinds
/// without register operands (ValidCall, BoundedLoopCount) return an empty
/// set, which the slicer handles via its conservative fallback mode.
std::set<Reg> extract_assertion_registers(const Assertion& assertion) {
    return std::visit(
        [](const auto& a) -> std::set<Reg> {
            using T = std::decay_t<decltype(a)>;

            if constexpr (std::is_same_v<T, Comparable>) {
                return {a.r1, a.r2};
            } else if constexpr (std::is_same_v<T, Addable>) {
                return {a.ptr, a.num};
            } else if constexpr (std::is_same_v<T, ValidDivisor>) {
                return {a.reg};
            } else if constexpr (std::is_same_v<T, ValidAccess>) {
                // The access width may itself be held in a register.
                std::set<Reg> regs{a.reg};
                if (const auto* r = std::get_if<Reg>(&a.width)) {
                    regs.insert(*r);
                }
                return regs;
            } else if constexpr (std::is_same_v<T, ValidStore>) {
                return {a.mem, a.val};
            } else if constexpr (std::is_same_v<T, ValidSize>) {
                return {a.reg};
            } else if constexpr (std::is_same_v<T, ValidMapKeyValue>) {
                return {a.access_reg, a.map_fd_reg};
            } else if constexpr (std::is_same_v<T, ValidCall>) {
                // ValidCall checks function validity, no direct register deps
                return {};
            } else if constexpr (std::is_same_v<T, TypeConstraint>) {
                return {a.reg};
            } else if constexpr (std::is_same_v<T, FuncConstraint>) {
                return {a.reg};
            } else if constexpr (std::is_same_v<T, ZeroCtxOffset>) {
                return {a.reg};
            } else if constexpr (std::is_same_v<T, BoundedLoopCount>) {
                // Loop counter, not a register
                return {};
            }
            // Fallback for any assertion kind not enumerated above.
            return {};
        },
        assertion);
}
369
/// Computes a backward program slice for each verification failure: the set
/// of labels (with per-label relevant registers/stack slots) that causally
/// contribute to the failing assertion. Traversal is a bounded worklist walk
/// over CFG predecessors, propagating "relevant" registers/stack offsets
/// backwards through each instruction's read/write/clobber dependency sets.
/// `params.max_steps` bounds the walk per slice; `params.max_slices` (0 = no
/// limit) bounds the number of slices produced.
std::vector<FailureSlice> AnalysisResult::compute_failure_slices(const Program& prog, const SliceParams params) const {
    const auto max_steps = params.max_steps;
    const auto max_slices = params.max_slices;
    std::vector<FailureSlice> slices;

    // Find all labels with errors
    for (const auto& [label, inv_pair] : invariants) {
        if (inv_pair.pre.is_bottom()) {
            continue; // Unreachable
        }
        if (!inv_pair.error) {
            continue; // No error here
        }

        // Check if we've reached the max slices limit
        if (max_slices > 0 && slices.size() >= max_slices) {
            break;
        }

        FailureSlice slice{
            .failing_label = label,
            .error = *inv_pair.error,
            .relevance = {},
        };

        // Seed relevant registers from the actual failing assertion.
        // Forward analysis stops at the first failing assertion, which may not be
        // assertions[0]. Replay the checks against the pre-state to identify
        // the actual failing assertion and seed relevance from it.
        RelevantState initial_relevance;
        const auto& assertions = prog.assertions_at(label);
        bool found_failing = false;
        for (const auto& assertion : assertions) {
            if (ebpf_domain_check(inv_pair.pre, assertion, label)) {
                for (const auto& reg : extract_assertion_registers(assertion)) {
                    initial_relevance.registers.insert(reg);
                }
                found_failing = true;
                break;
            }
        }
        // Fallback: if no failing assertion was identified (shouldn't happen),
        // or if the failing assertion has no register deps, aggregate all assertions.
        if (!found_failing || initial_relevance.registers.empty()) {
            for (const auto& assertion : assertions) {
                for (const auto& reg : extract_assertion_registers(assertion)) {
                    initial_relevance.registers.insert(reg);
                }
            }
        }

        // Always include the failing label in the slice, even if no registers were extracted
        // (e.g., ValidCall, BoundedLoopCount assertions have no register deps)

        // `visited` tracks all explored labels for deduplication during backward traversal.
        // `slice_labels` tracks only labels that interact with relevant registers (the output slice).
        std::map<Label, RelevantState> visited;
        std::set<Label> conservative_visited; // Dedup for empty-relevance labels in conservative mode
        std::map<Label, RelevantState> slice_labels;

        // Worklist: (label, relevant_state_after_this_label)
        std::vector<std::pair<Label, RelevantState>> worklist;
        worklist.emplace_back(label, initial_relevance);

        // When the seed has no register/stack deps (e.g., BoundedLoopCount),
        // perform a conservative backward walk so the slice still shows the
        // loop structure and control flow leading to the failure.
        const bool conservative_mode = initial_relevance.registers.empty() && initial_relevance.stack_offsets.empty();

        size_t steps = 0;

        // Hoist the parent lookup for the failing label outside the hot loop;
        // it is invariant and parents_of() may return a temporary.
        const auto parents_of_fail = prog.cfg().parents_of(label);

        while (!worklist.empty() && steps < max_steps) {
            // Copy out the entry before popping; relevant_after may be merged below.
            auto [current_label, relevant_after] = worklist.back();
            worklist.pop_back();

            // Skip if nothing is relevant — unless we're in conservative mode
            // (empty-seed assertions like BoundedLoopCount) or this is the failing label.
            if (!conservative_mode && current_label != label && relevant_after.registers.empty() &&
                relevant_after.stack_offsets.empty()) {
                continue;
            }

            // Merge with existing relevance at this label (for deduplication)
            auto& existing = visited[current_label];
            const size_t prev_size = existing.registers.size() + existing.stack_offsets.size();
            existing.registers.insert(relevant_after.registers.begin(), relevant_after.registers.end());
            existing.stack_offsets.insert(relevant_after.stack_offsets.begin(), relevant_after.stack_offsets.end());
            const size_t new_size = existing.registers.size() + existing.stack_offsets.size();

            // If no new relevance was added, skip (already processed with same or broader relevance).
            // In conservative mode with empty relevance, use a separate visited set for dedup.
            if (new_size == prev_size) {
                if (prev_size > 0) {
                    continue;
                }
                // Empty relevance (conservative mode): skip if we already visited this label
                if (!conservative_visited.insert(current_label).second) {
                    continue;
                }
            }

            // Compute what's relevant BEFORE this instruction using deps
            RelevantState relevant_before;
            const auto inv_it = invariants.find(current_label);
            if (inv_it != invariants.end() && inv_it->second.deps) {
                const auto& deps = *inv_it->second.deps;

                // Start with what's relevant after
                relevant_before = relevant_after;

                // Remove registers that are written by this instruction
                // (they weren't relevant before their definition)
                for (const auto& reg : deps.regs_written) {
                    relevant_before.registers.erase(reg);
                }

                // Remove registers that are clobbered (killed without reading).
                // These stop propagation for post-instruction uses but don't add read-deps.
                for (const auto& reg : deps.regs_clobbered) {
                    relevant_before.registers.erase(reg);
                }

                // Determine if this instruction contributes to the slice.
                // An instruction contributes when it writes to a relevant register/stack slot,
                // or when it is a control-flow decision (Jmp/Assume) that reads relevant registers.
                bool instruction_contributes = false;
                for (const auto& reg : deps.regs_written) {
                    if (relevant_after.registers.contains(reg)) {
                        instruction_contributes = true;
                        break;
                    }
                }
                for (const auto& offset : deps.stack_written) {
                    if (relevant_after.stack_offsets.contains(offset)) {
                        instruction_contributes = true;
                        break;
                    }
                }

                // Control-flow instructions (Jmp/Assume) that read relevant registers
                // contribute to the slice because they shape the path to the failure.
                if (!instruction_contributes) {
                    const auto& ins = prog.instruction_at(current_label);
                    if (std::holds_alternative<Jmp>(ins) || std::holds_alternative<Assume>(ins)) {
                        for (const auto& reg : deps.regs_read) {
                            if (relevant_after.registers.contains(reg)) {
                                instruction_contributes = true;
                                break;
                            }
                        }
                    }
                }

                // Immediate path guard: when the current label is a direct predecessor
                // of the failing label and is an Assume instruction, its condition
                // registers are causally relevant — they determine reachability.
                if (std::holds_alternative<Assume>(prog.instruction_at(current_label))) {
                    if (std::find(parents_of_fail.begin(), parents_of_fail.end(), current_label) !=
                        parents_of_fail.end()) {
                        instruction_contributes = true;
                    }
                }

                // At the failing label, the assertion depends on registers the instruction
                // reads (e.g., base pointer r3 in a store). Since stores write to memory
                // not registers, instruction_contributes would be false without this.
                if (current_label == label) {
                    instruction_contributes = true;
                }

                // In conservative mode (empty seed, e.g., BoundedLoopCount), include all
                // reachable labels so the slice shows the loop structure and control flow.
                if (conservative_mode) {
                    instruction_contributes = true;
                }

                if (instruction_contributes) {
                    for (const auto& reg : deps.regs_read) {
                        relevant_before.registers.insert(reg);
                    }
                    for (const auto& offset : deps.stack_read) {
                        relevant_before.stack_offsets.insert(offset);
                    }
                }

                // Remove stack locations that are written by this instruction,
                // but preserve offsets that are also read (read-modify-write, e.g., Atomic).
                // Done before storing to slice_labels for consistency with register handling
                // (written registers are removed before storage at lines 476-478).
                for (const auto& offset : deps.stack_written) {
                    if (!deps.stack_read.contains(offset)) {
                        relevant_before.stack_offsets.erase(offset);
                    }
                }

                if (instruction_contributes) {
                    // Only include contributing labels in the output slice.
                    // Store relevant_before so pre-invariant filtering shows the
                    // instruction's read-deps (the true upstream dependencies).
                    slice_labels[current_label] = relevant_before;
                }
            } else {
                // No deps available: conservatively treat this label as contributing
                // and propagate all current relevance to predecessors.
                relevant_before = relevant_after;
                slice_labels[current_label] = relevant_before;
            }

            // Add predecessors to worklist
            for (const auto& parent : prog.cfg().parents_of(current_label)) {
                worklist.emplace_back(parent, relevant_before);
            }

            ++steps;
        }

        // Expand join points: for any traversed label that is a join point
        // (≥2 predecessors) where at least one predecessor is already in the slice,
        // add the join-point label itself and all predecessors that have invariants.
        // This ensures the causal trace shows converging paths that cause precision loss.
        // Note: predecessors may not be in `visited` if the worklist budget was exhausted
        // before reaching them, so we check the invariant map directly.
        std::map<Label, RelevantState> join_expansion;
        for (const auto& [v_label, v_relevance] : visited) {
            const auto& parents = prog.cfg().parents_of(v_label);
            if (parents.size() < 2) {
                continue;
            }
            // Check that at least one predecessor is in the slice (this join is relevant)
            bool has_slice_parent = false;
            for (const auto& parent : parents) {
                if (slice_labels.contains(parent)) {
                    has_slice_parent = true;
                    break;
                }
            }
            if (!has_slice_parent) {
                continue;
            }
            // Include the join-point label itself so the printing code can display
            // per-predecessor state at this join.
            if (!slice_labels.contains(v_label)) {
                join_expansion[v_label] = v_relevance;
            }
            // Include all predecessors so the join display is complete.
            // Use visited relevance if available, otherwise use the join-point's relevance.
            for (const auto& parent : parents) {
                if (slice_labels.contains(parent) || join_expansion.contains(parent)) {
                    continue;
                }
                const auto& rel = visited.contains(parent) ? visited.at(parent) : v_relevance;
                join_expansion[parent] = rel;
            }
        }
        slice_labels.insert(join_expansion.begin(), join_expansion.end());

        // Build the slice from contributing labels only
        slice.relevance = std::move(slice_labels);
        slices.push_back(std::move(slice));
    }

    return slices;
}
} // namespace prevail
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc