• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

Alan-Jowett / ebpf-verifier / 21968098219

12 Feb 2026 11:19PM UTC coverage: 87.328% (+0.5%) from 86.783%
21968098219

Pull #161

github

web-flow
Merge fcfbad30c into f1f1d42d7
Pull Request #161: Failure slice

527 of 677 new or added lines in 8 files covered. (77.84%)

20 existing lines in 2 files now uncovered.

10020 of 11474 relevant lines covered (87.33%)

2947086.45 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

77.27
/src/result.cpp
1
// Copyright (c) Prevail Verifier contributors.
2
// SPDX-License-Identifier: MIT
3
#include <algorithm>
4
#include <map>
5
#include <ranges>
6
#include <regex>
7
#include <sstream>
8

9
#include "crab/ebpf_domain.hpp"
10
#include "ir/program.hpp"
11
#include "result.hpp"
12
#include "spec/ebpf_base.h"
13
#include "spec/vm_isa.hpp"
14

15
namespace prevail {
16

17
// Stream-local storage slot for the invariant filter.
// std::ios_base::xalloc() hands out a process-unique index; the function-local
// static guarantees every caller shares the same slot.
static int invariant_filter_index() {
    static const int slot = std::ios_base::xalloc();
    return slot;
}
22

23
// Retrieve the RelevantState filter previously attached to `os` via the
// invariant_filter stream manipulator, or nullptr if none was set.
const RelevantState* get_invariant_filter(std::ostream& os) {
    void* const stored = os.pword(invariant_filter_index());
    return static_cast<const RelevantState*>(stored);
}
26

27
std::ostream& operator<<(std::ostream& os, const invariant_filter& filter) {
12✔
28
    os.pword(invariant_filter_index()) = const_cast<void*>(static_cast<const void*>(filter.state));
12✔
29
    return os;
12✔
30
}
31

32
/// Decide whether a textual constraint mentions state tracked by this
/// RelevantState (registers or stack slots), for filtering invariant output.
///
/// Constraints look like: "r1.type=number", "r2.svalue=[0, 100]",
/// "s[4088...4095].type=ctx". Relational constraints:
/// "r1.svalue-r8.svalue<=-100", "r0.svalue=r3.svalue+2".
/// Also handled: "packet_size=54", "meta_offset=[-4098, 0]".
bool RelevantState::is_relevant_constraint(const std::string& constraint) const {
    // Check for ANY register mentioned in the constraint (not just at start).
    // This handles relational constraints like "r1.svalue-r8.svalue<=100".
    static const std::regex reg_pattern(R"(r(\d+)\.)");
    std::sregex_iterator it(constraint.begin(), constraint.end(), reg_pattern);
    // Default-constructed sregex_iterator is the universal end-of-sequence
    // iterator; reused below for the stack-pattern scans too.
    std::sregex_iterator end;

    bool found_any_register = false;
    while (it != end) {
        found_any_register = true;
        const uint8_t reg_num = static_cast<uint8_t>(std::stoi((*it)[1].str()));
        if (registers.contains(Reg{reg_num})) {
            return true; // At least one relevant register is mentioned
        }
        ++it;
    }

    // If we found registers but none were relevant, skip this constraint.
    if (found_any_register) {
        return false;
    }

    // Precompute absolute stack offsets once to avoid repeated conversion in loops.
    // Our stack_offsets are relative to R10 (e.g., -8); constraint offsets are
    // absolute (e.g., 4088), so shift by EBPF_TOTAL_STACK_SIZE.
    std::vector<int64_t> abs_stack_offsets;
    abs_stack_offsets.reserve(stack_offsets.size());
    for (const auto& rel_offset : stack_offsets) {
        abs_stack_offsets.push_back(EBPF_TOTAL_STACK_SIZE + rel_offset);
    }

    // Check for stack range pattern: s[start...end]
    static const std::regex stack_range_pattern(R"(s\[(\d+)\.\.\.(\d+)\])");
    std::sregex_iterator stack_it(constraint.begin(), constraint.end(), stack_range_pattern);
    while (stack_it != end) {
        // Stack offsets in constraints are absolute (e.g., 4088...4095).
        const int64_t abs_start = std::stoll((*stack_it)[1].str());
        const int64_t abs_end = std::stoll((*stack_it)[2].str());
        for (const auto& abs_offset : abs_stack_offsets) {
            if (abs_offset >= abs_start && abs_offset <= abs_end) {
                return true; // Overlaps within the constraint range
            }
        }
        ++stack_it;
    }

    // Check for single-offset stack pattern: s[offset]. (trailing dot required,
    // so range constraints matched above are not re-matched here)
    static const std::regex stack_single_pattern(R"(s\[(\d+)\]\.)");
    std::sregex_iterator single_it(constraint.begin(), constraint.end(), stack_single_pattern);
    while (single_it != end) {
        const int64_t abs_pos = std::stoll((*single_it)[1].str());
        for (const auto& abs_offset : abs_stack_offsets) {
            if (abs_offset == abs_pos) {
                return true;
            }
        }
        ++single_it;
    }

    // Global constraints (packet_size, meta_offset) - always show for context.
    // NOTE(review): this branch is currently redundant — the conservative
    // fallback below also returns true — but it documents intent and guards
    // against a future change to the fallback.
    if (constraint.starts_with("packet_size") || constraint.starts_with("meta_offset")) {
        return true;
    }

    // If we can't parse it, show it (conservative).
    return true;
}
110✔
103

NEW
104
bool AnalysisResult::is_valid_after(const Label& label, const StringInvariant& state) const {
×
105
    const EbpfDomain abstract_state =
NEW
106
        EbpfDomain::from_constraints(state.value(), thread_local_options.setup_constraints);
×
NEW
107
    return abstract_state <= invariants.at(label).post;
×
UNCOV
108
}
×
109

110
/// Check a user-supplied observation (textual constraints) against the
/// recorded invariant at `label`, either the pre- or post-state.
///
/// Modes:
///  - entailed:   observation must be implied by the invariant (C ⊑ A).
///  - consistent: observation must merely be satisfiable together with the
///                invariant (meet is not bottom).
///
/// Returns {ok, message}; message is empty on success and describes the
/// failure reason otherwise.
ObservationCheckResult AnalysisResult::check_observation_at_label(const Label& label, const InvariantPoint point,
                                                                  const StringInvariant& observation,
                                                                  const ObservationCheckMode mode) const {
    const auto it = invariants.find(label);
    if (it == invariants.end()) {
        return {.ok = false, .message = "No invariant available for label " + to_string(label)};
    }
    const auto& inv_pair = it->second;
    const EbpfDomain& abstract_state = (point == InvariantPoint::pre) ? inv_pair.pre : inv_pair.post;

    // A bottom observation short-circuits from_constraints (whose result on an
    // empty constraint set would not be bottom).
    const EbpfDomain observed_state =
        observation.is_bottom()
            ? EbpfDomain::bottom()
            : EbpfDomain::from_constraints(observation.value(), thread_local_options.setup_constraints);

    if (observed_state.is_bottom()) {
        return {.ok = false, .message = "Observation constraints are unsatisfiable (domain is bottom)"};
    }

    if (abstract_state.is_bottom()) {
        return {.ok = false, .message = "Invariant at label is bottom (unreachable)"};
    }

    switch (mode) {
    case ObservationCheckMode::entailed:
        if (observed_state <= abstract_state) {
            return {.ok = true, .message = ""};
        }
        return {.ok = false, .message = "Invariant does not entail the observation (C ⊑ A is false)"};

    case ObservationCheckMode::consistent:
        // Default: consistency / satisfiability.
        if ((abstract_state & observed_state).is_bottom()) {
            return {.ok = false, .message = "Observation contradicts invariant (meet is bottom)"};
        }
        return {.ok = true, .message = ""};
    }

    // Unreachable with the current enum; kept as a defensive fallback.
    return {.ok = false, .message = "Unsupported observation check mode"};
}
30✔
150

UNCOV
151
bool AnalysisResult::is_consistent_before(const Label& label, const StringInvariant& observation) const {
×
152
    return check_observation_at_label(label, InvariantPoint::pre, observation, ObservationCheckMode::consistent).ok;
×
153
}
154

155
bool AnalysisResult::is_consistent_after(const Label& label, const StringInvariant& observation) const {
×
156
    return check_observation_at_label(label, InvariantPoint::post, observation, ObservationCheckMode::consistent).ok;
×
157
}
158

159
/// Return the post-invariant at `label` as a set of constraint strings.
/// Throws std::out_of_range if the label has no recorded invariant.
StringInvariant AnalysisResult::invariant_at(const Label& label) const {
    const auto& inv_pair = invariants.at(label);
    return inv_pair.post.to_set();
}
1,400✔
160

161
std::optional<VerificationError> AnalysisResult::find_first_error() const {
1,400✔
162
    for (const auto& inv_pair : invariants | std::views::values) {
5,952✔
163
        if (inv_pair.pre.is_bottom()) {
4,866✔
164
            continue;
330✔
165
        }
166
        if (inv_pair.error) {
4,536✔
167
            return inv_pair.error;
314✔
168
        }
169
    }
170
    return {};
1,086✔
171
}
172

173
std::map<Label, std::vector<std::string>> AnalysisResult::find_unreachable(const Program& prog) const {
1,400✔
174
    std::map<Label, std::vector<std::string>> unreachable;
1,400✔
175
    for (const auto& [label, inv_pair] : invariants) {
6,828✔
176
        if (inv_pair.pre.is_bottom()) {
5,428✔
177
            continue;
810✔
178
        }
179
        if (const auto passume = std::get_if<Assume>(&prog.instruction_at(label))) {
7,332✔
180
            if (inv_pair.post.is_bottom() && !inv_pair.error) {
556✔
181
                const auto s = to_string(*passume);
182✔
182
                unreachable[label].emplace_back(to_string(label) + ": Code becomes unreachable (" + s + ")");
455✔
183
            }
182✔
184
        }
185
    }
186
    return unreachable;
1,400✔
UNCOV
187
}
×
188

189
/// Extract the registers and stack offsets read/written by an instruction.
/// Used to populate InstructionDeps during forward analysis.
///
/// `pre_state` is consulted only to resolve stack accesses through registers
/// other than R10 (via get_stack_offset). Stack offsets in the returned deps
/// are relative to R10; absolute offsets from the domain are converted with
/// EBPF_TOTAL_STACK_SIZE.
InstructionDeps extract_instruction_deps(const Instruction& ins, const EbpfDomain& pre_state) {
    InstructionDeps deps;

    std::visit(
        [&](const auto& v) {
            using T = std::decay_t<decltype(v)>;

            if constexpr (std::is_same_v<T, Bin>) {
                // dst = dst op src (or dst = src for MOV/MOVSX)
                deps.regs_written.insert(v.dst);
                if (v.op != Bin::Op::MOV && v.op != Bin::Op::MOVSX8 && v.op != Bin::Op::MOVSX16 &&
                    v.op != Bin::Op::MOVSX32) {
                    // Non-move ops also read dst
                    deps.regs_read.insert(v.dst);
                }
                // Register operand (if the rhs is not an immediate) is read.
                if (const auto* reg = std::get_if<Reg>(&v.v)) {
                    deps.regs_read.insert(*reg);
                }
            } else if constexpr (std::is_same_v<T, Un>) {
                // dst = op(dst): read-modify-write on the same register.
                deps.regs_read.insert(v.dst);
                deps.regs_written.insert(v.dst);
            } else if constexpr (std::is_same_v<T, LoadMapFd> || std::is_same_v<T, LoadMapAddress>) {
                // Loads an immediate map fd/address; only writes dst.
                deps.regs_written.insert(v.dst);
            } else if constexpr (std::is_same_v<T, Mem>) {
                deps.regs_read.insert(v.access.basereg);
                if (v.is_load) {
                    // Load: value = *(basereg + offset)
                    if (const auto* dst_reg = std::get_if<Reg>(&v.value)) {
                        deps.regs_written.insert(*dst_reg);
                    }
                    // Track stack read if base is a stack pointer (R10 or derived)
                    if (v.access.basereg.v == R10_STACK_POINTER) {
                        deps.stack_read.insert(v.access.offset);
                    } else if (const auto stack_off = pre_state.get_stack_offset(v.access.basereg)) {
                        // basereg is a stack pointer with known offset from R10.
                        // Compute the actual stack slot: stack_off is absolute (e.g., 4096),
                        // convert to relative from R10: relative = offset + (stack_off - EBPF_TOTAL_STACK_SIZE)
                        deps.stack_read.insert(v.access.offset + (*stack_off - EBPF_TOTAL_STACK_SIZE));
                    }
                } else {
                    // Store: *(basereg + offset) = value
                    if (const auto* src_reg = std::get_if<Reg>(&v.value)) {
                        deps.regs_read.insert(*src_reg);
                    }
                    // Track stack write if base is a stack pointer (R10 or derived)
                    if (v.access.basereg.v == R10_STACK_POINTER) {
                        deps.stack_written.insert(v.access.offset);
                    } else if (const auto stack_off = pre_state.get_stack_offset(v.access.basereg)) {
                        deps.stack_written.insert(v.access.offset + (*stack_off - EBPF_TOTAL_STACK_SIZE));
                    }
                }
            } else if constexpr (std::is_same_v<T, Atomic>) {
                // Atomic op reads the address base and the value register;
                // fetch variants also write the old value back into valreg.
                deps.regs_read.insert(v.access.basereg);
                deps.regs_read.insert(v.valreg);
                if (v.fetch) {
                    deps.regs_written.insert(v.valreg);
                }
                // Track stack read/write if base is a stack pointer (R10 or derived):
                // an atomic is a read-modify-write on the memory slot.
                if (v.access.basereg.v == R10_STACK_POINTER) {
                    deps.stack_read.insert(v.access.offset);
                    deps.stack_written.insert(v.access.offset);
                } else if (const auto stack_off = pre_state.get_stack_offset(v.access.basereg)) {
                    const auto adjusted_off = v.access.offset + (*stack_off - EBPF_TOTAL_STACK_SIZE);
                    deps.stack_read.insert(adjusted_off);
                    deps.stack_written.insert(adjusted_off);
                }
            } else if constexpr (std::is_same_v<T, Call>) {
                // Calls read R1-R5 as arguments, write R0 as return value,
                // and clobber R1-R5 (caller-saved registers are killed).
                // Separate clobbers from writes so backward slicing can:
                // - Stop propagation for post-call uses of R1-R5 (they're killed)
                // - Still trace R1-R5 as read-deps when R0 is relevant-after (args affect return)
                for (uint8_t i = 1; i <= 5; ++i) {
                    deps.regs_read.insert(Reg{i});      // Arguments feed into return value
                    deps.regs_clobbered.insert(Reg{i}); // Killed after call
                }
                deps.regs_written.insert(Reg{0}); // Return value
            } else if constexpr (std::is_same_v<T, CallLocal>) {
                // Local (macro-inlined) calls:
                // - read R1-R5 as arguments
                // - write R0 as return value
                // - clobber R1-R5 (caller-saved)
                // - adjust frame pointer R10 (save/restore callee-saved regs)
                for (uint8_t i = 1; i <= 5; ++i) {
                    deps.regs_read.insert(Reg{i});      // Arguments
                    deps.regs_clobbered.insert(Reg{i}); // Killed after call
                }
                deps.regs_written.insert(Reg{0});  // Return value
                deps.regs_read.insert(Reg{10});    // Frame pointer read
                deps.regs_written.insert(Reg{10}); // Frame pointer adjusted
            } else if constexpr (std::is_same_v<T, Callx>) {
                // Indirect call: reads the function pointer and R1-R5,
                // writes R0, and clobbers R1-R5.
                deps.regs_read.insert(v.func);
                for (uint8_t i = 1; i <= 5; ++i) {
                    deps.regs_read.insert(Reg{i});      // Arguments
                    deps.regs_clobbered.insert(Reg{i}); // Killed after call
                }
                deps.regs_written.insert(Reg{0});
            } else if constexpr (std::is_same_v<T, Exit>) {
                deps.regs_read.insert(Reg{0}); // Return value
            } else if constexpr (std::is_same_v<T, Jmp>) {
                // Conditional jumps read their condition registers;
                // unconditional jumps (no cond) have no register deps.
                if (v.cond) {
                    deps.regs_read.insert(v.cond->left);
                    if (const auto* reg = std::get_if<Reg>(&v.cond->right)) {
                        deps.regs_read.insert(*reg);
                    }
                }
            } else if constexpr (std::is_same_v<T, Assume>) {
                deps.regs_read.insert(v.cond.left);
                if (const auto* reg = std::get_if<Reg>(&v.cond.right)) {
                    deps.regs_read.insert(*reg);
                }
            } else if constexpr (std::is_same_v<T, Packet>) {
                if (v.regoffset) {
                    deps.regs_read.insert(*v.regoffset);
                }
                deps.regs_written.insert(Reg{0}); // Result in R0
                // Packet clobbers caller-saved registers R1-R5 without reading them
                for (uint8_t i = 1; i <= 5; ++i) {
                    deps.regs_clobbered.insert(Reg{i});
                }
            }
            // Undefined and IncrementLoopCounter have no register deps
        },
        ins);

    return deps;
}
×
321

322
/// Extract the registers referenced by an assertion.
/// Used to seed backward slicing from the point of failure.
///
/// Returns an empty set for assertions with no direct register dependencies
/// (ValidCall, BoundedLoopCount) — callers fall back to a conservative walk.
std::set<Reg> extract_assertion_registers(const Assertion& assertion) {
    return std::visit(
        [](const auto& a) -> std::set<Reg> {
            using T = std::decay_t<decltype(a)>;

            if constexpr (std::is_same_v<T, Comparable>) {
                // Both compared registers matter.
                return {a.r1, a.r2};
            } else if constexpr (std::is_same_v<T, Addable>) {
                return {a.ptr, a.num};
            } else if constexpr (std::is_same_v<T, ValidDivisor>) {
                return {a.reg};
            } else if constexpr (std::is_same_v<T, ValidAccess>) {
                // The accessed register, plus the width register when the
                // width is not an immediate.
                std::set<Reg> regs{a.reg};
                if (const auto* r = std::get_if<Reg>(&a.width)) {
                    regs.insert(*r);
                }
                return regs;
            } else if constexpr (std::is_same_v<T, ValidStore>) {
                return {a.mem, a.val};
            } else if constexpr (std::is_same_v<T, ValidSize>) {
                return {a.reg};
            } else if constexpr (std::is_same_v<T, ValidMapKeyValue>) {
                return {a.access_reg, a.map_fd_reg};
            } else if constexpr (std::is_same_v<T, ValidCall>) {
                // ValidCall checks function validity, no direct register deps
                return {};
            } else if constexpr (std::is_same_v<T, TypeConstraint>) {
                return {a.reg};
            } else if constexpr (std::is_same_v<T, FuncConstraint>) {
                return {a.reg};
            } else if constexpr (std::is_same_v<T, ZeroCtxOffset>) {
                return {a.reg};
            } else if constexpr (std::is_same_v<T, BoundedLoopCount>) {
                // Loop counter, not a register
                return {};
            }
            // Defensive fallback for any assertion type not handled above.
            return {};
        },
        assertion);
}
364

365
/// Compute a backward program slice for each verification failure: the set of
/// labels (with their relevant registers/stack slots) that causally contribute
/// to the failing assertion.
///
/// @param prog       The program whose CFG and instructions are traversed.
/// @param max_steps  Budget for the backward worklist per failure (guards
///                   against loops / large CFGs).
/// @param max_slices When > 0, stop after producing this many slices.
/// @return One FailureSlice per (reachable) erroring label, up to max_slices.
std::vector<FailureSlice> AnalysisResult::compute_failure_slices(const Program& prog, const size_t max_steps,
                                                                 const size_t max_slices) const {
    std::vector<FailureSlice> slices;

    // Find all labels with errors
    for (const auto& [label, inv_pair] : invariants) {
        if (inv_pair.pre.is_bottom()) {
            continue; // Unreachable
        }
        if (!inv_pair.error) {
            continue; // No error here
        }

        // Check if we've reached the max slices limit
        if (max_slices > 0 && slices.size() >= max_slices) {
            break;
        }

        FailureSlice slice{
            .failing_label = label,
            .error = *inv_pair.error,
            .relevance = {},
        };

        // Seed relevant registers from the actual failing assertion.
        // Forward analysis stops at the first failing assertion, which may not be
        // assertions[0]. Replay the checks against the pre-state to identify
        // the actual failing assertion and seed relevance from it.
        RelevantState initial_relevance;
        const auto& assertions = prog.assertions_at(label);
        bool found_failing = false;
        for (const auto& assertion : assertions) {
            if (ebpf_domain_check(inv_pair.pre, assertion, label)) {
                for (const auto& reg : extract_assertion_registers(assertion)) {
                    initial_relevance.registers.insert(reg);
                }
                found_failing = true;
                break;
            }
        }
        // Fallback: if no failing assertion was identified (shouldn't happen),
        // or if the failing assertion has no register deps, aggregate all assertions.
        if (!found_failing || initial_relevance.registers.empty()) {
            for (const auto& assertion : assertions) {
                for (const auto& reg : extract_assertion_registers(assertion)) {
                    initial_relevance.registers.insert(reg);
                }
            }
        }

        // Always include the failing label in the slice, even if no registers were extracted
        // (e.g., ValidCall, BoundedLoopCount assertions have no register deps)

        // `visited` tracks all explored labels for deduplication during backward traversal.
        // `slice_labels` tracks only labels that interact with relevant registers (the output slice).
        std::map<Label, RelevantState> visited;
        std::set<Label> conservative_visited; // Dedup for empty-relevance labels in conservative mode
        std::map<Label, RelevantState> slice_labels;

        // Worklist: (label, relevant_state_after_this_label)
        std::vector<std::pair<Label, RelevantState>> worklist;
        worklist.emplace_back(label, initial_relevance);

        // When the seed has no register/stack deps (e.g., BoundedLoopCount),
        // perform a conservative backward walk so the slice still shows the
        // loop structure and control flow leading to the failure.
        const bool conservative_mode = initial_relevance.registers.empty() && initial_relevance.stack_offsets.empty();

        size_t steps = 0;

        while (!worklist.empty() && steps < max_steps) {
            auto [current_label, relevant_after] = worklist.back();
            worklist.pop_back();

            // Skip if nothing is relevant — unless we're in conservative mode
            // (empty-seed assertions like BoundedLoopCount) or this is the failing label.
            if (!conservative_mode && current_label != label && relevant_after.registers.empty() &&
                relevant_after.stack_offsets.empty()) {
                continue;
            }

            // Merge with existing relevance at this label (for deduplication)
            auto& existing = visited[current_label];
            const size_t prev_size = existing.registers.size() + existing.stack_offsets.size();
            existing.registers.insert(relevant_after.registers.begin(), relevant_after.registers.end());
            existing.stack_offsets.insert(relevant_after.stack_offsets.begin(), relevant_after.stack_offsets.end());
            const size_t new_size = existing.registers.size() + existing.stack_offsets.size();

            // If no new relevance was added, skip (already processed with same or broader relevance).
            // In conservative mode with empty relevance, use a separate visited set for dedup.
            if (new_size == prev_size) {
                if (prev_size > 0) {
                    continue;
                }
                // Empty relevance (conservative mode): skip if we already visited this label
                if (!conservative_visited.insert(current_label).second) {
                    continue;
                }
            }

            // Compute what's relevant BEFORE this instruction using deps
            RelevantState relevant_before;
            const auto inv_it = invariants.find(current_label);
            if (inv_it != invariants.end() && inv_it->second.deps) {
                const auto& deps = *inv_it->second.deps;

                // Start with what's relevant after
                relevant_before = relevant_after;

                // Remove registers that are written by this instruction
                // (they weren't relevant before their definition)
                for (const auto& reg : deps.regs_written) {
                    relevant_before.registers.erase(reg);
                }

                // Remove registers that are clobbered (killed without reading).
                // These stop propagation for post-instruction uses but don't add read-deps.
                for (const auto& reg : deps.regs_clobbered) {
                    relevant_before.registers.erase(reg);
                }

                // Determine if this instruction contributes to the slice.
                // An instruction contributes when it writes to a relevant register/stack slot,
                // or when it is a control-flow decision (Jmp/Assume) that reads relevant registers.
                bool instruction_contributes = false;
                for (const auto& reg : deps.regs_written) {
                    if (relevant_after.registers.contains(reg)) {
                        instruction_contributes = true;
                        break;
                    }
                }
                for (const auto& offset : deps.stack_written) {
                    if (relevant_after.stack_offsets.contains(offset)) {
                        instruction_contributes = true;
                        break;
                    }
                }

                // Control-flow instructions (Jmp/Assume) that read relevant registers
                // contribute to the slice because they shape the path to the failure.
                if (!instruction_contributes) {
                    const auto& ins = prog.instruction_at(current_label);
                    if (std::holds_alternative<Jmp>(ins) || std::holds_alternative<Assume>(ins)) {
                        for (const auto& reg : deps.regs_read) {
                            if (relevant_after.registers.contains(reg)) {
                                instruction_contributes = true;
                                break;
                            }
                        }
                    }
                }

                // Immediate path guard: when the current label is a direct predecessor
                // of the failing label and is an Assume instruction, its condition
                // registers are causally relevant — they determine reachability.
                if (std::holds_alternative<Assume>(prog.instruction_at(current_label))) {
                    const auto& parents_of_fail = prog.cfg().parents_of(label);
                    if (std::find(parents_of_fail.begin(), parents_of_fail.end(), current_label) !=
                        parents_of_fail.end()) {
                        instruction_contributes = true;
                    }
                }

                // At the failing label, the assertion depends on registers the instruction
                // reads (e.g., base pointer r3 in a store). Since stores write to memory
                // not registers, instruction_contributes would be false without this.
                if (current_label == label) {
                    instruction_contributes = true;
                }

                // In conservative mode (empty seed, e.g., BoundedLoopCount), include all
                // reachable labels so the slice shows the loop structure and control flow.
                if (conservative_mode) {
                    instruction_contributes = true;
                }

                if (instruction_contributes) {
                    for (const auto& reg : deps.regs_read) {
                        relevant_before.registers.insert(reg);
                    }
                    for (const auto& offset : deps.stack_read) {
                        relevant_before.stack_offsets.insert(offset);
                    }
                }

                // Remove stack locations that are written by this instruction,
                // but preserve offsets that are also read (read-modify-write, e.g., Atomic).
                // Done before storing to slice_labels for consistency with register handling
                // (written registers are removed before storage, above).
                for (const auto& offset : deps.stack_written) {
                    if (!deps.stack_read.contains(offset)) {
                        relevant_before.stack_offsets.erase(offset);
                    }
                }

                if (instruction_contributes) {
                    // Only include contributing labels in the output slice.
                    // Store relevant_before so pre-invariant filtering shows the
                    // instruction's read-deps (the true upstream dependencies).
                    slice_labels[current_label] = relevant_before;
                }
            } else {
                // No deps available: conservatively treat this label as contributing
                // and propagate all current relevance to predecessors.
                relevant_before = relevant_after;
                slice_labels[current_label] = relevant_before;
            }

            // Add predecessors to worklist
            for (const auto& parent : prog.cfg().parents_of(current_label)) {
                worklist.emplace_back(parent, relevant_before);
            }

            ++steps;
        }

        // Expand join points: for any traversed label that is a join point
        // (≥2 predecessors) where at least one predecessor is already in the slice,
        // add the join-point label itself and all predecessors that have invariants.
        // This ensures the causal trace shows converging paths that cause precision loss.
        // Note: predecessors may not be in `visited` if the worklist budget was exhausted
        // before reaching them, so we check the invariant map directly.
        std::map<Label, RelevantState> join_expansion;
        for (const auto& [v_label, v_relevance] : visited) {
            const auto& parents = prog.cfg().parents_of(v_label);
            if (parents.size() < 2) {
                continue;
            }
            // Check that at least one predecessor is in the slice (this join is relevant)
            bool has_slice_parent = false;
            for (const auto& parent : parents) {
                if (slice_labels.contains(parent)) {
                    has_slice_parent = true;
                    break;
                }
            }
            if (!has_slice_parent) {
                continue;
            }
            // Include the join-point label itself so the printing code can display
            // per-predecessor state at this join.
            if (!slice_labels.contains(v_label)) {
                join_expansion[v_label] = v_relevance;
            }
            // Include all predecessors so the join display is complete.
            // Use visited relevance if available, otherwise use the join-point's relevance.
            for (const auto& parent : parents) {
                if (slice_labels.contains(parent) || join_expansion.contains(parent)) {
                    continue;
                }
                const auto& rel = visited.contains(parent) ? visited.at(parent) : v_relevance;
                join_expansion[parent] = rel;
            }
        }
        slice_labels.insert(join_expansion.begin(), join_expansion.end());

        // Build the slice from contributing labels only
        slice.relevance = std::move(slice_labels);
        slices.push_back(std::move(slice));
    }

    return slices;
}
×
628

629
} // namespace prevail
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc