• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

Alan-Jowett / ebpf-verifier / 22235569093

20 Feb 2026 02:12AM UTC coverage: 88.002% (-0.2%) from 88.157%
22235569093

push

github

web-flow
Handle Call builtins: fix handling of Falco tests (#1025)

* falco: fix raw_tracepoint privilege and group expected failures

Mark raw_tracepoint/raw_tracepoint_writable as privileged program types so Falco raw-tracepoint sections are not treated as unprivileged argument checks.

Update Falco sample matrix to move now-passing sections out of TEST_SECTION_FAIL and group the remaining expected failures by root-cause class (offset lower-bound loss vs size lower-bound loss at correlated joins).

Signed-off-by: Elazar Gershuni <elazarg@gmail.com>

* elf/unmarshal: gate builtin relocations via platform call model

Add platform hooks to resolve builtin symbols and provide builtin call contracts, thread relocation-gated builtin call offsets through ProgramInfo, and only treat static helper IDs as builtins at gated call sites.

Also extend platform-table, marshal, and YAML-platform tests to cover builtin resolver wiring and call unmarshal behavior.
* crab: canonicalize unsigned intervals in bitwise_and
When uvalue intervals temporarily carry signed lower bounds (e.g. after joins), Interval::bitwise_and asserted in debug builds. Canonicalize both operands via zero_extend(64) before unsigned bitwise reasoning, preserving soundness and avoiding debug aborts.

Validated by reproducing SIGABRT on reverted code in [falco][verify] and confirming the patched build completes with expected 73 pass / 20 failed-as-expected.

* Fix unsound bitwise_and case for non-singleton all-ones rhs

Signed-off-by: Elazar Gershuni <elazarg@gmail.com>

239 of 252 new or added lines in 9 files covered. (94.84%)

602 existing lines in 16 files now uncovered.

11743 of 13344 relevant lines covered (88.0%)

3262592.78 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

83.64
/src/ir/assertions.cpp
1
// Copyright (c) Prevail Verifier contributors.
2
// SPDX-License-Identifier: MIT
3
#include <cassert>
4
#include <cinttypes>
5
#include <utility>
6
#include <vector>
7

8
#include "ir/syntax.hpp"
9
#include "platform.hpp"
10

11
using std::string;
12
using std::to_string;
13
using std::vector;
14

15
namespace prevail {
16
class AssertExtractor {
17
    const ProgramInfo& info;
18
    const std::optional<Label>& current_label; ///< Pre-simplification label this assert is part of.
19

20
    /// Extract the immediate operand from a Value; precondition: v holds an Imm.
    static Imm imm(const Value& v) {
        return std::get<Imm>(v);
    }
21

22
    /// Assertions shared by every context-pointer operand: the register must be a
    /// ctx pointer (optionally nullable) carrying a zero offset into the context.
    static vector<Assertion> zero_offset_ctx(const Reg reg, const bool or_null) {
        const TypeGroup group = or_null ? TypeGroup::ctx_or_num : TypeGroup::ctx;
        return {
            Assertion{TypeConstraint{reg, group}},
            Assertion{ZeroCtxOffset{reg, or_null}},
        };
    }
×
28

29
    [[nodiscard]]
30
    ValidAccess make_valid_access(const Reg reg, const int32_t offset = {}, const Value& width = Imm{0},
117,456✔
31
                                  const bool or_null = {}, const AccessType access_type = {}) const {
32
        const int depth = current_label.has_value() ? current_label.value().call_stack_depth() : 1;
117,456✔
33
        return ValidAccess{depth, reg, offset, width, or_null, access_type};
117,456✔
34
    }
35

36
  public:
37
    explicit AssertExtractor(const ProgramInfo& info, const std::optional<Label>& label)
664,794✔
38
        : info{info}, current_label{label} {}
664,794✔
39

40
    vector<Assertion> operator()(const Undefined&) const {
6,036✔
41
        // assert(false);
42
        return {};
6,036✔
43
    }
44

45
    /// A loop-counter increment must be provably bounded.
    vector<Assertion> operator()(const IncrementLoopCounter& ipc) const {
        return {{BoundedLoopCount{ipc.name}}};
    }
160✔
46

47
    /// Loading a map file descriptor has no preconditions.
    vector<Assertion> operator()(const LoadMapFd&) const { return {}; }
15,072✔
48
    /// Loading a map address has no preconditions.
    vector<Assertion> operator()(const LoadMapAddress&) const { return {}; }
426✔
49
    /// Only code-address pseudo loads survive CFG construction; their type/offset
    /// semantics are handled during abstract transformation, so no assertion is
    /// produced here. Any other kind indicates a CFG-construction bug.
    vector<Assertion> operator()(const LoadPseudo& pseudo) const {
        if (pseudo.addr.kind == PseudoAddress::Kind::CODE_ADDR) {
            return {};
        }
        // VARIABLE_ADDR / MAP_BY_IDX / MAP_VALUE_BY_IDX should have been lowered away.
        assert(false && "unexpected LoadPseudo kind after CFG construction");
        return {};
    }
62

63
    /// Packet access implicitly uses R6, so verify that R6 still has a pointer to the context.
64
    /// Packet access implicitly uses R6, so verify that R6 still holds a
    /// zero-offset pointer to the context.
    vector<Assertion> operator()(const Packet&) const {
        return zero_offset_ctx(Reg{6}, false);
    }
236✔
65

66
    /// Only a top-level Exit (empty stack-frame prefix) returns to the caller of
    /// the program, which requires R0 to hold a number.
    /// NOTE(review): dereferences current_label without a has_value() check —
    /// assumes a label is always supplied for Exit; confirm at call sites.
    vector<Assertion> operator()(const Exit&) const {
        if (!current_label->stack_frame_prefix.empty()) {
            // Returning from a subprogram: no constraint on R0 here.
            return {};
        }
        return {Assertion{TypeConstraint{Reg{R0_RETURN_VALUE}, TypeGroup::number}}};
    }
×
74

75
    /// Emit the preconditions of a helper call: per-argument type constraints,
    /// map key/value validity (tied to the most recent MAP_FD argument), and
    /// memory-region validity for pointer/size argument pairs.
    vector<Assertion> operator()(const Call& call) const {
        vector<Assertion> res;
        std::optional<Reg> map_fd_reg;
        // Fix: iterate by const reference — the original `for (ArgSingle arg : ...)`
        // copies each element every iteration (clang-tidy performance-for-range-copy).
        for (const ArgSingle& arg : call.singles) {
            switch (arg.kind) {
            case ArgSingle::Kind::ANYTHING:
                // avoid pointer leakage:
                if (!info.type.is_privileged) {
                    res.emplace_back(TypeConstraint{arg.reg, TypeGroup::number});
                }
                break;
            case ArgSingle::Kind::MAP_FD_PROGRAMS:
                res.emplace_back(TypeConstraint{arg.reg, TypeGroup::map_fd_programs});
                // Do not update map_fd_reg
                break;
            case ArgSingle::Kind::MAP_FD:
                res.emplace_back(TypeConstraint{arg.reg, TypeGroup::map_fd});
                // Remember the fd register so later key/value args can reference it.
                map_fd_reg = arg.reg;
                break;
            case ArgSingle::Kind::PTR_TO_MAP_KEY:
            case ArgSingle::Kind::PTR_TO_MAP_VALUE:
                assert(map_fd_reg);
                res.emplace_back(TypeConstraint{arg.reg, TypeGroup::mem});
                res.emplace_back(ValidMapKeyValue{arg.reg, *map_fd_reg, arg.kind == ArgSingle::Kind::PTR_TO_MAP_KEY});
                break;
            case ArgSingle::Kind::PTR_TO_STACK:
                res.emplace_back(TypeConstraint{arg.reg, arg.or_null ? TypeGroup::stack_or_num : TypeGroup::stack});
                // TODO: check 0 when null
                break;
            case ArgSingle::Kind::PTR_TO_FUNC:
                res.emplace_back(TypeConstraint{arg.reg, TypeGroup::func});
                res.emplace_back(ValidCallbackTarget{arg.reg});
                break;
            case ArgSingle::Kind::PTR_TO_CTX:
                for (const Assertion& a : zero_offset_ctx(arg.reg, arg.or_null)) {
                    res.emplace_back(a);
                }
                break;
            case ArgSingle::Kind::PTR_TO_SOCKET:
                res.emplace_back(TypeConstraint{arg.reg, TypeGroup::socket});
                break;
            case ArgSingle::Kind::PTR_TO_BTF_ID:
                res.emplace_back(TypeConstraint{arg.reg, TypeGroup::btf_id});
                break;
            case ArgSingle::Kind::PTR_TO_ALLOC_MEM:
                res.emplace_back(TypeConstraint{arg.reg, TypeGroup::alloc_mem});
                break;
            case ArgSingle::Kind::PTR_TO_SPIN_LOCK:
            case ArgSingle::Kind::PTR_TO_TIMER:
                res.emplace_back(TypeConstraint{arg.reg, TypeGroup::mem});
                break;
            case ArgSingle::Kind::CONST_SIZE_OR_ZERO:
                res.emplace_back(TypeConstraint{arg.reg, TypeGroup::number});
                res.emplace_back(ValidSize{arg.reg, true});
                break;
            case ArgSingle::Kind::PTR_TO_WRITABLE_LONG:
                res.emplace_back(TypeConstraint{arg.reg, TypeGroup::mem});
                res.emplace_back(make_valid_access(arg.reg, 0, Imm{8}, false, AccessType::write));
                break;
            case ArgSingle::Kind::PTR_TO_WRITABLE_INT:
                res.emplace_back(TypeConstraint{arg.reg, TypeGroup::mem});
                res.emplace_back(make_valid_access(arg.reg, 0, Imm{4}, false, AccessType::write));
                break;
            }
        }
        for (const ArgPair& arg : call.pairs) {
            const auto group = arg.or_null ? TypeGroup::mem_or_num : TypeGroup::mem;
            const auto access_type =
                arg.kind == ArgPair::Kind::PTR_TO_READABLE_MEM ? AccessType::read : AccessType::write;
            res.emplace_back(TypeConstraint{arg.size, TypeGroup::number});
            res.emplace_back(ValidSize{arg.size, arg.can_be_zero});
            res.emplace_back(TypeConstraint{arg.mem, group});
            res.emplace_back(make_valid_access(arg.mem, 0, arg.size, arg.or_null, access_type));
            // TODO: reg is constant (or maybe it's not important)
        }
        return res;
    }
×
152

153
    /// A local (bpf-to-bpf) call has no preconditions at the call site.
    vector<Assertion> operator()(const CallLocal&) const { return {}; }
110✔
154

155
    /// An indirect call: the target register must hold a number that additionally
    /// satisfies the function constraint.
    vector<Assertion> operator()(const Callx& callx) const {
        return {
            Assertion{TypeConstraint{callx.func, TypeGroup::number}},
            Assertion{FuncConstraint{callx.func}},
        };
    }
×
161

162
    // Rejected before assertion extraction by cfg_builder::check_instruction_feature_support.
UNCOV
163
    vector<Assertion> operator()(const CallBtf&) const {
×
UNCOV
164
        assert(false && "CallBtf should be rejected before assertion extraction");
×
165
        return {};
166
    }
167

168
    [[nodiscard]]
169
    vector<Assertion> explicate(const Condition& cond) const {
48,292✔
170
        vector<Assertion> res;
48,292✔
171
        if (info.type.is_privileged) {
48,292✔
172
            return res;
9,217✔
173
        }
174
        if (const auto pimm = std::get_if<Imm>(&cond.right)) {
29,858✔
175
            if (pimm->v != 0) {
24,122✔
176
                // no need to check for valid access, it must be a number
177
                res.emplace_back(TypeConstraint{cond.left, TypeGroup::number});
8,060✔
178
            } else {
179
                bool allow_pointers = false;
16,062✔
180

181
                switch (cond.op) {
16,062✔
182
                case Condition::Op::EQ: allow_pointers = true; break;
10,939✔
183
                case Condition::Op::NE: allow_pointers = true; break;
10,649✔
184
                case Condition::Op::SET: allow_pointers = true; break;
7,211✔
185
                case Condition::Op::NSET: allow_pointers = true; break;
7,211✔
186
                case Condition::Op::LT: allow_pointers = true; break;
7,224✔
187
                case Condition::Op::LE: allow_pointers = true; break;
7,211✔
188
                case Condition::Op::GT: allow_pointers = true; break;
7,223✔
189
                case Condition::Op::GE: allow_pointers = true; break;
7,213✔
190
                case Condition::Op::SLT: allow_pointers = false; break;
796✔
191
                case Condition::Op::SLE: allow_pointers = false; break;
4✔
192
                case Condition::Op::SGT: allow_pointers = false; break;
18✔
193
                case Condition::Op::SGE: allow_pointers = false; break;
4✔
194
                default: assert(!"unexpected condition");
8,031✔
195
                }
196

197
                // Only permit pointer comparisons if the operation is 64-bit.
198
                allow_pointers &= cond.is64;
16,062✔
199

200
                if (!allow_pointers) {
16,062✔
201
                    res.emplace_back(TypeConstraint{cond.left, TypeGroup::number});
8,746✔
202
                }
203

204
                res.emplace_back(make_valid_access(cond.left));
24,093✔
205
                // OK - map_fd is just another pointer
206
                // Anything can be compared to 0
207
            }
208
        } else {
209
            const auto reg_right = get<Reg>(cond.right);
5,736✔
210
            res.emplace_back(make_valid_access(cond.left));
8,604✔
211
            res.emplace_back(make_valid_access(reg_right));
8,604✔
212
            if (cond.op != Condition::Op::EQ && cond.op != Condition::Op::NE) {
5,736✔
213
                res.emplace_back(TypeConstraint{cond.left, TypeGroup::ptr_or_num});
4,088✔
214
            }
215
            res.emplace_back(Comparable{.r1 = cond.left, .r2 = reg_right, .or_r2_is_number = false});
5,736✔
216
        }
217
        return res;
14,929✔
UNCOV
218
    }
×
219

220
    /// Implicit assumes are synthesized by the verifier itself and need no checks;
    /// explicit ones correspond to user-visible comparisons and must be explicated.
    vector<Assertion> operator()(const Assume& ins) const {
        return ins.is_implicit ? vector<Assertion>{} : explicate(ins.cond);
    }
226

227
    /// An unconditional jump has no operands to check; a conditional one is
    /// explicated like any other comparison.
    vector<Assertion> operator()(const Jmp& ins) const {
        return ins.cond ? explicate(*ins.cond) : vector<Assertion>{};
    }
233

234
    /// Preconditions for a load/store: stack accesses are statically bounds-checked
    /// here; all other accesses require a pointer base plus a ValidAccess assertion,
    /// and unprivileged stores are additionally checked against pointer leakage.
    vector<Assertion> operator()(const Mem& ins) const {
        const Reg base = ins.access.basereg;
        const int offset = ins.access.offset;
        const Imm width{gsl::narrow<uint32_t>(ins.access.width)};
        const AccessType access = ins.is_load ? AccessType::read : AccessType::write;
        vector<Assertion> res;
        if (base.v == R10_STACK_POINTER) {
            // We know we are accessing the stack; check the bounds statically.
            const bool out_of_bounds =
                offset < -EBPF_SUBPROGRAM_STACK_SIZE || offset + static_cast<int>(width.v) > 0;
            if (out_of_bounds) {
                // This assertion will fail.
                res.emplace_back(make_valid_access(base, offset, width, false, access));
            }
            // In-bounds stack loads currently get no assertion.
            // TODO: avoid load assertions: use post-assertion to check that the loaded value is initialized
            return res;
        }
        res.emplace_back(TypeConstraint{base, TypeGroup::pointer});
        res.emplace_back(make_valid_access(base, offset, width, false, access));
        if (!info.type.is_privileged && !ins.is_load) {
            // Unprivileged store: prevent leaking pointers through memory.
            if (const auto preg = std::get_if<Reg>(&ins.value)) {
                if (width.v == 8) {
                    res.emplace_back(ValidStore{base, *preg});
                } else {
                    // Narrow stores can only write numbers.
                    res.emplace_back(TypeConstraint{*preg, TypeGroup::number});
                }
            }
        }
        return res;
    }
×
266

267
    /// An atomic operation needs a numeric value operand and a valid memory cell
    /// reachable through a pointer base register.
    vector<Assertion> operator()(const Atomic& ins) const {
        const Imm width{static_cast<uint32_t>(ins.access.width)};
        return {
            Assertion{TypeConstraint{ins.valreg, TypeGroup::number}},
            Assertion{TypeConstraint{ins.access.basereg, TypeGroup::pointer}},
            Assertion{make_valid_access(ins.access.basereg, ins.access.offset, width, false)},
        };
    }
1,076✔
276

277
    /// Unary ALU operations are defined on numbers only.
    vector<Assertion> operator()(const Un& ins) const {
        return {Assertion{TypeConstraint{ins.dst, TypeGroup::number}}};
    }
3,248✔
280

281
    /// Preconditions for binary ALU operations. Pointer arithmetic is restricted to
    /// ADD/SUB under the Addable/Comparable rules; every other op requires numbers.
    vector<Assertion> operator()(const Bin& ins) const {
        // Hoisted: the register source (null when the operand is an immediate).
        const auto rhs = std::get_if<Reg>(&ins.v);
        switch (ins.op) {
        case Bin::Op::MOV:
            // Only a 32-bit register-to-register move constrains the source
            // (it must be a number); all other moves are unconstrained.
            if (rhs != nullptr && !ins.is64) {
                return {Assertion{TypeConstraint{*rhs, TypeGroup::number}}};
            }
            return {};
        case Bin::Op::MOVSX8:
        case Bin::Op::MOVSX16:
        case Bin::Op::MOVSX32:
            // Sign-extending moves are only defined on numbers.
            if (rhs != nullptr) {
                return {Assertion{TypeConstraint{*rhs, TypeGroup::number}}};
            }
            return {};
        case Bin::Op::ADD:
            if (rhs != nullptr) {
                return {Assertion{TypeConstraint{ins.dst, TypeGroup::ptr_or_num}},
                        Assertion{TypeConstraint{*rhs, TypeGroup::ptr_or_num}}, Assertion{Addable{*rhs, ins.dst}},
                        Assertion{Addable{ins.dst, *rhs}}};
            }
            return {Assertion{TypeConstraint{ins.dst, TypeGroup::ptr_or_num}}};
        case Bin::Op::SUB:
            if (rhs != nullptr) {
                // disallow map-map since same type does not mean same offset
                // TODO: map identities
                return {TypeConstraint{ins.dst, TypeGroup::ptr_or_num},
                        Comparable{.r1 = ins.dst, .r2 = *rhs, .or_r2_is_number = true}};
            }
            return {Assertion{TypeConstraint{ins.dst, TypeGroup::ptr_or_num}}};
        case Bin::Op::UDIV:
        case Bin::Op::UMOD:
        case Bin::Op::SDIV:
        case Bin::Op::SMOD:
            if (rhs != nullptr) {
                // A register divisor must additionally be validated (signedness matters).
                const bool is_signed = ins.op == Bin::Op::SDIV || ins.op == Bin::Op::SMOD;
                return {Assertion{TypeConstraint{ins.dst, TypeGroup::number}},
                        Assertion{ValidDivisor{*rhs, is_signed}}};
            }
            return {Assertion{TypeConstraint{ins.dst, TypeGroup::number}}};
        default:
            // For all other binary operations, the destination register must be a
            // number and the source must either be an immediate or a number.
            if (rhs != nullptr) {
                return {Assertion{TypeConstraint{ins.dst, TypeGroup::number}},
                        Assertion{TypeConstraint{*rhs, TypeGroup::number}}};
            }
            return {Assertion{TypeConstraint{ins.dst, TypeGroup::number}}};
        }
        assert(false); // unreachable: every case above returns
    }
337
};
338

339
/// Annotate the CFG by adding explicit assertions for all the preconditions
340
/// of any instruction. For example, jump instructions are asserted not to
341
/// compare numbers and pointers, or pointers to potentially distinct memory
342
/// regions. The verifier will use these assertions to treat the program as
343
/// unsafe unless it can prove that the assertions can never fail.
344
vector<Assertion> get_assertions(const Instruction& ins, const ProgramInfo& info, const std::optional<Label>& label) {
    const AssertExtractor extractor{info, label};
    return std::visit(extractor, ins);
}
347
} // namespace prevail
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc