TritonVM / triton-vm / 11573599342

29 Oct 2024 12:13PM UTC coverage: 98.329% (-0.04%) from 98.367%
Build 11573599342 · push · github · jan-ferdinand
perf: Reduce prover's space requirements

Only generate exactly as much randomness as is needed to achieve
zero-knowledge. In particular, instead of storing a full copy of the
trace plus an equally large amount of randomness, the trace is stored
exactly once, and the randomness is “spliced in” for any operation that
requires the randomized trace.
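
To illustrate the idea, here is a minimal, hypothetical sketch (the names and
types below are not triton-vm's actual API, a plain `u64` stands in for a field
element, and the randomness is simply appended for brevity): the trace and a
short randomness buffer are stored separately, and an accessor splices them
together on demand, so the full randomized column is never materialized.

```rust
// Hypothetical sketch only; names and types do not come from triton-vm.
// The trace column is stored exactly once, next to a short randomness buffer.
// Reads of the "randomized trace" splice the two together on the fly, so no
// second, randomness-extended copy of the column is ever allocated.
struct RandomizedColumn<'a> {
    trace: &'a [u64],      // the full trace column, stored once
    randomness: &'a [u64], // only as much randomness as zero-knowledge needs
}

impl RandomizedColumn<'_> {
    /// Length of the (virtual) randomized column.
    fn len(&self) -> usize {
        self.trace.len() + self.randomness.len()
    }

    /// Element `index` of the randomized column, computed without
    /// materializing that column.
    fn get(&self, index: usize) -> u64 {
        if index < self.trace.len() {
            self.trace[index]
        } else {
            self.randomness[index - self.trace.len()]
        }
    }
}

fn main() {
    let trace = [1, 2, 3, 4];
    let randomness = [99, 98]; // pretend these were sampled uniformly at random
    let column = RandomizedColumn { trace: &trace, randomness: &randomness };
    let spliced: Vec<u64> = (0..column.len()).map(|i| column.get(i)).collect();
    assert_eq!(spliced, vec![1, 2, 3, 4, 99, 98]);
}
```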

The merged branch contains a slew of changes for the memory-efficient
code path, making it _even_ more memory efficient, by a factor of
almost 2 (!). The caching code path also becomes more efficient, but
only slightly.

The changes also introduce performance gains for the prover of between
10% (caching path) and 15% (memory-efficient path).

Co-authored-by: Alan <alan@neptune.cash>

756 of 768 new or added lines in 7 files covered. (98.44%)

21 existing lines in 4 files now uncovered.

24304 of 24717 relevant lines covered (98.33%)

6148621.94 hits per line

Source File: /triton-constraint-builder/src/codegen.rs (87.36%)
1
//! The various tables' constraints are very inefficient to evaluate if they live in RAM.
2
//! Instead, the build script turns them into Rust code, which is then optimized by rustc.
3

4
use std::cell::RefCell;
5
use std::collections::HashSet;
6
use std::rc::Rc;
7

8
use constraint_circuit::BinOp;
9
use constraint_circuit::CircuitExpression;
10
use constraint_circuit::ConstraintCircuit;
11
use constraint_circuit::InputIndicator;
12
use isa::instruction::Instruction;
13
use isa::op_stack::NumberOfWords;
14
use itertools::Itertools;
15
use proc_macro2::TokenStream;
16
use quote::format_ident;
17
use quote::quote;
18
use quote::ToTokens;
19
use twenty_first::prelude::x_field_element::EXTENSION_DEGREE;
20
use twenty_first::prelude::*;
21

22
use crate::Constraints;
23

24
pub trait Codegen {
25
    fn constraint_evaluation_code(constraints: &Constraints) -> TokenStream;
26

27
    fn tokenize_bfe(bfe: BFieldElement) -> TokenStream {
415✔
28
        let raw_u64 = bfe.raw_u64();
415✔
29
        quote!(BFieldElement::from_raw_u64(#raw_u64))
415✔
30
    }
415✔
31

32
    fn tokenize_xfe(xfe: XFieldElement) -> TokenStream {
1✔
33
        let [c_0, c_1, c_2] = xfe.coefficients.map(Self::tokenize_bfe);
1✔
34
        quote!(XFieldElement::new([#c_0, #c_1, #c_2]))
1✔
35
    }
1✔
36
}
37

38
#[derive(Debug, Default, Clone, Eq, PartialEq)]
39
pub struct RustBackend {
40
    /// All [circuit] IDs known to be in scope.
41
    ///
42
    /// [circuit]: triton_vm::table::circuit::ConstraintCircuit
43
    scope: HashSet<usize>,
44
}
45

46
#[derive(Debug, Default, Clone, Eq, PartialEq)]
47
pub struct TasmBackend {
48
    /// All [circuit] IDs known to be processed and stored to memory.
49
    ///
50
    /// [circuit]: triton_vm::table::circuit::ConstraintCircuit
51
    scope: HashSet<usize>,
52

53
    /// The number of elements written to the output list.
54
    elements_written: usize,
55

56
    /// Whether the code that is to be generated can assume statically provided
57
    /// addresses for the various input arrays.
58
    input_location_is_static: bool,
59
}
60

61
impl Codegen for RustBackend {
62
    fn constraint_evaluation_code(constraints: &Constraints) -> TokenStream {
1✔
63
        let num_init_constraints = constraints.init.len();
1✔
64
        let num_cons_constraints = constraints.cons.len();
1✔
65
        let num_tran_constraints = constraints.tran.len();
1✔
66
        let num_term_constraints = constraints.term.len();
1✔
67

1✔
68
        let (init_constraint_degrees, init_constraints_bfe, init_constraints_xfe) =
1✔
69
            Self::tokenize_circuits(&constraints.init());
1✔
70
        let (cons_constraint_degrees, cons_constraints_bfe, cons_constraints_xfe) =
1✔
71
            Self::tokenize_circuits(&constraints.cons());
1✔
72
        let (tran_constraint_degrees, tran_constraints_bfe, tran_constraints_xfe) =
1✔
73
            Self::tokenize_circuits(&constraints.tran());
1✔
74
        let (term_constraint_degrees, term_constraints_bfe, term_constraints_xfe) =
1✔
75
            Self::tokenize_circuits(&constraints.term());
1✔
76

1✔
77
        let evaluable_over_base_field = Self::generate_evaluable_implementation_over_field(
1✔
78
            &init_constraints_bfe,
1✔
79
            &cons_constraints_bfe,
1✔
80
            &tran_constraints_bfe,
1✔
81
            &term_constraints_bfe,
1✔
82
            quote!(BFieldElement),
1✔
83
        );
1✔
84
        let evaluable_over_ext_field = Self::generate_evaluable_implementation_over_field(
1✔
85
            &init_constraints_xfe,
1✔
86
            &cons_constraints_xfe,
1✔
87
            &tran_constraints_xfe,
1✔
88
            &term_constraints_xfe,
1✔
89
            quote!(XFieldElement),
1✔
90
        );
1✔
91

1✔
92
        let quotient_trait_impl = quote!(
1✔
93
        impl MasterAuxTable {
1✔
94
            pub const NUM_INITIAL_CONSTRAINTS: usize = #num_init_constraints;
1✔
95
            pub const NUM_CONSISTENCY_CONSTRAINTS: usize = #num_cons_constraints;
1✔
96
            pub const NUM_TRANSITION_CONSTRAINTS: usize = #num_tran_constraints;
1✔
97
            pub const NUM_TERMINAL_CONSTRAINTS: usize = #num_term_constraints;
1✔
98
            pub const NUM_CONSTRAINTS: usize = Self::NUM_INITIAL_CONSTRAINTS
1✔
99
                + Self::NUM_CONSISTENCY_CONSTRAINTS
1✔
100
                + Self::NUM_TRANSITION_CONSTRAINTS
1✔
101
                + Self::NUM_TERMINAL_CONSTRAINTS;
1✔
102

1✔
103
            #[allow(unused_variables)]
1✔
104
            pub fn initial_quotient_degree_bounds(interpolant_degree: isize) -> Vec<isize> {
1✔
105
                let zerofier_degree = 1;
1✔
106
                [#init_constraint_degrees].to_vec()
1✔
107
            }
1✔
108

1✔
109
            #[allow(unused_variables)]
1✔
110
            pub fn consistency_quotient_degree_bounds(
1✔
111
                interpolant_degree: isize,
1✔
112
                padded_height: usize,
1✔
113
            ) -> Vec<isize> {
1✔
114
                let zerofier_degree = padded_height as isize;
1✔
115
                [#cons_constraint_degrees].to_vec()
1✔
116
            }
1✔
117

1✔
118
            #[allow(unused_variables)]
1✔
119
            pub fn transition_quotient_degree_bounds(
1✔
120
                interpolant_degree: isize,
1✔
121
                padded_height: usize,
1✔
122
            ) -> Vec<isize> {
1✔
123
                let zerofier_degree = padded_height as isize - 1;
1✔
124
                [#tran_constraint_degrees].to_vec()
1✔
125
            }
1✔
126

1✔
127
            #[allow(unused_variables)]
1✔
128
            pub fn terminal_quotient_degree_bounds(interpolant_degree: isize) -> Vec<isize> {
1✔
129
                let zerofier_degree = 1;
1✔
130
                [#term_constraint_degrees].to_vec()
1✔
131
            }
1✔
132
        }
1✔
133
        );
1✔
134

1✔
135
        quote!(
1✔
136
            #evaluable_over_base_field
1✔
137
            #evaluable_over_ext_field
1✔
138
            #quotient_trait_impl
1✔
139
        )
1✔
140
    }
1✔
141
}
142

143
impl RustBackend {
144
    fn generate_evaluable_implementation_over_field(
2✔
145
        init_constraints: &TokenStream,
2✔
146
        cons_constraints: &TokenStream,
2✔
147
        tran_constraints: &TokenStream,
2✔
148
        term_constraints: &TokenStream,
2✔
149
        field: TokenStream,
2✔
150
    ) -> TokenStream {
2✔
151
        quote!(
2✔
152
        impl Evaluable<#field> for MasterAuxTable {
2✔
153
            #[allow(unused_variables)]
2✔
154
            fn evaluate_initial_constraints(
2✔
155
                main_row: ArrayView1<#field>,
2✔
156
                aux_row: ArrayView1<XFieldElement>,
2✔
157
                challenges: &Challenges,
2✔
158
            ) -> Vec<XFieldElement> {
2✔
159
                #init_constraints
2✔
160
            }
2✔
161

2✔
162
            #[allow(unused_variables)]
2✔
163
            fn evaluate_consistency_constraints(
2✔
164
                main_row: ArrayView1<#field>,
2✔
165
                aux_row: ArrayView1<XFieldElement>,
2✔
166
                challenges: &Challenges,
2✔
167
            ) -> Vec<XFieldElement> {
2✔
168
                #cons_constraints
2✔
169
            }
2✔
170

2✔
171
            #[allow(unused_variables)]
2✔
172
            fn evaluate_transition_constraints(
2✔
173
                current_main_row: ArrayView1<#field>,
2✔
174
                current_aux_row: ArrayView1<XFieldElement>,
2✔
175
                next_main_row: ArrayView1<#field>,
2✔
176
                next_aux_row: ArrayView1<XFieldElement>,
2✔
177
                challenges: &Challenges,
2✔
178
            ) -> Vec<XFieldElement> {
2✔
179
                #tran_constraints
2✔
180
            }
2✔
181

2✔
182
            #[allow(unused_variables)]
2✔
183
            fn evaluate_terminal_constraints(
2✔
184
                main_row: ArrayView1<#field>,
2✔
185
                aux_row: ArrayView1<XFieldElement>,
2✔
186
                challenges: &Challenges,
2✔
187
            ) -> Vec<XFieldElement> {
2✔
188
                #term_constraints
2✔
189
            }
2✔
190
        }
2✔
191
        )
2✔
192
    }
2✔
193

194
    /// Return a tuple of [`TokenStream`]s corresponding to code evaluating these constraints as
195
    /// well as their degrees. In particular:
196
    /// 1. The first stream contains code that, when evaluated, produces the constraints' degrees,
197
    /// 1. the second stream contains code that, when evaluated, produces the constraints' values,
198
    ///    with the input type for the main row being `BFieldElement`, and
199
    /// 1. the third stream is like the second, except that the input type for the main row is
200
    ///    `XFieldElement`.
201
    fn tokenize_circuits<II: InputIndicator>(
4✔
202
        constraints: &[ConstraintCircuit<II>],
4✔
203
    ) -> (TokenStream, TokenStream, TokenStream) {
4✔
204
        if constraints.is_empty() {
4✔
205
            return (quote!(), quote!(vec![]), quote!(vec![]));
3✔
206
        }
1✔
207

1✔
208
        let mut backend = Self::default();
1✔
209
        let shared_declarations = backend.declare_shared_nodes(constraints);
1✔
210
        let (main_constraints, aux_constraints): (Vec<_>, Vec<_>) = constraints
1✔
211
            .iter()
1✔
212
            .partition(|constraint| constraint.evaluates_to_base_element());
1✔
213

1✔
214
        // The order of the constraints' degrees must match the order of the constraints.
1✔
215
        // Hence, listing the degrees is only possible after the partition into main and auxiliary
1✔
216
        // constraints is known.
1✔
217
        let tokenized_degree_bounds = main_constraints
1✔
218
            .iter()
1✔
219
            .chain(&aux_constraints)
1✔
220
            .map(|circuit| match circuit.degree() {
1✔
221
                d if d > 1 => quote!(interpolant_degree * #d - zerofier_degree),
1✔
222
                1 => quote!(interpolant_degree - zerofier_degree),
1✔
223
                _ => panic!("Constraint degree must be positive"),
×
224
            })
1✔
225
            .collect_vec();
1✔
226
        let tokenized_degree_bounds = quote!(#(#tokenized_degree_bounds),*);
1✔
227

228
        let tokenize_constraint_evaluation = |constraints: &[&ConstraintCircuit<II>]| {
2✔
229
            constraints
2✔
230
                .iter()
2✔
231
                .map(|constraint| backend.evaluate_single_node(constraint))
2✔
232
                .collect_vec()
2✔
233
        };
2✔
234
        let tokenized_main_constraints = tokenize_constraint_evaluation(&main_constraints);
1✔
235
        let tokenized_aux_constraints = tokenize_constraint_evaluation(&aux_constraints);
1✔
236

237
        // If there are no main constraints, the type needs to be explicitly declared.
238
        let tokenized_bfe_main_constraints = match main_constraints.is_empty() {
1✔
239
            true => quote!(let main_constraints: [BFieldElement; 0] = []),
1✔
240
            false => quote!(let main_constraints = [#(#tokenized_main_constraints),*]),
×
241
        };
242
        let tokenized_bfe_constraints = quote!(
1✔
243
            #(#shared_declarations)*
1✔
244
            #tokenized_bfe_main_constraints;
1✔
245
            let aux_constraints = [#(#tokenized_aux_constraints),*];
1✔
246
            main_constraints
1✔
247
                .into_iter()
1✔
248
                .map(|bfe| bfe.lift())
1✔
249
                .chain(aux_constraints)
1✔
250
                .collect()
1✔
251
        );
1✔
252

253
        let tokenized_xfe_constraints = quote!(
1✔
254
            #(#shared_declarations)*
1✔
255
            let main_constraints = [#(#tokenized_main_constraints),*];
1✔
256
            let aux_constraints = [#(#tokenized_aux_constraints),*];
1✔
257
            main_constraints
1✔
258
                .into_iter()
1✔
259
                .chain(aux_constraints)
1✔
260
                .collect()
1✔
261
        );
1✔
262

263
        (
1✔
264
            tokenized_degree_bounds,
1✔
265
            tokenized_bfe_constraints,
1✔
266
            tokenized_xfe_constraints,
1✔
267
        )
1✔
268
    }
4✔
269

270
    /// Declare all shared variables, i.e., those with a ref count greater than 1.
271
    /// These declarations must be made starting from the highest ref count.
272
    /// Otherwise, the resulting code will refer to bindings that have not yet been made.
273
    fn declare_shared_nodes<II: InputIndicator>(
1✔
274
        &mut self,
1✔
275
        constraints: &[ConstraintCircuit<II>],
1✔
276
    ) -> Vec<TokenStream> {
1✔
277
        let constraints_iter = constraints.iter();
1✔
278
        let all_ref_counts = constraints_iter.flat_map(ConstraintCircuit::all_ref_counters);
1✔
279
        let relevant_ref_counts = all_ref_counts.unique().filter(|&x| x > 1);
1✔
280
        let ordered_ref_counts = relevant_ref_counts.sorted().rev();
1✔
281

1✔
282
        ordered_ref_counts
1✔
283
            .map(|count| self.declare_nodes_with_ref_count(constraints, count))
1✔
284
            .collect()
1✔
285
    }
1✔
286

287
    /// Produce the code that evaluates all nodes sharing a given ref count.
288
    fn declare_nodes_with_ref_count<II: InputIndicator>(
×
289
        &mut self,
×
290
        circuits: &[ConstraintCircuit<II>],
×
291
        ref_count: usize,
×
292
    ) -> TokenStream {
×
293
        let all_nodes_in_circuit =
×
294
            |circuit| self.declare_single_node_with_ref_count(circuit, ref_count);
×
295
        let tokenized_circuits = circuits.iter().filter_map(all_nodes_in_circuit);
×
296
        quote!(#(#tokenized_circuits)*)
×
297
    }
×
298

299
    fn declare_single_node_with_ref_count<II: InputIndicator>(
×
300
        &mut self,
×
301
        circuit: &ConstraintCircuit<II>,
×
302
        ref_count: usize,
×
303
    ) -> Option<TokenStream> {
×
304
        if self.scope.contains(&circuit.id) {
×
305
            return None;
×
306
        }
×
307

308
        // constants can be declared trivially
309
        let CircuitExpression::BinOp(_, lhs, rhs) = &circuit.expression else {
×
310
            return None;
×
311
        };
312

313
        if circuit.ref_count < ref_count {
×
314
            let out_left = self.declare_single_node_with_ref_count(&lhs.borrow(), ref_count);
×
315
            let out_right = self.declare_single_node_with_ref_count(&rhs.borrow(), ref_count);
×
316
            return match (out_left, out_right) {
×
317
                (None, None) => None,
×
318
                (Some(l), None) => Some(l),
×
319
                (None, Some(r)) => Some(r),
×
320
                (Some(l), Some(r)) => Some(quote!(#l #r)),
×
321
            };
322
        }
×
323

×
324
        assert_eq!(circuit.ref_count, ref_count);
×
325
        let binding_name = Self::binding_name(circuit);
×
326
        let evaluation = self.evaluate_single_node(circuit);
×
327
        let new_binding = quote!(let #binding_name = #evaluation;);
×
328

×
329
        let is_new_insertion = self.scope.insert(circuit.id);
×
330
        assert!(is_new_insertion);
×
331

332
        Some(new_binding)
×
333
    }
×
334

335
    /// Recursively construct the code for evaluating a single node.
336
    pub fn evaluate_single_node<II: InputIndicator>(
2,890✔
337
        &self,
2,890✔
338
        circuit: &ConstraintCircuit<II>,
2,890✔
339
    ) -> TokenStream {
2,890✔
340
        if self.scope.contains(&circuit.id) {
2,890✔
341
            return Self::binding_name(circuit);
×
342
        }
2,890✔
343

344
        let CircuitExpression::BinOp(binop, lhs, rhs) = &circuit.expression else {
2,890✔
345
            return Self::binding_name(circuit);
1,580✔
346
        };
347

348
        let lhs = self.evaluate_single_node(&lhs.borrow());
1,310✔
349
        let rhs = self.evaluate_single_node(&rhs.borrow());
1,310✔
350
        quote!((#lhs) #binop (#rhs))
1,310✔
351
    }
2,890✔
352

353
    fn binding_name<II: InputIndicator>(circuit: &ConstraintCircuit<II>) -> TokenStream {
1,580✔
354
        match &circuit.expression {
1,580✔
355
            CircuitExpression::BConst(bfe) => Self::tokenize_bfe(*bfe),
411✔
356
            CircuitExpression::XConst(xfe) => Self::tokenize_xfe(*xfe),
×
357
            CircuitExpression::Input(idx) => quote!(#idx),
928✔
358
            CircuitExpression::Challenge(challenge) => quote!(challenges[#challenge]),
241✔
359
            CircuitExpression::BinOp(_, _, _) => {
360
                let node_ident = format_ident!("node_{}", circuit.id);
×
361
                quote!(#node_ident)
×
362
            }
363
        }
364
    }
1,580✔
365
}
366

367
/// The minimal required size of a memory page in [`BFieldElement`]s.
368
pub const MEM_PAGE_SIZE: usize = 1 << 32;
369

370
/// An offset from the [memory layout][layout]'s `free_mem_page_ptr`, in number of
371
/// [`XFieldElement`]s. Indicates the start of the to-be-returned array.
372
///
373
/// [layout]: memory_layout::IntegralMemoryLayout
374
const OUT_ARRAY_OFFSET: usize = {
375
    let max_num_words_for_evaluated_constraints = 1 << 16; // magic!
376
    let out_array_offset_in_words = MEM_PAGE_SIZE - max_num_words_for_evaluated_constraints;
377
    assert!(out_array_offset_in_words % EXTENSION_DEGREE == 0);
378
    out_array_offset_in_words / EXTENSION_DEGREE
379
};
380

381
/// Convenience macro to get raw opcodes of any [`Instruction`] variant, including its argument if
382
/// applicable.
383
///
384
/// [labelled]: triton_vm::instruction::LabelledInstruction::Instruction
385
macro_rules! instr {
386
    ($($instr:tt)*) => {{
387
        let instr = Instruction::$($instr)*;
388
        let opcode = u64::from(instr.opcode());
389
        match instr.arg().map(|arg| arg.value()) {
30✔
390
            Some(arg) => vec![quote!(#opcode), quote!(#arg)],
391
            None => vec![quote!(#opcode)],
392
        }
393
    }};
394
}
395

396
/// Convenience macro to get raw opcode of a [`Push`][push] instruction including its argument.
397
///
398
/// [push]: triton_vm::instruction::AnInstruction::Push
399
macro_rules! push {
400
    ($arg:ident) => {{
401
        let opcode = u64::from(Instruction::Push(BFieldElement::new(0)).opcode());
402
        let arg = u64::from($arg);
403
        vec![quote!(#opcode), quote!(#arg)]
404
    }};
405
    ($list:ident + $offset:expr) => {{
406
        let opcode = u64::from(Instruction::Push(BFieldElement::new(0)).opcode());
407
        let offset = u64::try_from($offset).unwrap();
408
        assert!(offset < u64::MAX - BFieldElement::P);
409
        // clippy will complain about the generated code if it contains `+ 0`
410
        if offset == 0 {
411
            vec![quote!(#opcode), quote!(#$list)]
412
        } else {
413
            vec![quote!(#opcode), quote!(#$list + #offset)]
414
        }
415
    }};
416
}
417

418
impl Codegen for TasmBackend {
419
    /// Emits a function that emits [Triton assembly][tasm] that evaluates Triton VM's AIR
420
    /// constraints over the [extension field][XFieldElement].
421
    ///
422
    /// [tasm]: isa::triton_asm
423
    fn constraint_evaluation_code(constraints: &Constraints) -> TokenStream {
1✔
424
        let doc_comment = Self::doc_comment_static_version();
1✔
425

1✔
426
        let mut backend = Self::statically_known_input_locations();
1✔
427
        let init_constraints = backend.tokenize_circuits(&constraints.init());
1✔
428
        let cons_constraints = backend.tokenize_circuits(&constraints.cons());
1✔
429
        let tran_constraints = backend.tokenize_circuits(&constraints.tran());
1✔
430
        let term_constraints = backend.tokenize_circuits(&constraints.term());
1✔
431
        let prepare_return_values = Self::prepare_return_values();
1✔
432
        let num_instructions = init_constraints.len()
1✔
433
            + cons_constraints.len()
1✔
434
            + tran_constraints.len()
1✔
435
            + term_constraints.len()
1✔
436
            + prepare_return_values.len();
1✔
437
        let num_instructions = u64::try_from(num_instructions).unwrap();
1✔
438

1✔
439
        let convert_and_decode_assembled_instructions = quote!(
1✔
440
            let raw_instructions = raw_instructions
1✔
441
                .into_iter()
1✔
442
                .map(BFieldElement::new)
1✔
443
                .collect::<Vec<_>>();
1✔
444
            let program = Program::decode(&raw_instructions).unwrap();
1✔
445

1✔
446
            let irrelevant_label = |_: &_| String::new();
1✔
447
            program
1✔
448
                .into_iter()
1✔
449
                .map(|instruction| instruction.map_call_address(irrelevant_label))
1✔
450
                .map(LabelledInstruction::Instruction)
1✔
451
                .collect()
1✔
452
        );
1✔
453

454
        let statically_known_input_locations = quote!(
1✔
455
            #[doc = #doc_comment]
1✔
456
            pub fn static_air_constraint_evaluation_tasm(
1✔
457
                mem_layout: StaticTasmConstraintEvaluationMemoryLayout,
1✔
458
            ) -> Vec<LabelledInstruction> {
1✔
459
                let free_mem_page_ptr = mem_layout.free_mem_page_ptr.value();
1✔
460
                let curr_main_row_ptr = mem_layout.curr_main_row_ptr.value();
1✔
461
                let curr_aux_row_ptr = mem_layout.curr_aux_row_ptr.value();
1✔
462
                let next_main_row_ptr = mem_layout.next_main_row_ptr.value();
1✔
463
                let next_aux_row_ptr = mem_layout.next_aux_row_ptr.value();
1✔
464
                let challenges_ptr = mem_layout.challenges_ptr.value();
1✔
465

1✔
466
                let raw_instructions = vec![
1✔
467
                    #num_instructions,
1✔
468
                    #(#init_constraints,)*
1✔
469
                    #(#cons_constraints,)*
1✔
470
                    #(#tran_constraints,)*
1✔
471
                    #(#term_constraints,)*
1✔
472
                    #(#prepare_return_values,)*
1✔
473
                ];
1✔
474
                #convert_and_decode_assembled_instructions
1✔
475
            }
1✔
476
        );
1✔
477

478
        let doc_comment = Self::doc_comment_dynamic_version();
1✔
479

1✔
480
        let mut backend = Self::dynamically_known_input_locations();
1✔
481
        let move_row_pointers = backend.write_row_pointers_to_ram();
1✔
482
        let init_constraints = backend.tokenize_circuits(&constraints.init());
1✔
483
        let cons_constraints = backend.tokenize_circuits(&constraints.cons());
1✔
484
        let tran_constraints = backend.tokenize_circuits(&constraints.tran());
1✔
485
        let term_constraints = backend.tokenize_circuits(&constraints.term());
1✔
486
        let prepare_return_values = Self::prepare_return_values();
1✔
487
        let num_instructions = move_row_pointers.len()
1✔
488
            + init_constraints.len()
1✔
489
            + cons_constraints.len()
1✔
490
            + tran_constraints.len()
1✔
491
            + term_constraints.len()
1✔
492
            + prepare_return_values.len();
1✔
493
        let num_instructions = u64::try_from(num_instructions).unwrap();
1✔
494

495
        let dynamically_known_input_locations = quote!(
1✔
496
            #[doc = #doc_comment]
1✔
497
            pub fn dynamic_air_constraint_evaluation_tasm(
1✔
498
                mem_layout: DynamicTasmConstraintEvaluationMemoryLayout,
1✔
499
            ) -> Vec<LabelledInstruction> {
1✔
500
                let num_pointer_pointers = 4;
1✔
501
                let free_mem_page_ptr = mem_layout.free_mem_page_ptr.value() + num_pointer_pointers;
1✔
502
                let curr_main_row_ptr = mem_layout.free_mem_page_ptr.value();
1✔
503
                let curr_aux_row_ptr = mem_layout.free_mem_page_ptr.value() + 1;
1✔
504
                let next_main_row_ptr = mem_layout.free_mem_page_ptr.value() + 2;
1✔
505
                let next_aux_row_ptr = mem_layout.free_mem_page_ptr.value() + 3;
1✔
506
                let challenges_ptr = mem_layout.challenges_ptr.value();
1✔
507

1✔
508
                let raw_instructions = vec![
1✔
509
                    #num_instructions,
1✔
510
                    #(#move_row_pointers,)*
1✔
511
                    #(#init_constraints,)*
1✔
512
                    #(#cons_constraints,)*
1✔
513
                    #(#tran_constraints,)*
1✔
514
                    #(#term_constraints,)*
1✔
515
                    #(#prepare_return_values,)*
1✔
516
                ];
1✔
517
                #convert_and_decode_assembled_instructions
1✔
518
            }
1✔
519
        );
1✔
520

521
        let uses = Self::uses();
1✔
522
        quote!(
1✔
523
            #uses
1✔
524
            #statically_known_input_locations
1✔
525
            #dynamically_known_input_locations
1✔
526
        )
1✔
527
    }
1✔
528
}
529

530
impl TasmBackend {
531
    fn statically_known_input_locations() -> Self {
2✔
532
        Self {
2✔
533
            scope: HashSet::new(),
2✔
534
            elements_written: 0,
2✔
535
            input_location_is_static: true,
2✔
536
        }
2✔
537
    }
2✔
538

539
    fn dynamically_known_input_locations() -> Self {
1✔
540
        Self {
1✔
541
            input_location_is_static: false,
1✔
542
            ..Self::statically_known_input_locations()
1✔
543
        }
1✔
544
    }
1✔
545

546
    fn uses() -> TokenStream {
1✔
547
        quote!(
1✔
548
            use twenty_first::prelude::BFieldCodec;
1✔
549
            use twenty_first::prelude::BFieldElement;
1✔
550
            use isa::instruction::LabelledInstruction;
1✔
551
            use isa::program::Program;
1✔
552
            use crate::memory_layout::StaticTasmConstraintEvaluationMemoryLayout;
1✔
553
            use crate::memory_layout::DynamicTasmConstraintEvaluationMemoryLayout;
1✔
554
        )
1✔
555
    }
1✔
556

557
    fn doc_comment_static_version() -> &'static str {
1✔
558
        "
1✔
559
         The emitted Triton assembly has the following signature:
1✔
560

1✔
561
         # Signature
1✔
562

1✔
563
         ```text
1✔
564
         BEFORE: _
1✔
565
         AFTER:  _ *evaluated_constraints
1✔
566
         ```
1✔
567
         # Requirements
1✔
568

1✔
569
         In order for this method to emit Triton assembly, various memory regions need to be
1✔
570
         declared. This is done through [`StaticTasmConstraintEvaluationMemoryLayout`]. The memory
1✔
571
         layout must be [integral].
1✔
572

1✔
573
         # Guarantees
1✔
574

1✔
575
         - The emitted code does not declare any labels.
1✔
576
         - The emitted code is “straight-line”, _i.e._, does not contain any of the instructions
1✔
577
           `call`, `return`, `recurse`, `recurse_or_return`, or `skiz`.
1✔
578
         - The emitted code does not contain instruction `halt`.
1✔
579
         - All memory write access of the emitted code is within the bounds of the memory region
1✔
580
           pointed to by `*free_memory_page`.
1✔
581
         - `*evaluated_constraints` points to an array of [`XFieldElement`][xfe]s of length
1✔
582
            [`NUM_CONSTRAINTS`][total]. Each element is the evaluation of one constraint. In
1✔
583
            particular, the disjoint sequence of slices containing
1✔
584
            [`NUM_INITIAL_CONSTRAINTS`][init], [`NUM_CONSISTENCY_CONSTRAINTS`][cons],
1✔
585
            [`NUM_TRANSITION_CONSTRAINTS`][tran], and [`NUM_TERMINAL_CONSTRAINTS`][term]
1✔
586
            (respectively and in this order) correspond to the evaluations of the initial,
1✔
587
            consistency, transition, and terminal constraints.
1✔
588

1✔
589
         [integral]: crate::memory_layout::IntegralMemoryLayout::is_integral
1✔
590
         [xfe]: twenty_first::prelude::XFieldElement
1✔
591
         [total]: crate::table::master_table::MasterAuxTable::NUM_CONSTRAINTS
1✔
592
         [init]: crate::table::master_table::MasterAuxTable::NUM_INITIAL_CONSTRAINTS
1✔
593
         [cons]: crate::table::master_table::MasterAuxTable::NUM_CONSISTENCY_CONSTRAINTS
1✔
594
         [tran]: crate::table::master_table::MasterAuxTable::NUM_TRANSITION_CONSTRAINTS
1✔
595
         [term]: crate::table::master_table::MasterAuxTable::NUM_TERMINAL_CONSTRAINTS
1✔
596
        "
1✔
597
    }
1✔
598

599
    fn doc_comment_dynamic_version() -> &'static str {
1✔
600
        "
1✔
601
         The emitted Triton assembly has the following signature:
1✔
602

1✔
603
         # Signature
1✔
604

1✔
605
         ```text
1✔
606
         BEFORE: _ *current_main_row *current_aux_row *next_main_row *next_aux_row
1✔
607
         AFTER:  _ *evaluated_constraints
1✔
608
         ```
1✔
609
         # Requirements
1✔
610

1✔
611
         In order for this method to emit Triton assembly, various memory regions need to be
1✔
612
         declared. This is done through [`DynamicTasmConstraintEvaluationMemoryLayout`]. The memory
1✔
613
         layout must be [integral].
1✔
614

1✔
615
         # Guarantees
1✔
616

1✔
617
         - The emitted code does not declare any labels.
1✔
618
         - The emitted code is “straight-line”, _i.e._, does not contain any of the instructions
1✔
619
           `call`, `return`, `recurse`, `recurse_or_return`, or `skiz`.
1✔
620
         - The emitted code does not contain instruction `halt`.
1✔
621
         - All memory write access of the emitted code is within the bounds of the memory region
1✔
622
           pointed to by `*free_memory_page`.
1✔
623
         - `*evaluated_constraints` points to an array of [`XFieldElement`][xfe]s of length
1✔
624
            [`NUM_CONSTRAINTS`][total]. Each element is the evaluation of one constraint. In
1✔
625
            particular, the disjoint sequence of slices containing
1✔
626
            [`NUM_INITIAL_CONSTRAINTS`][init], [`NUM_CONSISTENCY_CONSTRAINTS`][cons],
1✔
627
            [`NUM_TRANSITION_CONSTRAINTS`][tran], and [`NUM_TERMINAL_CONSTRAINTS`][term]
1✔
628
            (respectively and in this order) correspond to the evaluations of the initial,
1✔
629
            consistency, transition, and terminal constraints.
1✔
630

1✔
631
         [integral]: crate::memory_layout::IntegralMemoryLayout::is_integral
1✔
632
         [xfe]: twenty_first::prelude::XFieldElement
1✔
633
         [total]: crate::table::master_table::MasterAuxTable::NUM_CONSTRAINTS
1✔
634
         [init]: crate::table::master_table::MasterAuxTable::NUM_INITIAL_CONSTRAINTS
1✔
635
         [cons]: crate::table::master_table::MasterAuxTable::NUM_CONSISTENCY_CONSTRAINTS
1✔
636
         [tran]: crate::table::master_table::MasterAuxTable::NUM_TRANSITION_CONSTRAINTS
1✔
637
         [term]: crate::table::master_table::MasterAuxTable::NUM_TERMINAL_CONSTRAINTS
1✔
638
        "
1✔
639
    }
1✔
640

641
    /// Moves the dynamic arguments ({current, next} {main, aux} row pointers)
642
    /// to static addresses dedicated to them.
643
    fn write_row_pointers_to_ram(&self) -> Vec<TokenStream> {
1✔
644
        // BEFORE: _ *current_main_row *current_aux_row *next_main_row *next_aux_row
1✔
645
        // AFTER: _
1✔
646

1✔
647
        let write_pointer_to_ram = |list_id| {
4✔
648
            [
4✔
649
                push!(list_id + 0),
4✔
650
                instr!(WriteMem(NumberOfWords::N1)),
4✔
651
                instr!(Pop(NumberOfWords::N1)),
4✔
652
            ]
653
            .concat()
4✔
654
        };
4✔
655

656
        [
1✔
657
            IOList::NextAuxRow,
1✔
658
            IOList::NextMainRow,
1✔
659
            IOList::CurrAuxRow,
1✔
660
            IOList::CurrMainRow,
1✔
661
        ]
1✔
662
        .into_iter()
1✔
663
        .flat_map(write_pointer_to_ram)
1✔
664
        .collect()
1✔
665
    }
1✔
666

667
    fn tokenize_circuits<II: InputIndicator>(
8✔
668
        &mut self,
8✔
669
        constraints: &[ConstraintCircuit<II>],
8✔
670
    ) -> Vec<TokenStream> {
8✔
671
        self.scope = HashSet::new();
8✔
672
        let store_shared_nodes = self.store_all_shared_nodes(constraints);
8✔
673

8✔
674
        // to match the `RustBackend`, main constraints must be emitted first
8✔
675
        let (main_constraints, aux_constraints): (Vec<_>, Vec<_>) = constraints
8✔
676
            .iter()
8✔
677
            .partition(|constraint| constraint.evaluates_to_base_element());
8✔
678
        let sorted_constraints = main_constraints.into_iter().chain(aux_constraints);
8✔
679
        let write_to_output = sorted_constraints
8✔
680
            .map(|c| self.write_evaluated_constraint_into_output_list(c))
8✔
681
            .concat();
8✔
682

8✔
683
        [store_shared_nodes, write_to_output].concat()
8✔
684
    }
8✔
685

686
    fn store_all_shared_nodes<II: InputIndicator>(
8✔
687
        &mut self,
8✔
688
        constraints: &[ConstraintCircuit<II>],
8✔
689
    ) -> Vec<TokenStream> {
8✔
690
        let ref_counts = constraints.iter().flat_map(|c| c.all_ref_counters());
8✔
691
        let relevant_ref_counts = ref_counts.sorted().unique().filter(|&c| c > 1).rev();
8✔
692
        relevant_ref_counts
8✔
693
            .map(|count| self.store_all_shared_nodes_of_ref_count(constraints, count))
8✔
694
            .concat()
8✔
695
    }
8✔
696

697
    fn store_all_shared_nodes_of_ref_count<II: InputIndicator>(
×
698
        &mut self,
×
699
        constraints: &[ConstraintCircuit<II>],
×
700
        count: usize,
×
701
    ) -> Vec<TokenStream> {
×
702
        constraints
×
703
            .iter()
×
704
            .map(|c| self.store_single_shared_node_of_ref_count(c, count))
×
705
            .concat()
×
706
    }
×
707

708
    fn store_single_shared_node_of_ref_count<II: InputIndicator>(
×
709
        &mut self,
×
710
        constraint: &ConstraintCircuit<II>,
×
711
        ref_count: usize,
×
712
    ) -> Vec<TokenStream> {
×
713
        if self.scope.contains(&constraint.id) {
×
714
            return vec![];
×
715
        }
×
716

717
        // Nodes that are not binary operations are already in scope as inputs
718
        // or challenges, or they are constants.
719
        let CircuitExpression::BinOp(_, lhs, rhs) = &constraint.expression else {
×
720
            return vec![];
×
721
        };
722

723
        if constraint.ref_count < ref_count {
×
724
            let out_left = self.store_single_shared_node_of_ref_count(&lhs.borrow(), ref_count);
×
725
            let out_right = self.store_single_shared_node_of_ref_count(&rhs.borrow(), ref_count);
×
726
            return [out_left, out_right].concat();
×
727
        }
×
728

×
729
        assert_eq!(constraint.ref_count, ref_count);
×
730
        let evaluate = self.evaluate_single_node(constraint);
×
731
        let store = Self::store_ext_field_element(constraint.id);
×
732
        let is_new_insertion = self.scope.insert(constraint.id);
×
733
        assert!(is_new_insertion);
×
734

735
        [evaluate, store].concat()
×
736
    }
×
737

738
    fn evaluate_single_node<II: InputIndicator>(
18✔
739
        &self,
18✔
740
        constraint: &ConstraintCircuit<II>,
18✔
741
    ) -> Vec<TokenStream> {
18✔
742
        if self.scope.contains(&constraint.id) {
18✔
743
            return self.load_node(constraint);
×
744
        }
18✔
745

746
        let CircuitExpression::BinOp(binop, lhs, rhs) = &constraint.expression else {
18✔
747
            return self.load_node(constraint);
10✔
748
        };
749

750
        let tokenized_lhs = self.evaluate_single_node(&lhs.borrow());
8✔
751
        let tokenized_rhs = self.evaluate_single_node(&rhs.borrow());
8✔
752
        let tokenized_binop = match binop {
8✔
753
            BinOp::Add => instr!(XxAdd),
2✔
754
            BinOp::Mul => instr!(XxMul),
6✔
755
        };
756

757
        // Use more efficient instructions if one side is a base field element.
758
        // Applying domain-specific knowledge, `CircuitExpression::Input`s can
759
        // never be base field elements as the verifier only evaluates the
760
        // constraints on out-of-domain rows. The TASM backend is only intended
761
        // for verification.
762
        let extract_bfe_const =
8✔
763
            |circuit: &Rc<RefCell<ConstraintCircuit<II>>>| match circuit.borrow().expression {
16✔
764
                CircuitExpression::BConst(bfe) => Some(bfe),
4✔
765
                _ => None,
12✔
766
            };
16✔
767

768
        match (binop, extract_bfe_const(lhs), extract_bfe_const(rhs)) {
8✔
769
            (_, Some(_), Some(_)) => {
770
                panic!("Constant folding should have eliminated this binary operation")
×
771
            }
772
            (_, None, None) => [tokenized_lhs, tokenized_rhs, tokenized_binop].concat(),
4✔
773
            (BinOp::Add, None, Some(c)) => [tokenized_lhs, instr!(AddI(c))].concat(),
×
774
            (BinOp::Add, Some(c), None) => [tokenized_rhs, instr!(AddI(c))].concat(),
×
775
            (BinOp::Mul, None, Some(_)) => [tokenized_lhs, tokenized_rhs, instr!(XbMul)].concat(),
2✔
776
            (BinOp::Mul, Some(_), None) => [tokenized_rhs, tokenized_lhs, instr!(XbMul)].concat(),
2✔
777
        }
778
    }
18✔
779

780
    fn write_evaluated_constraint_into_output_list<II: InputIndicator>(
2✔
781
        &mut self,
2✔
782
        constraint: &ConstraintCircuit<II>,
2✔
783
    ) -> Vec<TokenStream> {
2✔
784
        let evaluated_constraint = self.evaluate_single_node(constraint);
2✔
785
        let element_index = OUT_ARRAY_OFFSET + self.elements_written;
2✔
786
        let store_element = Self::store_ext_field_element(element_index);
2✔
787
        self.elements_written += 1;
2✔
788
        [evaluated_constraint, store_element].concat()
2✔
789
    }
2✔
790

791
    fn load_node<II: InputIndicator>(&self, circuit: &ConstraintCircuit<II>) -> Vec<TokenStream> {
10✔
792
        match circuit.expression {
10✔
793
            CircuitExpression::BConst(bfe) => push!(bfe),
4✔
794
            CircuitExpression::XConst(xfe) => {
×
795
                let [c0, c1, c2] = xfe.coefficients.map(|c| push!(c));
×
796
                [c2, c1, c0].concat()
×
797
            }
798
            CircuitExpression::Input(input) => self.load_input(input),
4✔
799
            CircuitExpression::Challenge(challenge_idx) => Self::load_challenge(challenge_idx),
2✔
800
            CircuitExpression::BinOp(_, _, _) => Self::load_evaluated_bin_op(circuit.id),
×
801
        }
802
    }
10✔
803

804
    fn load_input<II: InputIndicator>(&self, input: II) -> Vec<TokenStream> {
4✔
805
        let list = match (input.is_current_row(), input.is_main_table_column()) {
4✔
806
            (true, true) => IOList::CurrMainRow,
2✔
807
            (true, false) => IOList::CurrAuxRow,
2✔
808
            (false, true) => IOList::NextMainRow,
×
809
            (false, false) => IOList::NextAuxRow,
×
810
        };
811
        if self.input_location_is_static {
4✔
812
            Self::load_ext_field_element_from_list(list, input.column())
2✔
813
        } else {
814
            Self::load_ext_field_element_from_pointed_to_list(list, input.column())
2✔
815
        }
816
    }
4✔
817

818
    fn load_challenge(challenge_idx: usize) -> Vec<TokenStream> {
2✔
819
        Self::load_ext_field_element_from_list(IOList::Challenges, challenge_idx)
2✔
820
    }
2✔
821

822
    fn load_evaluated_bin_op(node_id: usize) -> Vec<TokenStream> {
×
823
        Self::load_ext_field_element_from_list(IOList::FreeMemPage, node_id)
×
824
    }
×
825

826
    fn load_ext_field_element_from_list(list: IOList, element_index: usize) -> Vec<TokenStream> {
4✔
827
        let word_index = Self::element_index_to_word_index_for_reading(element_index);
4✔
828

4✔
829
        [
4✔
830
            push!(list + word_index),
4✔
831
            instr!(ReadMem(NumberOfWords::N3)),
4✔
832
            instr!(Pop(NumberOfWords::N1)),
4✔
833
        ]
834
        .concat()
4✔
835
    }
4✔
836

837
    fn load_ext_field_element_from_pointed_to_list(
2✔
838
        list: IOList,
2✔
839
        element_index: usize,
2✔
840
    ) -> Vec<TokenStream> {
2✔
841
        let word_index = Self::element_index_to_word_index_for_reading(element_index);
2✔
842

2✔
843
        [
2✔
844
            push!(list + 0),
2✔
845
            instr!(ReadMem(NumberOfWords::N1)),
2✔
846
            instr!(Pop(NumberOfWords::N1)),
2✔
847
            instr!(AddI(word_index)),
2✔
848
            instr!(ReadMem(NumberOfWords::N3)),
2✔
849
            instr!(Pop(NumberOfWords::N1)),
2✔
850
        ]
851
        .concat()
2✔
852
    }
2✔
853

854
    fn element_index_to_word_index_for_reading(element_index: usize) -> BFieldElement {
6✔
855
        let word_offset = element_index * EXTENSION_DEGREE;
6✔
856
        let start_to_read_offset = EXTENSION_DEGREE - 1;
6✔
857
        let word_index = word_offset + start_to_read_offset;
6✔
858
        bfe!(u64::try_from(word_index).unwrap())
6✔
859
    }
6✔
860

861
    fn store_ext_field_element(element_index: usize) -> Vec<TokenStream> {
2✔
862
        let free_mem_page = IOList::FreeMemPage;
2✔
863

2✔
864
        let word_offset = element_index * EXTENSION_DEGREE;
2✔
865
        let word_index = u64::try_from(word_offset).unwrap();
2✔
866

867
        let push_address = push!(free_mem_page + word_index);
2✔
868
        let write_mem = instr!(WriteMem(NumberOfWords::N3));
2✔
869
        let pop = instr!(Pop(NumberOfWords::N1));
2✔
870

871
        [push_address, write_mem, pop].concat()
2✔
872
    }
2✔
873

874
    fn prepare_return_values() -> Vec<TokenStream> {
2✔
875
        let free_mem_page = IOList::FreeMemPage;
2✔
876
        let out_array_offset_in_num_bfes = OUT_ARRAY_OFFSET * EXTENSION_DEGREE;
2✔
877
        let out_array_offset = u64::try_from(out_array_offset_in_num_bfes).unwrap();
2✔
878
        push!(free_mem_page + out_array_offset)
2✔
879
    }
2✔
880
}
881

882
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
883
enum IOList {
884
    FreeMemPage,
885
    CurrMainRow,
886
    CurrAuxRow,
887
    NextMainRow,
888
    NextAuxRow,
889
    Challenges,
890
}
891

892
impl ToTokens for IOList {
893
    fn to_tokens(&self, tokens: &mut TokenStream) {
14✔
894
        match self {
14✔
895
            IOList::FreeMemPage => tokens.extend(quote!(free_mem_page_ptr)),
4✔
896
            IOList::CurrMainRow => tokens.extend(quote!(curr_main_row_ptr)),
3✔
897
            IOList::CurrAuxRow => tokens.extend(quote!(curr_aux_row_ptr)),
3✔
898
            IOList::NextMainRow => tokens.extend(quote!(next_main_row_ptr)),
1✔
899
            IOList::NextAuxRow => tokens.extend(quote!(next_aux_row_ptr)),
1✔
900
            IOList::Challenges => tokens.extend(quote!(challenges_ptr)),
2✔
901
        }
902
    }
14✔
903
}
904

905
#[cfg(test)]
906
mod tests {
907
    use constraint_circuit::ConstraintCircuitBuilder;
908
    use constraint_circuit::SingleRowIndicator;
909
    use twenty_first::prelude::*;
910

911
    use super::*;
912

913
    pub(crate) fn mini_constraints() -> Constraints {
2✔
914
        let circuit_builder = ConstraintCircuitBuilder::new();
2✔
915
        let challenge = |c: usize| circuit_builder.challenge(c);
2✔
916
        let constant = |c: u32| circuit_builder.x_constant(c);
2✔
917
        let main_row = |i| circuit_builder.input(SingleRowIndicator::Main(i));
2✔
918
        let aux_row = |i| circuit_builder.input(SingleRowIndicator::Aux(i));
2✔
919

920
        let constraint = main_row(0) * challenge(3) - aux_row(1) * constant(42);
2✔
921

2✔
922
        Constraints {
2✔
923
            init: vec![constraint],
2✔
924
            cons: vec![],
2✔
925
            tran: vec![],
2✔
926
            term: vec![],
2✔
927
        }
2✔
928
    }
2✔
929

930
    pub fn print_constraints<B: Codegen>(constraints: &Constraints) {
2✔
931
        let code = B::constraint_evaluation_code(constraints);
2✔
932
        let syntax_tree = syn::parse2(code).unwrap();
2✔
933
        let code = prettyplease::unparse(&syntax_tree);
2✔
934
        println!("{code}");
2✔
935
    }
2✔
936

937
    #[test]
938
    fn tokenizing_base_field_elements_produces_expected_result() {
1✔
939
        let bfe = bfe!(42);
1✔
940
        let expected = "BFieldElement :: from_raw_u64 (180388626390u64)";
1✔
941
        assert_eq!(expected, RustBackend::tokenize_bfe(bfe).to_string());
1✔
942
    }
1✔
943

944
    #[test]
945
    fn tokenizing_extension_field_elements_produces_expected_result() {
1✔
946
        let xfe = xfe!([42, 43, 44]);
1✔
947
        let expected = "XFieldElement :: new ([\
1✔
948
            BFieldElement :: from_raw_u64 (180388626390u64) , \
1✔
949
            BFieldElement :: from_raw_u64 (184683593685u64) , \
1✔
950
            BFieldElement :: from_raw_u64 (188978560980u64)\
1✔
951
        ])";
1✔
952
        assert_eq!(expected, RustBackend::tokenize_xfe(xfe).to_string());
1✔
953
    }
1✔
954

955
    #[test]
956
    fn print_mini_constraints_rust() {
1✔
957
        print_constraints::<RustBackend>(&mini_constraints());
1✔
958
    }
1✔
959

960
    #[test]
961
    fn print_mini_constraints_tasm() {
1✔
962
        print_constraints::<TasmBackend>(&mini_constraints());
1✔
963
    }
1✔
964
}