• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

mattwparas / steel / 18461079395

13 Oct 2025 09:20AM UTC coverage: 42.731% (-0.9%) from 43.668%
18461079395

Pull #536

github

web-flow
Merge 6f55a8b56 into e378cba22
Pull Request #536: Initial proposal for no_std support

63 of 755 new or added lines in 38 files covered. (8.34%)

73 existing lines in 15 files now uncovered.

12324 of 28841 relevant lines covered (42.73%)

3215759.81 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

12.5
/crates/steel-core/tests/no_std_suite.rs
1
#![cfg_attr(all(not(feature = "std"), test), no_std)]
2
#![cfg_attr(all(not(feature = "std"), test), no_main)]
3

4
#[cfg(all(not(feature = "std"), test))]
5
extern crate alloc;
6
#[cfg(all(not(feature = "std"), test))]
7
use alloc::vec;
8
#[cfg(all(not(feature = "std"), test))]
9
use core::ptr::null;
10

11
#[cfg(all(not(feature = "std"), test))]
12
use steel::core::instructions::{disassemble, u24, Instruction};
13
#[cfg(all(not(feature = "std"), test))]
14
use steel::core::labels::fresh as fresh_label;
15
#[cfg(all(not(feature = "std"), test))]
16
use steel::core::opcode::OpCode;
17

18
#[cfg(all(not(feature = "std"), target_arch = "wasm32", test))]
19
#[allow(static_mut_refs)]
20
mod alloc_support {
21
    use core::alloc::{GlobalAlloc, Layout};
22
    use core::sync::atomic::{AtomicUsize, Ordering};
23

24
    pub struct BumpAllocator;
25

26
    const HEAP_SIZE: usize = 1024 * 1024; // 1 MiB scratch space for tests.
27
    static mut HEAP: [u8; HEAP_SIZE] = [0; HEAP_SIZE];
28
    static NEXT: AtomicUsize = AtomicUsize::new(0);
29

30
    unsafe impl GlobalAlloc for BumpAllocator {
31
        unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
32
            let align_mask = layout.align().saturating_sub(1);
33
            let size = layout.size();
34
            let mut current = NEXT.load(Ordering::Relaxed);
35

36
            loop {
37
                let aligned = (current + align_mask) & !align_mask;
38
                let new_next = match aligned.checked_add(size) {
39
                    Some(value) => value,
40
                    None => return core::ptr::null_mut(),
41
                };
42

43
                if new_next > HEAP_SIZE {
44
                    return core::ptr::null_mut();
45
                }
46

47
                match NEXT.compare_exchange(current, new_next, Ordering::SeqCst, Ordering::Relaxed)
48
                {
49
                    Ok(_) => unsafe { return HEAP.as_mut_ptr().add(aligned) },
50
                    Err(previous) => current = previous,
51
                }
52
            }
53
        }
54

55
        unsafe fn dealloc(&self, _ptr: *mut u8, _layout: Layout) {
56
            // bump allocator: leak allocations for the duration of the test run
57
        }
58
    }
59

60
    #[global_allocator]
61
    pub static ALLOCATOR: BumpAllocator = BumpAllocator;
62
}
63

64
#[cfg(all(not(feature = "std"), not(target_arch = "wasm32"), test))]
65
#[allow(static_mut_refs)]
66
mod alloc_support {
67
    use core::alloc::{GlobalAlloc, Layout};
68
    use core::sync::atomic::{AtomicUsize, Ordering};
69

70
    pub struct BumpAllocator;
71

72
    const HEAP_SIZE: usize = 1024 * 1024; // 1 MiB scratch space for tests.
73
    static mut HEAP: [u8; HEAP_SIZE] = [0; HEAP_SIZE];
74
    static NEXT: AtomicUsize = AtomicUsize::new(0);
75

76
    unsafe impl GlobalAlloc for BumpAllocator {
77
        unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
78
            let align_mask = layout.align().saturating_sub(1);
79
            let size = layout.size();
80
            let mut current = NEXT.load(Ordering::Relaxed);
81

82
            loop {
83
                let aligned = (current + align_mask) & !align_mask;
84
                let new_next = match aligned.checked_add(size) {
85
                    Some(value) => value,
86
                    None => return core::ptr::null_mut(),
87
                };
88

89
                if new_next > HEAP_SIZE {
90
                    return core::ptr::null_mut();
91
                }
92

93
                match NEXT.compare_exchange(current, new_next, Ordering::SeqCst, Ordering::Relaxed)
94
                {
95
                    Ok(_) => unsafe { return HEAP.as_mut_ptr().add(aligned) },
96
                    Err(previous) => current = previous,
97
                }
98
            }
99
        }
100

101
        unsafe fn dealloc(&self, _ptr: *mut u8, _layout: Layout) {
102
            // bump allocator: leak allocations for the duration of the test build
103
        }
104
    }
105

106
    #[global_allocator]
107
    pub static ALLOCATOR: BumpAllocator = BumpAllocator;
108
}
109

110
/// Entry point invoked by a custom wasm runner.
/// Executes every registered no_std test in order; a panicking test
/// traps, so the runner observes the failure as an abnormal exit.
#[cfg(all(not(feature = "std"), test))]
#[no_mangle]
pub extern "C" fn run() -> i32 {
    named().iter().for_each(|&(_, test)| test());
    0
}
120

121
#[cfg(all(not(feature = "std"), test))]
#[panic_handler]
fn panic(_info: &core::panic::PanicInfo<'_>) -> ! {
    // On wasm32, trap so the host runner observes a non-zero exit.
    #[cfg(target_arch = "wasm32")]
    {
        core::arch::wasm32::unreachable()
    }

    // Elsewhere there is no host to report to; park forever.
    #[cfg(not(target_arch = "wasm32"))]
    {
        loop {}
    }
}
131

132
/// Round-trips representative values — including both 24-bit extremes —
/// through `u24` and back.
#[cfg(all(not(feature = "std"), test))]
fn u24_roundtrip() {
    for n in [0u32, 1, 255, 256, 65_535, 1 << 20, (1 << 24) - 1] {
        assert_eq!(u24::from_u32(n).to_u32(), n);
    }
}
140

141
/// The textual disassembly of a single ADD instruction should mention
/// both the mnemonic and its payload.
#[cfg(all(not(feature = "std"), test))]
fn disassemble_contains_opcode() {
    let program = vec![Instruction::new_from_parts(OpCode::ADD, u24::from_u32(2), None)];
    let listing = disassemble(&program);
    assert!(listing.contains("ADD"));
    assert!(listing.contains("2"));
}
148

149
/// Two consecutively generated labels must differ.
/// NOTE(review): despite the name, monotonic ordering is not asserted
/// here — confirm whether labels are `Ord` before tightening this test.
#[cfg(all(not(feature = "std"), test))]
fn labels_are_unique_and_increasing() {
    let first = fresh_label();
    let second = fresh_label();
    assert!(first != second);
}
155

156
// ---- Named test API for per-test execution ----

/// Signature shared by every registered no_std test.
#[cfg(all(not(feature = "std"), test))]
type TestFn = fn();

/// Registry mapping test names to their entry points; the exported
/// `test_*` functions below are thin views over this table.
#[inline(always)]
#[cfg(all(not(feature = "std"), test))]
fn named() -> &'static [(&'static str, TestFn)] {
    static TABLE: [(&str, TestFn); 3] = [
        ("u24_roundtrip", u24_roundtrip),
        ("disassemble_contains_opcode", disassemble_contains_opcode),
        ("labels_are_unique_and_increasing", labels_are_unique_and_increasing),
    ];
    &TABLE
}
176

177
/// ABI: number of registered no_std tests.
#[cfg(all(not(feature = "std"), test))]
#[no_mangle]
pub extern "C" fn test_count() -> i32 {
    let total = named().len();
    total as i32
}
182

183
/// ABI: pointer to the UTF-8 name of test `i`, or null when `i` is out
/// of range. Pair with `test_name_len` to recover the full string.
#[cfg(all(not(feature = "std"), test))]
#[no_mangle]
pub extern "C" fn test_name_ptr(i: i32) -> *const u8 {
    match named().get(i as usize) {
        Some(&(name, _)) => name.as_ptr(),
        None => null(),
    }
}
191

192
/// ABI: byte length of the name of test `i`, or 0 when `i` is out of range.
#[cfg(all(not(feature = "std"), test))]
#[no_mangle]
pub extern "C" fn test_name_len(i: i32) -> i32 {
    named().get(i as usize).map_or(0, |&(name, _)| name.len() as i32)
}
200

201
/// ABI: run test `i`. Returns 0 on success, -1 when `i` is out of range.
/// A panicking test never returns (it traps via the panic handler).
#[cfg(all(not(feature = "std"), test))]
#[no_mangle]
pub extern "C" fn test_run_index(i: i32) -> i32 {
    // Guard the index like test_name_ptr/test_name_len do. The original
    // indexed unchecked, so a bad index from the host panicked into the
    // no_std panic handler — an infinite `loop {}` on non-wasm targets.
    let Some(&(_, f)) = named().get(i as usize) else {
        return -1;
    };
    f();
    0
}
210

211
// Host (`std`) builds compile this harness=false file too; give them a
// no-op `main` so the binary links and nextest simply ignores it.
#[cfg(not(all(not(feature = "std"), test)))]
fn main() {
    // Intentionally empty: the real entry points are the extern "C"
    // exports above, which are active only in no_std test builds.
}
2✔
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2025 Coveralls, Inc