• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

TyRoXx / NonlocalityOS / 22052397223

16 Feb 2026 06:24AM UTC coverage: 78.335% (+0.3%) from 78.043%
22052397223

Pull #418

github

web-flow
Merge 43453a16b into 9d1f18b24
Pull Request #418: Fix: Sometimes the storage garbage collector appears to collect new trees that are still needed

679 of 767 new or added lines in 29 files covered. (88.53%)

9 existing lines in 5 files now uncovered.

7293 of 9310 relevant lines covered (78.34%)

26517.47 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

67.42
/fuzz/fuzz_functions/src/write_read_large_files.rs
1
use astraea::{
2
    in_memory_storage::InMemoryTreeStorage,
3
    storage::StoreTree,
4
    tree::{HashedTree, Tree, TreeBlob, TreeChildren, TREE_BLOB_MAX_LENGTH},
5
};
6
use dogbox_tree_editor::{OpenFileContentBuffer, OptimizedWriteBuffer};
7
use pretty_assertions::assert_eq;
8
use serde::{Deserialize, Serialize};
9
use std::{collections::BTreeSet, sync::Arc};
10
use tokio::runtime::Runtime;
11
use tracing::info;
12

13
/// One replica under test: an in-memory tree storage paired with the open
/// file buffer that writes into it. The fuzz driver keeps several of these
/// and asserts they stay byte-identical.
struct BufferState {
    /// Backing storage for this replica; shared with the buffer via Arc clones.
    storage: Arc<InMemoryTreeStorage>,
    /// The open-file content buffer whose read/write/store behavior is fuzzed.
    buffer: OpenFileContentBuffer,
}
17

18
impl BufferState {
19
    fn new(storage: Arc<InMemoryTreeStorage>, buffer: OpenFileContentBuffer) -> Self {
3✔
20
        Self { storage, buffer }
21
    }
22
}
23

24
async fn compare_buffers(buffers: &mut [BufferState]) {
8✔
25
    assert_eq!(
26
        1,
27
        std::collections::BTreeSet::from_iter(buffers.iter().map(|buffer| buffer.buffer.size()))
28
            .len()
29
    );
30
    let mut checked = 0;
8✔
31
    let expected_size = buffers[0].buffer.size();
12✔
32
    while checked < expected_size {
149✔
33
        let mut all_read_bytes = std::collections::BTreeSet::new();
290✔
34
        let position = checked;
290✔
35
        for read_result in buffers.iter_mut().map(|buffer| {
1,305✔
36
            buffer.buffer.read(
870✔
37
                position,
435✔
38
                (expected_size - position) as usize,
435✔
39
                buffer.storage.clone(),
435✔
40
            )
41
        }) {
42
            let read_bytes = read_result.await.unwrap();
1,305✔
43
            let is_expected_to_be_new = all_read_bytes.is_empty();
1,305✔
44
            if is_expected_to_be_new {
580✔
45
                checked += read_bytes.len() as u64;
145✔
46
            }
47
            let is_new = all_read_bytes.insert(read_bytes);
1,740✔
48
            assert_eq!(is_expected_to_be_new, is_new);
49
        }
50
    }
51
    assert_eq!(expected_size, checked);
52
}
53

54
/// One step of a fuzz scenario; deserialized from the raw fuzz input via
/// postcard and replayed against all buffer replicas.
#[derive(Serialize, Deserialize, Debug)]
enum FileOperation {
    /// Write caller-provided bytes at an absolute file offset.
    Write {
        position: u32,
        data: Vec<u8>,
    },
    /// Write `size` deterministically seeded random bytes at `position`.
    WriteRandomData {
        position: u32,
        size: u32,
    },
    /// No-op; still triggers the per-operation store/reload/compare cycle.
    Nothing,
    /// Overwrite one whole TREE_BLOB_MAX_LENGTH-sized block with random bytes.
    WriteWholeBlockOfRandomData {
        block_index: u16,
    },
    /// Read one whole block and write it back at another block index.
    CopyBlock {
        from_block_index: u16,
        to_block_index: u16,
    },
    /// Flush all replicas to storage and check their digests agree.
    SaveToStorage,
}
74

75
/// A complete fuzz scenario: an operation sequence plus the buffer's
/// write-buffer capacity, both decoded from the fuzzer's byte input.
#[derive(Serialize, Deserialize, Debug)]
pub struct GeneratedTest {
    /// Operations replayed in order against every replica.
    operations: Vec<FileOperation>,
    /// Write buffer capacity (in blocks) used for every OpenFileContentBuffer.
    write_buffer_in_blocks: u8,
}
80

81
async fn write_to_all_buffers(buffers: &mut [BufferState], position: u64, data: &bytes::Bytes) {
4✔
82
    for buffer in buffers {
8✔
83
        buffer
6✔
84
            .buffer
6✔
85
            .write(
86
                position,
6✔
87
                OptimizedWriteBuffer::from_bytes(position, data.clone()).await,
24✔
88
                buffer.storage.clone(),
6✔
89
            )
90
            .await
6✔
91
            .unwrap();
92
    }
93
}
94

NEW
95
async fn read_from_all_buffers(
×
96
    buffers: &mut [BufferState],
97
    position: u64,
98
    count: usize,
99
) -> Option<bytes::Bytes> {
NEW
100
    let mut all_data_read = BTreeSet::new();
×
NEW
101
    for buffer in buffers {
×
NEW
102
        let data_read = buffer
×
NEW
103
            .buffer
×
NEW
104
            .read(position, count, buffer.storage.clone())
×
NEW
105
            .await
×
106
            .unwrap();
107
        assert!(data_read.len() <= count);
NEW
108
        all_data_read.insert(data_read);
×
109
    }
110
    assert_eq!(1, all_data_read.len());
NEW
111
    let read = all_data_read.into_iter().next().unwrap();
×
NEW
112
    if read.len() == count {
×
NEW
113
        Some(read)
×
114
    } else {
NEW
115
        None
×
116
    }
117
}
118

119
async fn save_all_buffers(buffers: &mut [BufferState]) {
2✔
120
    let mut status = BTreeSet::new();
2✔
121
    for buffer in buffers {
4✔
122
        buffer
6✔
123
            .buffer
6✔
124
            .store_all(buffer.storage.clone())
6✔
125
            .await
3✔
126
            .unwrap();
127
        status.insert(buffer.buffer.last_known_digest());
12✔
128
    }
129
    assert_eq!(1, status.len());
130
}
131

132
/// Replays a decoded fuzz scenario against three buffer replicas and checks
/// that they stay byte-identical after every operation.
///
/// Returns `false` when an operation would exceed the tested file-size cap
/// (the fuzz input is rejected early), `true` when the whole scenario
/// replayed and all final comparisons passed. Panics on any divergence.
///
/// NOTE(review): the SmallRng call order is load-bearing — reordering any
/// `small_rng.gen()` call changes the data written and breaks replay of
/// recorded crash inputs such as `crash_0`.
fn run_generated_test(test: GeneratedTest) -> bool {
    let runtime = Runtime::new().unwrap();
    runtime.block_on(async move {
        // Cap the file size so one fuzz input cannot allocate unbounded memory.
        let max_tested_file_size = TREE_BLOB_MAX_LENGTH * 128;
        use rand::rngs::SmallRng;
        use rand::Rng;
        use rand::SeedableRng;
        // Fixed seed: random data is deterministic per operation sequence.
        let mut small_rng = SmallRng::seed_from_u64(12345);

        let initial_content: Vec<u8> = Vec::new();
        let last_known_digest_file_size = initial_content.len();
        // Three replicas with distinct roles:
        //   [0] is left alone between operations,
        //   [1] is stored to its storage after every operation,
        //   [2] is stored and reloaded from storage before every operation.
        let mut buffers = Vec::new();
        for _ in 0..3 {
            let storage = Arc::new(InMemoryTreeStorage::empty());
            // Store an empty tree first so the buffer starts from a valid
            // reference in its own storage.
            let last_known_reference = storage
                .store_tree(&HashedTree::from(Arc::new(Tree::new(
                    TreeBlob::empty(),
                    TreeChildren::empty(),
                ))))
                .await
                .unwrap();
            buffers.push(BufferState::new(
                storage,
                OpenFileContentBuffer::from_data(
                    initial_content.clone(),
                    last_known_reference,
                    last_known_digest_file_size as u64,
                    test.write_buffer_in_blocks as usize,
                )
                .unwrap(),
            ));
        }

        for operation in test.operations {
            // buffers[2] is recreated from storage before every operation.
            {
                let storage = buffers[2].storage.clone();
                buffers[2].buffer.store_all(storage).await.unwrap();
                let (_digest, size, reference) = buffers[2].buffer.last_known_digest();
                buffers[2].buffer = OpenFileContentBuffer::from_storage(
                    reference,
                    size,
                    test.write_buffer_in_blocks as usize,
                );
            }

            info!("{:?}", &operation);
            match &operation {
                FileOperation::Write { position, data } => {
                    // Reject inputs that would grow the file past the cap.
                    if (*position as usize + data.len()) > max_tested_file_size {
                        return false;
                    }
                    let data = bytes::Bytes::copy_from_slice(&data[..]);
                    let position = *position as u64;
                    write_to_all_buffers(&mut buffers, position, &data).await;
                }
                FileOperation::WriteRandomData { position, size } => {
                    if (*position as usize + *size as usize) > max_tested_file_size {
                        return false;
                    }
                    let data = bytes::Bytes::from_iter((0..*size).map(|_| small_rng.gen()));
                    let position = *position as u64;
                    write_to_all_buffers(&mut buffers, position, &data).await;
                }
                FileOperation::Nothing => {}
                FileOperation::WriteWholeBlockOfRandomData { block_index } => {
                    // The end of block `block_index` must stay within the cap.
                    if ((*block_index as u64 + 1) * TREE_BLOB_MAX_LENGTH as u64)
                        > max_tested_file_size as u64
                    {
                        return false;
                    }
                    let data =
                        bytes::Bytes::from_iter((0..TREE_BLOB_MAX_LENGTH).map(|_| small_rng.gen()));
                    let position = *block_index as u64 * TREE_BLOB_MAX_LENGTH as u64;
                    write_to_all_buffers(&mut buffers, position, &data).await;
                }
                FileOperation::CopyBlock {
                    from_block_index,
                    to_block_index,
                } => {
                    // Both the source and the destination block must fit.
                    if ((*from_block_index as u64 + 1) * TREE_BLOB_MAX_LENGTH as u64)
                        > max_tested_file_size as u64
                    {
                        return false;
                    }
                    if ((*to_block_index as u64 + 1) * TREE_BLOB_MAX_LENGTH as u64)
                        > max_tested_file_size as u64
                    {
                        return false;
                    }
                    let read_position = *from_block_index as u64 * TREE_BLOB_MAX_LENGTH as u64;
                    let maybe_data =
                        read_from_all_buffers(&mut buffers, read_position, TREE_BLOB_MAX_LENGTH)
                            .await;
                    // A short read means the source block does not fully exist
                    // yet; in that case the copy is silently skipped.
                    if let Some(data) = maybe_data {
                        let write_position = *to_block_index as u64 * TREE_BLOB_MAX_LENGTH as u64;
                        write_to_all_buffers(&mut buffers, write_position, &data).await;
                    }
                }
                FileOperation::SaveToStorage => {
                    save_all_buffers(&mut buffers).await;
                }
            }

            // nothing special happens with buffers[0].

            // buffers[1] is forced into the storage after every operation.
            {
                let storage = buffers[1].storage.clone();
                buffers[1].buffer.store_all(storage).await.unwrap();
            }

            compare_buffers(&mut buffers).await;
        }

        // Final flush and comparison after the whole scenario ran.
        save_all_buffers(&mut buffers).await;
        compare_buffers(&mut buffers).await;
        true
    })
}
252

253
pub fn fuzz_function(data: &[u8]) -> bool {
1✔
254
    let generated_test = match postcard::take_from_bytes(data) {
2✔
255
        Ok((parsed, rest)) => {
2✔
256
            if rest.is_empty() {
2✔
257
                parsed
1✔
258
            } else {
NEW
259
                return false;
×
260
            }
261
        }
NEW
262
        Err(_) => return false,
×
263
    };
264
    info!("{:?}", &generated_test);
2✔
265
    run_generated_test(generated_test)
2✔
266
}
267

268
#[test]
fn crash_0() {
    // Regression input previously found by the fuzzer; it must decode and
    // replay cleanly.
    let crash_input: &[u8] = &[3, 2, 3, 16, 3, 63, 7];
    assert!(fuzz_function(crash_input));
}
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc