google / alioth / 17185293572

24 Aug 2025 06:28AM UTC coverage: 14.438% (+0.6%) from 13.887%

Pull Request #277: feat: Unix domain socket based vsock device
Merge bd92def9a into 861f19073 (GitHub, web-flow)

110 of 161 new or added lines in 11 files covered (68.32%).

3 existing lines in 3 files now uncovered.

1019 of 7058 relevant lines covered (14.44%).

18.24 hits per line

Source File

/alioth/src/virtio/queue/split.rs — 74.56% file coverage
// Copyright 2024 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#[path = "split_test.rs"]
#[cfg(test)]
mod tests;

use std::marker::PhantomData;
use std::mem::size_of;
use std::sync::atomic::{Ordering, fence};

use alioth_macros::Layout;
use bitflags::bitflags;
use zerocopy::{FromBytes, Immutable, IntoBytes};

use crate::mem::mapped::Ram;
use crate::virtio::queue::{DescChain, DescFlag, QueueReg, VirtQueue};
use crate::virtio::{Result, error};

/// A descriptor table entry of the split virtqueue.
#[repr(C, align(16))]
#[derive(Debug, Clone, Default, FromBytes, Immutable, IntoBytes)]
pub struct Desc {
    pub addr: u64,
    pub len: u32,
    pub flag: u16,
    pub next: u16,
}

bitflags! {
    /// Flags the driver may set in the available ring header.
    #[derive(Default, Debug, Clone, Copy, PartialEq, Eq, Hash)]
    pub struct AvailFlag: u16 {
        const NO_INTERRUPT = 1;
    }
}

/// Header (flags and index) of the available ring, written by the driver.
#[repr(C, align(2))]
#[derive(Debug, Clone, Layout, Immutable, FromBytes, IntoBytes)]
pub struct AvailHeader {
    flags: u16,
    idx: u16,
}

bitflags! {
    /// Flags the device may set in the used ring header.
    #[derive(Default, Debug, Clone, Copy, PartialEq, Eq, Hash)]
    pub struct UsedFlag: u16 {
        const NO_NOTIFY = 1;
    }
}

/// Header (flags and index) of the used ring, written by the device.
#[repr(C, align(4))]
#[derive(Debug, Clone, Layout)]
pub struct UsedHeader {
    flags: u16,
    idx: u16,
}

/// Used ring entry: head id of a consumed descriptor chain and the number of
/// bytes written into it.
#[repr(C)]
#[derive(Debug, Clone, Default)]
pub struct UsedElem {
    id: u32,
    len: u32,
}

/// Device-side view of a guest split virtqueue, holding raw pointers into
/// guest RAM that stay valid for the mapping lifetime `'m`.
#[derive(Debug)]
pub struct SplitQueue<'m> {
    size: u16,
    avail_hdr: *mut AvailHeader,
    avail_ring: *mut u16,
    used_event: Option<*mut u16>,
    used_hdr: *mut UsedHeader,
    used_ring: *mut UsedElem,
    avail_event: Option<*mut u16>,
    desc: *mut Desc,
    _phantom: PhantomData<&'m ()>,
}

impl SplitQueue<'_> {
    /// Reads the driver's current available index.
    pub fn avail_index(&self) -> u16 {
        unsafe { &*self.avail_hdr }.idx
    }

    /// Publishes a new used index to the driver.
    pub fn set_used_index(&self, val: u16) {
        unsafe { &mut *self.used_hdr }.idx = val;
    }

    /// Reads the driver's `used_event` field, if VIRTIO_F_EVENT_IDX was negotiated.
    pub fn used_event(&self) -> Option<u16> {
        self.used_event.map(|event| unsafe { *event })
    }

    /// Updates the `avail_event` field through `op`; returns false if
    /// VIRTIO_F_EVENT_IDX was not negotiated.
    pub fn set_avail_event(&self, op: impl FnOnce(&mut u16)) -> bool {
        match self.avail_event {
            Some(avail_event) => {
                op(unsafe { &mut *avail_event });
                true
            }
            None => false,
        }
    }

    /// Sets or clears the NO_NOTIFY flag in the used ring header.
    pub fn set_flag_notification(&self, enabled: bool) {
        unsafe { &mut *self.used_hdr }.flags = (!enabled) as _;
    }

    /// True unless the driver has set NO_INTERRUPT in the available ring header.
    pub fn flag_interrupt_enabled(&self) -> bool {
        unsafe { &*self.avail_hdr }.flags == 0
    }

    /// Bounds-checked access to the descriptor table.
    fn get_desc(&self, id: u16) -> Result<&Desc> {
        if id < self.size {
            Ok(unsafe { &*self.desc.offset(id as isize) })
        } else {
            error::InvalidDescriptor { id }.fail()
        }
    }
}

impl<'m> SplitQueue<'m> {
    /// Builds a device-side view of the queue from the addresses the driver
    /// programmed in `reg`, or returns `Ok(None)` if the queue is disabled.
    pub fn new(reg: &QueueReg, ram: &'m Ram, event_idx: bool) -> Result<Option<SplitQueue<'m>>> {
        if !reg.enabled.load(Ordering::Acquire) {
            return Ok(None);
        }
        let size = reg.size.load(Ordering::Acquire) as u64;
        let mut avail_event = None;
        let mut used_event = None;
        let used = reg.device.load(Ordering::Acquire);
        let avail = reg.driver.load(Ordering::Acquire);
        if event_idx {
            // With VIRTIO_F_EVENT_IDX, avail_event follows the used ring
            // entries and used_event follows the available ring entries.
            let avail_event_gpa =
                used + size_of::<UsedHeader>() as u64 + size * size_of::<UsedElem>() as u64;
            avail_event = Some(ram.get_ptr(avail_event_gpa)?);
            let used_event_gpa =
                avail + size_of::<AvailHeader>() as u64 + size * size_of::<u16>() as u64;
            used_event = Some(ram.get_ptr(used_event_gpa)?);
        }
        let used_hdr = ram.get_ptr::<UsedHeader>(used)?;
        let avail_ring_gpa = avail + size_of::<AvailHeader>() as u64;
        let used_ring_gpa = used + size_of::<UsedHeader>() as u64;
        let desc = reg.desc.load(Ordering::Acquire);
        Ok(Some(SplitQueue {
            size: size as u16,
            avail_hdr: ram.get_ptr(avail)?,
            avail_ring: ram.get_ptr(avail_ring_gpa)?,
            used_event,
            used_hdr,
            used_ring: ram.get_ptr(used_ring_gpa)?,
            avail_event,
            desc: ram.get_ptr(desc)?,
            _phantom: PhantomData,
        }))
    }
}

impl<'m> VirtQueue<'m> for SplitQueue<'m> {
    type Index = u16;

    const INIT_INDEX: u16 = 0;

    /// True if the descriptor chain at `index` has been made available by the
    /// driver, using a wrapping comparison against the available index.
    fn desc_avail(&self, index: u16) -> bool {
        let avail_index = self.avail_index();
        index < avail_index || index - avail_index >= !(self.size - 1)
    }

    /// Walks the descriptor chain headed at the available ring entry for
    /// `index`, following direct and indirect descriptors, and translates the
    /// guest addresses into readable and writable I/O vectors.
    fn get_avail(&self, index: Self::Index, ram: &'m Ram) -> Result<Option<DescChain<'m>>> {
        if !self.desc_avail(index) {
            return Ok(None);
        }
        let mut readable = Vec::new();
        let mut writable = Vec::new();
        let wrapped_index = index & (self.size - 1);
        let head_id = unsafe { *self.avail_ring.offset(wrapped_index as isize) };
        let mut id = head_id;
        loop {
            let desc = self.get_desc(id)?;
            let flag = DescFlag::from_bits_retain(desc.flag);
            if flag.contains(DescFlag::INDIRECT) {
                // The descriptor points at an indirect table in guest memory;
                // walk that table instead.
                let mut id = 0;
                loop {
                    let addr = desc.addr + id as u64 * size_of::<Desc>() as u64;
                    let desc: Desc = ram.read_t(addr)?;
                    let flag = DescFlag::from_bits_retain(desc.flag);
                    assert!(!flag.contains(DescFlag::INDIRECT));
                    if flag.contains(DescFlag::WRITE) {
                        writable.push((desc.addr, desc.len as u64));
                    } else {
                        readable.push((desc.addr, desc.len as u64));
                    }
                    if flag.contains(DescFlag::NEXT) {
                        id = desc.next;
                    } else {
                        break;
                    }
                }
            } else if flag.contains(DescFlag::WRITE) {
                writable.push((desc.addr, desc.len as u64));
            } else {
                readable.push((desc.addr, desc.len as u64));
            }
            if flag.contains(DescFlag::NEXT) {
                id = desc.next;
            } else {
                break;
            }
        }
        let readable = ram.translate_iov(&readable)?;
        let writable = ram.translate_iov_mut(&writable)?;
        Ok(Some(DescChain {
            id: head_id,
            delta: 1,
            readable,
            writable,
        }))
    }

    /// Writes a used element for the consumed chain and publishes the new
    /// used index after a memory fence.
    fn set_used(&self, index: Self::Index, id: u16, len: u32) {
        let used_elem = UsedElem { id: id as u32, len };
        log::info!("used_elem: {used_elem:x?}");
        let wrapped_index = index & (self.size - 1);
        unsafe { *self.used_ring.offset(wrapped_index as isize) = used_elem };
        fence(Ordering::SeqCst);
        self.set_used_index(index.wrapping_add(1));
    }

    /// Suppresses or re-enables driver notifications, preferring the
    /// event-idx mechanism and falling back to the NO_NOTIFY flag.
    fn enable_notification(&self, enabled: bool) {
        if !self.set_avail_event(|event| {
            let mut avail_index = self.avail_index();
            if enabled {
                loop {
                    *event = avail_index;
                    fence(Ordering::SeqCst);
                    let new_avail_index = self.avail_index();
                    if new_avail_index == avail_index {
                        break;
                    } else {
                        avail_index = new_avail_index;
                    }
                }
            } else {
                *event = avail_index.wrapping_sub(1);
            }
        }) {
            self.set_flag_notification(enabled);
        }
    }

    fn interrupt_enabled(&self, index: Self::Index, _: u16) -> bool {
        match self.used_event() {
            Some(used_event) => used_event == index.wrapping_sub(1),
            None => self.flag_interrupt_enabled(),
        }
    }

    fn index_add(&self, index: Self::Index, _: u16) -> Self::Index {
        index.wrapping_add(1)
    }
}
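
A note on the layout arithmetic in SplitQueue::new above: with VIRTIO_F_EVENT_IDX negotiated, used_event sits immediately after the available ring entries and avail_event immediately after the used ring entries. The following standalone sketch reproduces that address computation; the base addresses and queue size are hypothetical illustration values, not taken from this report (alioth reads them from QueueReg).

use std::mem::size_of;

// Minimal mirrors of the guest-memory structs defined in split.rs;
// only their sizes matter here.
#[allow(dead_code)]
#[repr(C, align(2))]
struct AvailHeader { flags: u16, idx: u16 }
#[allow(dead_code)]
#[repr(C, align(4))]
struct UsedHeader { flags: u16, idx: u16 }
#[allow(dead_code)]
#[repr(C)]
struct UsedElem { id: u32, len: u32 }

fn main() {
    // Hypothetical queue configuration.
    let size: u64 = 256;      // number of descriptors
    let avail: u64 = 0x1000;  // driver area (available ring) base GPA
    let used: u64 = 0x2000;   // device area (used ring) base GPA

    // used_event follows the u16 entries of the available ring.
    let used_event_gpa =
        avail + size_of::<AvailHeader>() as u64 + size * size_of::<u16>() as u64;
    // avail_event follows the UsedElem entries of the used ring.
    let avail_event_gpa =
        used + size_of::<UsedHeader>() as u64 + size * size_of::<UsedElem>() as u64;

    println!("used_event at {used_event_gpa:#x}, avail_event at {avail_event_gpa:#x}");
}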