• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

vcfxb / wright-lang / 14051529229

25 Mar 2025 04:13AM UTC coverage: 75.076% (+20.9%) from 54.177%
14051529229

push

github

vcfxb
coveralls gh action needed a version

994 of 1324 relevant lines covered (75.08%)

29.77 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

67.53
/wright/src/parser.rs
1
//! This parser module is responsible for turning the stream of [Token]s from the [Lexer] into a tree of [AST] nodes.
2
//!
3
//! [AST]: crate::ast
4
//! [Token]: crate::lexer::token::Token
5

6
use error::{ParserError, ParserErrorKind};
7

8
use super::lexer::Lexer;
9
use crate::{
10
    lexer::token::{Token, TokenTy},
11
    source_tracking::fragment::Fragment,
12
};
13
use std::collections::VecDeque;
14

15
mod decl;
16
pub mod error;
17
mod identifier;
18
mod literal;
19
mod path;
20
pub mod whitespace;
21

22
/// The [Parser] struct wraps a [Lexer] and adds lookahead and functions that are useful for parsing.
#[derive(Debug)]
pub struct Parser {
    // The underlying token source this parser consumes from.
    lexer: Lexer,
    // Tokens already pulled from the lexer but not yet consumed.
    // Front of the queue is the next token; enables arbitrary-depth lookahead.
    lookahead: VecDeque<Token>,
}
28

29
impl Parser {
30
    /// Construct a new parser around a given [Lexer].
31
    pub fn new(lexer: Lexer) -> Self {
22✔
32
        Parser {
22✔
33
            lexer,
22✔
34
            lookahead: VecDeque::new(),
22✔
35
        }
22✔
36
    }
22✔
37

38
    /// Get the next [Token] from this [Parser]. This may be a token that's already been peeked.
39
    /// 
40
    /// Skips any non-document comments encountered via the lexer implementation.
41
    /// 
42
    /// Return an error if a [Token] with [TokenTy::Unknown] is encountered.
43
    pub fn next_token(&mut self) -> Result<Option<Token>, ParserError> {
×
44
        let token = self
×
45
            .lookahead
×
46
            .pop_front()
×
47
            .or_else(|| self.lexer.next_token());
×
48

49
        // Check for unknown tokens, which should always convert to an error.
50
        match token {
×
51
            Some(Token {
52
                variant: TokenTy::Unknown,
53
                fragment,
×
54
            }) => Err(ParserErrorKind::EncounteredUnknownToken.at(fragment)),
×
55
            known_token_or_none => Ok(known_token_or_none),
×
56
        }
57
    }
×
58

59
    /// Advance this [Parser] by `n` [Token]s. If this [Parser] runs out of [Token]s, panic.
60
    ///
61
    /// Panics
62
    /// - If `n` is greater than the number of remaining tokens.
63
    pub fn advance(&mut self, n: usize) {
12✔
64
        // Add tokens to the lookahead buffer until we have enough to split off.
65
        while self.lookahead.len() < n {
12✔
66
            let token = self
×
67
                .lexer
×
68
                .next_token()
×
69
                .expect("advance: `n` <= number of remaining tokens");
×
70

×
71
            self.lookahead.push_back(token);
×
72
        }
×
73

74
        // Split them off.
75
        self.lookahead = self.lookahead.split_off(n);
12✔
76
    }
12✔
77

78
    /// Peek at the next token from the [Lexer] (cached in the lookahead queue if peeked before).
79
    pub fn peek(&mut self) -> Option<&Token> {
73✔
80
        if self.lookahead.is_empty() {
73✔
81
            self.lookahead.push_back(self.lexer.next_token()?);
34✔
82
        }
39✔
83

84
        self.lookahead.front()
71✔
85
    }
73✔
86

87
    /// Peek the [Fragment] of the next [Token].
88
    pub fn peek_fragment(&mut self) -> Option<&Fragment> {
×
89
        self.peek().map(|token| &token.fragment)
×
90
    }
×
91

92
    /// Peek the [Fragment] of the next [Token] and clone it or return a clone of the
93
    /// remainder [Fragment] of the internal [Lexer]
94
    /// (which will be empty, since there wasn't a [Token] to peek).
95
    ///
96
    /// This is likely only useful for error reporting -- a clone of a potentially empty fragment is
97
    /// rarely ever useful otherwise.
98
    pub fn peek_fragment_or_rest_cloned(&mut self) -> Fragment {
14✔
99
        match self.peek() {
14✔
100
            Some(Token { fragment, .. }) => fragment.clone(),
13✔
101
            None => {
102
                let rest = self.lexer.remaining.clone();
1✔
103

1✔
104
                // Assert that we're making the right assumptions about the remaining fragment.
1✔
105
                // These are (unidiomatically) done using debug_assert -- perhaps that changes eventually
1✔
106
                // however it should be fine for now, since this can only produce logic bugs (never memory or
1✔
107
                // concurrency bugs).
1✔
108
                debug_assert!(rest.is_valid());
1✔
109
                debug_assert!(rest.is_empty());
1✔
110
                debug_assert!(rest.is_empty_at_end_of_source());
1✔
111

112
                rest
1✔
113
            }
114
        }
115
    }
14✔
116

117
    /// Get the [Lexer] that's wrapped.
118
    pub fn lexer(&self) -> &Lexer {
1✔
119
        &self.lexer
1✔
120
    }
1✔
121

122
    /// Lookahead `k` [Token]s.
123
    ///
124
    /// If `k == 0` then this is effectively peeking at the next [Token] from the wrapped [Lexer].
125
    pub fn lookahead(&mut self, k: usize) -> Option<&Token> {
×
126
        while self.lookahead.len() <= k {
×
127
            self.lookahead.push_back(self.lexer.next_token()?);
×
128
        }
129

130
        self.lookahead.get(k)
×
131
    }
×
132

133
    /// Similar to [Parser::lookahead] but instead returns a slice of `n` [Token]s, starting with the next [Token].
134
    ///
135
    /// Returns [None] if `n` is greater than the number of remaining [Token]s for this [Parser].
136
    pub fn lookahead_window(&mut self, n: usize) -> Option<&[Token]> {
59✔
137
        while self.lookahead.len() < n {
92✔
138
            self.lookahead.push_back(self.lexer.next_token()?);
63✔
139
        }
140

141
        // Use make contiguous here to get a unified/single slice.
142
        Some(&self.lookahead.make_contiguous()[..n])
29✔
143
    }
59✔
144

145
    /// Get the next [Token] from this parser if its [Token::variant] is the given `token_ty`.
146
    pub fn next_if_is(&mut self, token_ty: TokenTy) -> Option<Token> {
47✔
147
        // Peeking successfully first means that the lookahead vec will never be empty here.
47✔
148
        (self.peek()?.variant == token_ty)
47✔
149
            // SAFETY: We just peeked a token to check its variant so this unwrap is always ok.
46✔
150
            .then(|| unsafe { self.lookahead.pop_front().unwrap_unchecked() })
46✔
151
    }
47✔
152

153
    /// Peek at the next [Token]s of this [Parser] and determine if the [Token::variant]s match this
154
    /// sequence of [TokenTy]s.
155
    pub fn matches(&mut self, seq: &[TokenTy]) -> bool {
59✔
156
        // Use the rare let-else to ensure there are at minimum, the given number of tokens remaining.
157
        let Some(lookahead_window) = self.lookahead_window(seq.len()) else {
59✔
158
            return false;
30✔
159
        };
160

161
        // Use a zipped iterator to compare all the token variants.
162
        lookahead_window
29✔
163
            .iter()
29✔
164
            .zip(seq)
29✔
165
            .all(|(token, matches)| token.variant == *matches)
53✔
166
    }
59✔
167
}
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc