• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

SPF-OST / trnsys-dck-parser / 8924936794

02 May 2024 01:45PM UTC coverage: 0.0%. First build
8924936794

Pull #1

github

web-flow
Merge 16fd89c64 into add13752b
Pull Request #1: Expressions

0 of 401 new or added lines in 9 files covered. (0.0%)

0 of 401 relevant lines covered (0.0%)

0.0 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

0.0
/src/trnsys_dck_parser/parse/common.py
NEW
1
import abc as _abc
×
NEW
2
import dataclasses as _dc
×
NEW
3
import re as _re
×
NEW
4
import typing as _tp
×
5

6

NEW
7
@_dc.dataclass
class ParseError:
    """Describes a failed parse: what went wrong and where in the input."""

    # Human-readable description of the failure.
    error_message: str
    # The complete string that was being parsed.
    input_string: str
    # Index into ``input_string`` at which parsing failed.
    error_start: int

    @property
    def error_string(self) -> str:
        """The unparsed tail of the input, starting at the error position."""
        return self.input_string[self.error_start:]

    def __repr__(self) -> str:
        # Show only a short prefix of the offending input to keep messages compact.
        snippet = self.error_string[:10]
        return f"Parse error: {self.error_message}: {snippet}"
×
19

20

NEW
21
# Covariant type variables for parser results: _T_co is the value type produced
# by a parser; _S_co is the value type produced by a sub-parser.
_T_co = _tp.TypeVar("_T_co", covariant=True)
_S_co = _tp.TypeVar("_S_co", covariant=True)
×
23

24

NEW
25
@_dc.dataclass
class ParseSuccess(_tp.Generic[_T_co]):
    """Successful parse result: the parsed value plus where parsing stopped."""

    # The value produced by the parser.
    value: _T_co
    # Index into the input string at which the unconsumed remainder begins.
    remaining_string_input_start_index: int
×
29

30

NEW
31
# A parse either succeeds with a value or fails with a ParseError.
ParseResult = ParseSuccess[_T_co] | ParseError
×
32

33

NEW
34
def is_success(result: ParseResult) -> _tp.TypeGuard[ParseSuccess]:
    """Type guard: ``True`` iff *result* is a ``ParseSuccess``."""
    return isinstance(result, ParseSuccess)
×
36

37

NEW
38
def is_error(result: ParseResult) -> _tp.TypeGuard[ParseError]:
    """Type guard: ``True`` iff *result* is not a success (i.e. a ``ParseError``)."""
    return not is_success(result)
×
40

41

NEW
42
def success(result: ParseResult[_T_co]) -> ParseSuccess[_T_co]:
    """Narrow *result* to a ``ParseSuccess``.

    Raises:
        ValueError: if *result* is a ``ParseError``.
    """
    if is_success(result):
        return _tp.cast(ParseSuccess[_T_co], result)

    raise ValueError(f"Not a success: {result.error_message}.")
×
47

48

NEW
49
def error(result: ParseResult[_T_co]) -> ParseError:
    """Narrow *result* to a ``ParseError``.

    Raises:
        ValueError: if *result* is a ``ParseSuccess``.
    """
    if is_error(result):
        return _tp.cast(ParseError, result)

    raise ValueError(f"Not a failure: {result.value}.")
×
54

55

NEW
56
@_dc.dataclass
class TokenDefinition:
    """Defines a kind of token: a description for error messages plus the regex
    used to match it."""

    # Human-readable name, used in parse error messages.
    description: str
    # Compiled from ``regex`` in ``__post_init__``; not a constructor argument.
    pattern: _re.Pattern = _dc.field(init=False)
    # Raw regular expression; consumed by ``__post_init__`` only (InitVar).
    regex: _dc.InitVar[str]
    # Extra flags passed to ``re.compile``. NOTE: ``NOFLAG`` requires Python 3.11+.
    flags: _re.RegexFlag = _re.RegexFlag.NOFLAG

    # For tokens with "overlapping" patterns, the one with the higher priority
    # will be tried to match first
    priority: int = -1

    def __post_init__(self, regex: str) -> None:
        # Compile once up front so repeated matching in the lexer is cheap.
        self.pattern = _re.compile(regex, self.flags)
×
69

70

NEW
71
class Tokens:
    """Token definitions shared by all lexers."""

    # Zero-width token matching the end of the input; the lexer always tries it last.
    END = TokenDefinition("end of input", r"$")
×
73

74

NEW
75
@_dc.dataclass
class Token:
    """A concrete match of a ``TokenDefinition`` within the input string."""

    # Which token definition matched.
    definition: TokenDefinition
    # The matched text itself.
    value: str
    # The complete input string the token was taken from.
    input_string: str
    # Half-open span of the match within ``input_string``: [start, end).
    start_index_inclusive: int
    end_index_exclusive: int
×
82

83

NEW
84
# The lexer either produces the next Token or reports a ParseError.
LexerResult = Token | ParseError
×
85

86

NEW
87
@_dc.dataclass
class ParseErrorException(Exception):
    """Exception wrapper carrying a ``ParseError``, used where an exception is
    more convenient than returning the error value."""

    # The underlying parse error being propagated.
    parse_error: ParseError
×
90

91

NEW
92
class _Ignore:
    """Namespace for the pattern of input the lexer skips between tokens."""

    # Input to skip: runs of whitespace and "!"-to-end-of-line comments.
    _IGNORE_REGEX = "|".join(
        [
            r"[ \t\n]+",  # Whitespace
            r"(?m:!.*$)",  # Comment
        ]
    )

    # NOTE(review): the MULTILINE flag duplicates the inline (?m:...) group
    # above; redundant but harmless.
    Pattern = _re.compile(_IGNORE_REGEX, _re.RegexFlag.MULTILINE)
×
101

102

NEW
103
class Lexer:
    """Turns an input string into a stream of tokens.

    Token definitions are tried in descending ``priority`` order; whitespace
    and "!"-comments are skipped before every match attempt.  The zero-width
    ``Tokens.END`` definition is always tried last.
    """

    def __init__(self, input_string: str, token_definitions: _tp.Sequence[TokenDefinition]) -> None:
        self.input_string = input_string

        # Higher-priority definitions are matched first; ``sorted`` is stable,
        # so definitions with equal priority keep their given order.
        by_descending_priority = sorted(
            token_definitions, key=lambda definition: definition.priority, reverse=True
        )
        self._token_definitions = [*by_descending_priority, Tokens.END]

        self.current_pos = 0

    def get_next_token(self) -> LexerResult:
        """Return the next token, or a ``ParseError`` if nothing recognizable follows."""
        # First skip any run of whitespace and comments.
        while True:
            ignored = self._match(_Ignore.Pattern)
            if not ignored:
                break
            self.advance_input(ignored.end())

        for definition in self._token_definitions:
            found = self._match(definition.pattern)
            if not found:
                continue

            self.advance_input(found.end())
            return Token(definition, found.group(), self.input_string, found.start(), found.end())

        return ParseError(
            "Not a recognized token.",
            self.input_string,
            self.current_pos,
        )

    def _match(self, pattern: _re.Pattern) -> _re.Match | None:
        """Try to match *pattern* at the current position (anchored match)."""
        return pattern.match(self.input_string, pos=self.current_pos)

    def advance_input(self, to: int) -> None:
        """Move the current position forward to *to*; backward moves are ignored."""
        if to > self.current_pos:
            self.current_pos = to
×
144

145

NEW
146
class ParserBase(_tp.Generic[_T_co], _abc.ABC):
    """Base class for parsers built on top of ``Lexer``.

    Subclasses implement ``parse`` and use ``_accept``/``_expect`` to consume
    tokens and ``_expect_sub_parser`` to delegate to another parser.
    """

    def __init__(self, lexer: Lexer) -> None:
        self._lexer = lexer
        # One-token lookahead: read from the lexer but not yet consumed.
        self._current_token: _tp.Optional[Token] = None
        # Index into the lexer's input string where the unconsumed input begins.
        self._remaining_input_string_start_index = 0

    @property
    def _remaining_input_string(self) -> str:
        """The part of the input that has not been consumed yet."""
        return self._lexer.input_string[self._remaining_input_string_start_index:]

    def _accept(self, token_definition: TokenDefinition) -> str | None:
        """Consume and return the next token's text if it matches *token_definition*.

        On a mismatch returns ``None`` and keeps the token as lookahead.

        Raises:
            ParseErrorException: if the lexer cannot produce a next token.
        """
        if not self._current_token:
            self._set_next_token()

        assert self._current_token

        if self._current_token.definition != token_definition:
            return None

        self._remaining_input_string_start_index = self._current_token.end_index_exclusive
        value = self._current_token.value

        # Clear the lookahead so the next call pulls a fresh token.
        self._current_token = None

        return value

    def _expect(self, token_definition: TokenDefinition) -> str:
        """Like ``_accept`` but raises ``ParseErrorException`` on a mismatch."""
        value = self._accept(token_definition)
        if value is not None:
            return value

        expected_token = token_definition.description

        # The doubled braces leave a literal {actual_token} placeholder for
        # ``_raise_parsing_error`` to fill in.
        self._raise_parsing_error(f"Expected {expected_token} but found {{actual_token}}.")

    def _expect_sub_parser(self, parser: "ParserBase[_S_co]") -> _S_co:
        """Run *parser* and return its value, re-raising a failure as an exception.

        NOTE(review): the sub-parser's reported index is added onto this
        parser's own start index, which assumes the sub-parser reports an index
        relative to where it started — confirm against the concrete parsers.
        """
        match result := parser.parse():
            case ParseError():
                raise ParseErrorException(result)
            case ParseSuccess():
                remaining_string_input_start_index = (
                        self._remaining_input_string_start_index + result.remaining_string_input_start_index
                )
                self._advance_input(remaining_string_input_start_index)

                return result.value
            case _ as unreachable:
                _tp.assert_never(unreachable)

    def _set_next_token(self) -> None:
        """Pull the next token from the lexer into the lookahead slot.

        Raises:
            ParseErrorException: if the lexer returns a ``ParseError``.
        """
        next_token = self._lexer.get_next_token()
        if isinstance(next_token, ParseError):
            raise ParseErrorException(next_token)
        self._current_token = next_token

    def _advance_input(self, to: int) -> None:
        # Keep our own bookkeeping and the lexer's position in sync.
        self._remaining_input_string_start_index = to
        self._lexer.advance_input(to)

    def _raise_parsing_error(self, error_message: str, actual_token_key: str = "actual_token") -> _tp.NoReturn:
        """Raise a ``ParseErrorException`` located at the current token.

        *error_message* may contain a ``{actual_token}`` placeholder (the key is
        configurable via *actual_token_key*) which is replaced with the current
        token's description.
        """
        assert self._current_token

        actual_token_description = self._current_token.definition.description

        formatted_error_message = error_message.format(**{actual_token_key: actual_token_description})

        parsing_error = ParseError(
            formatted_error_message, self._lexer.input_string, self._current_token.start_index_inclusive
        )

        raise ParseErrorException(parsing_error)

    @_abc.abstractmethod
    def parse(self) -> ParseResult:
        """Parse from the current position, returning a success or an error."""
        raise NotImplementedError()
×
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc