• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

idaholab / MontePy / 13886024455

16 Mar 2025 06:06PM UTC coverage: 98.106% (+0.3%) from 97.83%
13886024455

Pull #698

github

web-flow
Merge 24fe69472 into 895002fc1
Pull Request #698: Fixing syntax error with updating is_reflecting

3 of 3 new or added lines in 2 files covered. (100.0%)

130 existing lines in 31 files now uncovered.

7718 of 7867 relevant lines covered (98.11%)

0.98 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

96.44
/montepy/input_parser/parser_base.py
1
# Copyright 2024, Battelle Energy Alliance, LLC All Rights Reserved.
2
from montepy.input_parser.tokens import MCNP_Lexer
1✔
3
from montepy.input_parser import syntax_node
1✔
4
from sly import Parser
1✔
5
import sly
1✔
6

7
_dec = sly.yacc._decorator
1✔
8

9

10
class MetaBuilder(sly.yacc.ParserMeta):
    """Custom MetaClass for allowing subclassing of MCNP_Parser.

    Subclasses of ``MCNP_Parser`` get every public grammar rule of their
    bases copied directly into their own class namespace before the SLY
    metaclass builds the parser tables, so inherited rules are visible to
    SLY's table generation.

    Note: overloading functions is not allowed.
    """

    # Names that belong to SLY / parser infrastructure and must never be
    # copied from a base class into a subclass namespace.
    protected_names = {
        "debugfile",
        "errok",
        "error",
        "index_position",
        "line_position",
        "log",
        "parse",
        "restart",
        "tokens",
        "dont_copy",
    }

    def __new__(meta, classname, bases, attributes):
        # MCNP_Parser itself is the root; only its subclasses need the
        # base-class grammar rules flattened into their namespace.
        if classname != "MCNP_Parser":
            for basis in bases:
                MetaBuilder._flatten_rules(classname, basis, attributes)
        cls = super().__new__(meta, classname, bases, attributes)
        return cls

    @staticmethod
    def _flatten_rules(classname, basis, attributes):
        """Copy the public attributes of ``basis`` into ``attributes``.

        Skips private names, SLY infrastructure names
        (``protected_names``), and anything listed in the subclass's
        optional ``dont_copy`` set.
        """
        for attr_name in dir(basis):
            if (
                not attr_name.startswith("_")
                and attr_name not in MetaBuilder.protected_names
                and attr_name not in attributes.get("dont_copy", set())
            ):
                func = getattr(basis, attr_name)
                # Existing entries in ``attributes`` (the subclass's own
                # definitions) are overwritten by the base's attribute here.
                attributes[attr_name] = func
        parent = basis.__bases__
        # NOTE(review): this loop returns as soon as any base is not
        # ``Parser`` and otherwise falls off the end — either way the
        # method just returns None and nothing recurses. It looks like a
        # leftover guard for a recursive walk of grandparent classes;
        # confirm whether it can be removed.
        for par_basis in parent:
            if par_basis != Parser:
                return
50

51

52
class SLY_Supressor:
    """A stand-in logger whose main purpose is to make SLY's warnings disappear.

    SLY logs copiously through the standard logging interface; this object
    swallows all of that chatter while collecting actual parse failures in
    an internal queue for later retrieval.
    """

    def __init__(self):
        # Queue of parse-failure records; drained by clear_queue().
        self._parse_fail_queue = []

    def debug(self, msg, *args, **kwargs):
        """Silently discard the message (and every other log level below)."""

    # All standard logging levels are equally suppressed.
    info = debug

    warning = debug

    error = debug

    critical = debug

    def parse_error(self, msg, token=None, lineno=0, index=0):
        """Adds a SLY parsing error to the error queue for being dumped later.

        Parameters
        ----------
        msg : str
            The message to display.
        token : Token
            the token that caused the error if any.
        lineno : int
            the current lineno of the error (from SLY not the file), if
            any.
        """
        record = {
            "message": msg,
            "token": token,
            "line": lineno,
            "index": index,
        }
        self._parse_fail_queue.append(record)

    def clear_queue(self):
        """Clears the error queue and returns all errors.

        Each entry is a dictionary with the keys: "message", "token",
        "line", and "index".

        Returns
        -------
        list
            A list of the errors since the queue was last cleared.
        """
        # Swap in a fresh list so the returned one is no longer shared.
        errors, self._parse_fail_queue = self._parse_fail_queue, []
        return errors

    def __len__(self):
        """Number of errors currently waiting in the queue."""
        return len(self._parse_fail_queue)
102

103

104
class MCNP_Parser(Parser, metaclass=MetaBuilder):
    """Base class for all MCNP parsers that provides basics."""

    # Remove this if trying to see issues with parser
    log = SLY_Supressor()
    # Share the lexer's token set so the grammar rules below can reference them.
    tokens = MCNP_Lexer.tokens
    # Set to a file path (e.g., "parser.out") to dump SLY's parser tables.
    debugfile = None

    def restart(self):
        """Clears internal state information about the current parse.

        Should be ran before a new object is parsed.
        """
        # Drop errors left over from a previous parse so they are not
        # mistaken for errors of the next one.
        self.log.clear_queue()
        super().restart()

    def parse(self, token_generator, input=None):
        """Parses the token stream and returns a syntax tree.

        If the parsing fails None will be returned.
        The error queue can be retrieved from ``parser.log.clear_queue()``.

        Parameters
        ----------
        token_generator : generator
            the token generator from ``lexer.tokenize``.
        input : Input
            the input that is being lexed and parsed.

        Returns
        -------
        SyntaxNode
        """
        self._input = input

        # debug every time a token is taken
        def gen_wrapper():
            while True:
                token = next(token_generator, None)
                self._debug_parsing_error(token)
                yield token

        # change to using `gen_wrapper()` to debug
        tree = super().parse(token_generator)
        # treat any previous errors as being fatal even if it recovered.
        if len(self.log) > 0:
            return None
        # NOTE(review): shadows the class-level ``tokens`` attribute on this
        # instance with an empty dict — presumably to release references after
        # a successful parse; confirm this is intentional.
        self.tokens = {}
        return tree

    # Resolve the shift/reduce ambiguity between padding and text tokens.
    precedence = (("left", SPACE), ("left", TEXT))

    @_("NUMBER", "NUMBER padding")
    def number_phrase(self, p):
        """A non-zero number with or without padding.

        Returns
        -------
        ValueNode
            a float ValueNode
        """
        return self._flush_phrase(p, float)

    @_("NUMBER", "NUMBER padding")
    def identifier_phrase(self, p):
        """A non-zero number with or without padding converted to int.

        Returns
        -------
        ValueNode
            an int ValueNode
        """
        return self._flush_phrase(p, int)

    @_(
        "numerical_phrase",
        "shortcut_phrase",
        "number_sequence numerical_phrase",
        "number_sequence shortcut_phrase",
    )
    def number_sequence(self, p):
        """A list of numbers.

        Returns
        -------
        ListNode
        """
        if len(p) == 1:
            sequence = syntax_node.ListNode("number sequence")
            # A shortcut may already have produced a ListNode; pass it through.
            if type(p[0]) == syntax_node.ListNode:
                return p[0]
            sequence.append(p[0])
        else:
            sequence = p[0]
            # Flatten a nested ListNode (from chained shortcuts) into the
            # running sequence instead of appending it as a single element.
            if type(p[1]) == syntax_node.ListNode:
                for node in p[1].nodes:
                    sequence.append(node)
            else:
                sequence.append(p[1])
        return sequence

    @_(
        "numerical_phrase numerical_phrase",
        "shortcut_phrase",
        "even_number_sequence numerical_phrase numerical_phrase",
        "even_number_sequence shortcut_phrase",
    )
    def even_number_sequence(self, p):
        """
        A list of numbers with an even number of elements*.

        * shortcuts will break this.
        """
        if not hasattr(p, "even_number_sequence"):
            # First production matched: start a new sequence.
            sequence = syntax_node.ListNode("number sequence")
            sequence.append(p[0])
        else:
            # Recursive production: extend the existing sequence.
            sequence = p[0]
        if len(p) > 1:
            for idx in range(1, len(p)):
                sequence.append(p[idx])
        return sequence

    @_("number_phrase", "null_phrase")
    def numerical_phrase(self, p):
        """Any number, including 0, with its padding.

        Returns
        -------
        ValueNode
            a float ValueNode
        """
        return p[0]

    @_("numerical_phrase", "shortcut_phrase")
    def shortcut_start(self, p):
        # The value (or prior shortcut) that seeds a following shortcut.
        return p[0]

    @_(
        "shortcut_start NUM_REPEAT",
        "shortcut_start REPEAT",
        "shortcut_start NUM_MULTIPLY",
        "shortcut_start MULTIPLY",
        "shortcut_start NUM_INTERPOLATE padding number_phrase",
        "shortcut_start INTERPOLATE padding number_phrase",
        "shortcut_start NUM_LOG_INTERPOLATE padding number_phrase",
        "shortcut_start LOG_INTERPOLATE padding number_phrase",
        "NUM_JUMP",
        "JUMP",
    )
    def shortcut_sequence(self, p):
        """A shortcut (repeat, multiply, interpolate, or jump).

        Returns
        -------
        ShortcutNode
            the parsed shortcut.
        """
        short_cut = syntax_node.ShortcutNode(p)
        # If the seed was itself a shortcut, chain both into a ListNode so
        # neither is lost.
        if isinstance(p[0], syntax_node.ShortcutNode):
            list_node = syntax_node.ListNode("next_shortcuts")
            list_node.append(p[0])
            list_node.append(short_cut)
            return list_node
        return short_cut

    @_("shortcut_sequence", "shortcut_sequence padding")
    def shortcut_phrase(self, p):
        """A complete shortcut, which should be used, and not shortcut_sequence.

        Returns
        -------
        ShortcutNode
            the parsed shortcut.
        """
        sequence = p.shortcut_sequence
        if len(p) == 2:
            sequence.end_padding = p.padding
        return sequence

    @_("NULL", "NULL padding")
    def null_phrase(self, p):
        """A zero number with or without its padding.

        Returns
        -------
        ValueNode
            a float ValueNode
        """
        return self._flush_phrase(p, float)

    @_("NULL", "NULL padding")
    def null_ident_phrase(self, p):
        """A zero number with or without its padding, for identification.

        Returns
        -------
        ValueNode
            an int ValueNode
        """
        return self._flush_phrase(p, int)

    @_("TEXT", "TEXT padding")
    def text_phrase(self, p):
        """A string with or without its padding.

        Returns
        -------
        ValueNode
            a str ValueNode.
        """
        return self._flush_phrase(p, str)

    def _flush_phrase(self, p, token_type):
        """Creates a ValueNode from token ``p[0]``, attaching trailing
        padding when the production captured any."""
        if len(p) > 1:
            padding = p[1]
        else:
            padding = None
        return syntax_node.ValueNode(p[0], token_type, padding)

    @_("SPACE", "DOLLAR_COMMENT", "COMMENT")
    def padding(self, p):
        """Anything that is not semantically significant: white space, and comments.

        Returns
        -------
        PaddingNode
            All sequential padding.
        """
        if hasattr(p, "DOLLAR_COMMENT") or hasattr(p, "COMMENT"):
            is_comment = True
        else:
            is_comment = False
        return syntax_node.PaddingNode(p[0], is_comment)

    # NOTE(review): redefining ``padding`` below is the SLY idiom for adding
    # extra productions to the same rule — SLY's metaclass dict chains rule
    # functions of the same name rather than overwriting them; confirm
    # against the SLY documentation.
    @_("padding SPACE", "padding DOLLAR_COMMENT", "padding COMMENT", 'padding "&"')
    def padding(self, p):
        """Anything that is not semantically significant: white space, and comments.

        Returns
        -------
        PaddingNode
            All sequential padding.
        """
        if hasattr(p, "DOLLAR_COMMENT") or hasattr(p, "COMMENT"):
            is_comment = True
        else:
            is_comment = False
        # Accumulate onto the existing PaddingNode rather than nesting.
        p[0].append(p[1], is_comment)
        return p[0]

    @_("parameter", "parameters parameter")
    def parameters(self, p):
        """A list of the parameters (key, value pairs) for this input.

        Returns
        -------
        ParametersNode
            all parameters
        """
        if len(p) == 1:
            params = syntax_node.ParametersNode()
            param = p[0]
        else:
            params = p[0]
            param = p[1]
        params.append(param)
        return params

    @_(
        "classifier param_seperator number_sequence",
        "classifier param_seperator text_phrase",
    )
    def parameter(self, p):
        """A singular Key-value pair.

        Returns
        -------
        SyntaxNode
            the parameter.
        """
        return syntax_node.SyntaxNode(
            p.classifier.prefix.value,
            {"classifier": p.classifier, "seperator": p.param_seperator, "data": p[2]},
        )

    @_("file_atom", "file_name file_atom")
    def file_name(self, p):
        """A file name.

        Built by concatenating successive atoms, since a path may lex as a
        mix of token types.

        Returns
        -------
        str
        """
        ret = p[0]
        if len(p) > 1:
            ret += p[1]
        return ret

    @_(
        "TEXT",
        "FILE_PATH",
        "NUMBER",
        "PARTICLE",
        "PARTICLE_SPECIAL",
        "INTERPOLATE",
        "JUMP",
        "KEYWORD",
        "LOG_INTERPOLATE",
        "NULL",
        "REPEAT",
        "SURFACE_TYPE",
        "THERMAL_LAW",
        "ZAID",
        "NUMBER_WORD",
    )
    def file_atom(self, p):
        # Any token that may legally appear inside a file name.
        return p[0]

    @_("file_name", "file_name padding")
    def file_phrase(self, p):
        """A file name with or without its padding.

        Returns
        -------
        ValueNode
            a str ValueNode.
        """
        return self._flush_phrase(p, str)

    @_("padding", "equals_sign", "padding equals_sign")
    def param_seperator(self, p):
        """The separation between a key and value for a parameter.

        Returns
        -------
        ValueNode
            a str ValueNode
        """
        padding = p[0]
        if len(p) > 1:
            padding += p[1]
        return padding

    @_('"="', '"=" padding')
    def equals_sign(self, p):
        """The separation between a key and value for a parameter.

        Returns
        -------
        ValueNode
            a str ValueNode
        """
        padding = syntax_node.PaddingNode(p[0])
        if hasattr(p, "padding"):
            padding += p.padding
        return padding

    @_('":" part', 'particle_type "," part')
    def particle_type(self, p):
        """One or more particle designators (e.g., ``:n,p``) as a ParticleNode."""
        if hasattr(p, "particle_type"):
            # Recursive case: extend the previous node's token with ",<part>".
            token = p.particle_type.token + "".join(list(p)[1:])
            particle_node = syntax_node.ParticleNode("data particles", token)
        else:
            particle_node = syntax_node.ParticleNode("data particles", "".join(list(p)))

        return particle_node

    @_("PARTICLE", "PARTICLE_SPECIAL")
    def part(self, p):
        # A single particle designator token.
        return p[0]

    @_(
        "TEXT",
        "KEYWORD",
        "PARTICLE",
        "SOURCE_COMMENT",
        "TALLY_COMMENT",
    )
    def data_prefix(self, p):
        # The leading word of a data-input classifier, wrapped as a str ValueNode.
        return syntax_node.ValueNode(p[0], str)

    @_(
        "modifier data_prefix",
        "data_prefix",
        "classifier NUMBER",
        "classifier NULL",
        "classifier particle_type",
    )
    def classifier(self, p):
        """The classifier of a data input.

        This represents the first word of the data input.
        E.g.: ``M4``, ``IMP:N``, ``F104:p``

        Returns
        -------
        ClassifierNode
        """
        if hasattr(p, "classifier"):
            classifier = p.classifier
        else:
            classifier = syntax_node.ClassifierNode()

        if hasattr(p, "modifier"):
            classifier.modifier = syntax_node.ValueNode(p.modifier, str)
        if hasattr(p, "data_prefix"):
            classifier.prefix = p.data_prefix
        if hasattr(p, "NUMBER") or hasattr(p, "NULL"):
            if hasattr(p, "NUMBER"):
                num = p.NUMBER
            else:
                num = p.NULL
            classifier.number = syntax_node.ValueNode(num, int)
        if hasattr(p, "particle_type"):
            classifier.particles = p.particle_type
        return classifier

    @_("classifier padding", "classifier")
    def classifier_phrase(self, p):
        """A classifier with its padding.

        Returns
        -------
        ClassifierNode
        """
        classifier = p.classifier
        if len(p) > 1:
            classifier.padding = p.padding
        return classifier

    @_('"*"', "PARTICLE_SPECIAL")
    def modifier(self, p):
        """A character that modifies a classifier, e.g., ``*TR``.

        Returns
        -------
        str
            the modifier
        """
        # A lone "*" can lex as PARTICLE_SPECIAL; normalize it back to "*".
        if hasattr(p, "PARTICLE_SPECIAL"):
            if p.PARTICLE_SPECIAL == "*":
                return "*"
        return p[0]

    def error(self, token):
        """Default error handling.

        Puts the data into a queue that can be pulled out later for one final clear debug.

        Parameters
        ----------
        token : Token
            the token that broke the parsing rules.
        """
        if token:
            lineno = getattr(token, "lineno", 0)
            # Resolve the column from the lexer when the originating input
            # is known; otherwise fall back to 0.
            if self._input and self._input.lexer:
                lexer = self._input.lexer
                index = lexer.find_column(lexer.text, token)
            else:
                index = 0
            if lineno:
                self.log.parse_error(
                    f"sly: Syntax error at line {lineno}, token={token.type}\n",
                    token,
                    lineno,
                    index,
                )
            else:
                self.log.parse_error(
                    f"sly: Syntax error, token={token.type}", token, lineno
                )
        else:
            # token is None: SLY hit end-of-input unexpectedly.
            self.log.parse_error("sly: Parse error in input. EOF\n")

    def _debug_parsing_error(self, token):  # pragma: no cover
        """A function that should be called from error when debugging a parsing error.

        Call this from the method error. Also you will need the relevant debugfile to be set and saving the parser
        tables to file. e.g.,

        debugfile = 'parser.out'
        """
        print(f"********* New Parsing Error from: {type(self)} ************ ")
        print(f"Token: {token}")
        print(f"State: {self.state}, statestack: {self.statestack}")
        print(f"Symstack: {self.symstack}")
        print(f"Log length: {len(self.log)}")
        print()
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc