• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

idaholab / MontePy / 18539205608

15 Oct 2025 06:44PM UTC coverage: 84.326% (-13.2%) from 97.503%
18539205608

push

github

MicahGale
Updated annotations for all top level modules.

24 of 25 new or added lines in 5 files covered. (96.0%)

1103 existing lines in 38 files now uncovered.

7064 of 8377 relevant lines covered (84.33%)

0.84 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

94.82
/montepy/input_parser/parser_base.py
1
# Copyright 2024, Battelle Energy Alliance, LLC All Rights Reserved.
2
from montepy.input_parser.tokens import MCNP_Lexer
1✔
3
from montepy.input_parser import syntax_node
1✔
4
from sly import Parser
1✔
5
import sly
1✔
6

7
# Alias of SLY's internal grammar-rule decorator (the `_` name that SLY
# injects into Parser class bodies). NOTE(review): presumably re-exported so
# rule functions can be decorated outside of a SLY class body — confirm
# against the modules that import this name.
_dec = sly.yacc._decorator
8

9

10
class MetaBuilder(sly.yacc.ParserMeta):
    """Custom MetaClass for allowing subclassing of MCNP_Parser.

    SLY builds parse tables only from rule functions found directly in the
    class body, so inherited grammar rules would normally be ignored. This
    metaclass copies the base classes' public rule attributes into the
    subclass namespace before SLY's ``ParserMeta`` runs.

    Note: overloading functions is not allowed.
    """

    # Names that belong to SLY's Parser machinery (or to this copying
    # mechanism itself) and must never be copied from a base class.
    protected_names = {
        "debugfile",
        "errok",
        "error",
        "index_position",
        "line_position",
        "log",
        "parse",
        "restart",
        "tokens",
        "dont_copy",
    }

    def __new__(meta, classname, bases, attributes):
        """Create the parser class, flattening inherited grammar rules.

        Parameters
        ----------
        meta : type
            this metaclass.
        classname : str
            the name of the class being created.
        bases : tuple
            the base classes.
        attributes : dict
            the class namespace; mutated in place by ``_flatten_rules``.
        """
        # The root MCNP_Parser defines its own rules directly; only
        # subclasses need the inherited rules copied in.
        if classname != "MCNP_Parser":
            for basis in bases:
                MetaBuilder._flatten_rules(classname, basis, attributes)
        cls = super().__new__(meta, classname, bases, attributes)
        return cls

    @staticmethod
    def _flatten_rules(classname, basis, attributes):
        """Copy public, non-protected attributes of ``basis`` into ``attributes``.

        Attributes listed in the subclass's ``dont_copy`` set are skipped,
        allowing a subclass to deliberately drop an inherited rule.

        Parameters
        ----------
        classname : str
            name of the class under construction (currently unused).
        basis : type
            the base class whose attributes are copied.
        attributes : dict
            namespace of the class under construction; modified in place.
        """
        for attr_name in dir(basis):
            if (
                not attr_name.startswith("_")
                and attr_name not in MetaBuilder.protected_names
                and attr_name not in attributes.get("dont_copy", set())
            ):
                func = getattr(basis, attr_name)
                attributes[attr_name] = func
        parent = basis.__bases__
        # NOTE(review): this returns as soon as any base is not sly.Parser and
        # never recurses into grandparents; dir(basis) above already includes
        # inherited names, so this may be intentional — confirm.
        for par_basis in parent:
            if par_basis != Parser:
                return
50

51

52
class SLY_Supressor:
    """A stand-in logger that silences SLY's output while queueing parse errors.

    Every standard logging level is a no-op; only :meth:`parse_error` records
    anything, and the recorded failures can be drained via
    :meth:`clear_queue`.
    """

    def __init__(self):
        # FIFO of parse failures recorded since the last clear_queue() call.
        self._parse_fail_queue = []

    def debug(self, msg, *args, **kwargs):
        """Discard a log record; SLY's chatter is intentionally suppressed."""
        pass

    # Route every standard logging level to the same no-op.
    info = debug

    warning = debug

    error = debug

    critical = debug

    def parse_error(self, msg, token=None, lineno=0, index=0):
        """Adds a SLY parsing error to the error queue for being dumped later.

        Parameters
        ----------
        msg : str
            The message to display.
        token : Token
            the token that caused the error if any.
        lineno : int
            the current lineno of the error (from SLY not the file), if
            any.
        index : int
            the column index of the error within the line, if any.
        """
        record = {
            "message": msg,
            "token": token,
            "line": lineno,
            "index": index,
        }
        self._parse_fail_queue.append(record)

    def clear_queue(self):
        """Clears the error queue and returns all errors.

        Returns a list of dictionaries. Each dictionary has the keys:
        "message", "token", "line", and "index".

        Returns
        -------
        list
            A list of the errors since the queue was last cleared.
        """
        # Swap the queue out atomically and hand back the old contents.
        drained, self._parse_fail_queue = self._parse_fail_queue, []
        return drained

    def __len__(self):
        """Number of parse errors currently queued."""
        return len(self._parse_fail_queue)
102

103

104
class MCNP_Parser(Parser, metaclass=MetaBuilder):
    """Base class for all MCNP parsers that provides basics.

    Grammar rules are declared with SLY's ``@_`` decorator, which is injected
    into the class namespace by the metaclass machinery — the bare names used
    below (``NUMBER``, ``SPACE``, etc.) are token names, not Python
    variables.
    """

    # Remove this if trying to see issues with parser
    log = SLY_Supressor()
    # Share the token set with the lexer so rules can reference token names.
    tokens = MCNP_Lexer.tokens
    # Set to a file path (e.g. "parser.out") to dump SLY's parser tables.
    debugfile = None

    def restart(self):
        """Clears internal state information about the current parse.

        Should be ran before a new object is parsed.
        """
        # Drop any stale errors from a previous parse before SLY resets.
        self.log.clear_queue()
        super().restart()

    def parse(self, token_generator, input=None):
        """Parses the token stream and returns a syntax tree.

        If the parsing fails None will be returned.
        The error queue can be retrieved from ``parser.log.clear_queue()``.

        Parameters
        ----------
        token_generator : generator
            the token generator from ``lexer.tokenize``.
        input : Input
            the input that is being lexed and parsed.

        Returns
        -------
        SyntaxNode
        """
        # Stashed so error() can compute column indices from the lexer text.
        self._input = input

        # debug every time a token is taken
        def gen_wrapper():
            while True:
                token = next(token_generator, None)
                self._debug_parsing_error(token)
                yield token

        # change to using `gen_wrapper()` to debug
        tree = super().parse(token_generator)
        # treat any previous errors as being fatal even if it recovered.
        if len(self.log) > 0:
            return None
        # NOTE(review): shadows the class-level token set on this instance
        # with an empty dict after a successful parse — presumably to release
        # references; confirm intent.
        self.tokens = {}
        return tree

    # SPACE and TEXT here are token names resolved by SLY's metaclass
    # namespace magic — presumably equivalent to the usual string form;
    # confirm against sly.yacc.ParserMetaDict.
    precedence = (("left", SPACE), ("left", TEXT))

    @_("NUMBER", "NUMBER padding")
    def number_phrase(self, p):
        """A non-zero number with or without padding.

        Returns
        -------
        ValueNode
            a float ValueNode
        """
        return self._flush_phrase(p, float)

    @_("NUMBER", "NUMBER padding")
    def identifier_phrase(self, p):
        """A non-zero number with or without padding converted to int.

        Returns
        -------
        ValueNode
            an int ValueNode
        """
        return self._flush_phrase(p, int)

    @_(
        "numerical_phrase",
        "shortcut_phrase",
        "number_sequence numerical_phrase",
        "number_sequence shortcut_phrase",
    )
    def number_sequence(self, p):
        """A list of numbers.

        Returns
        -------
        ListNode
        """
        if len(p) == 1:
            sequence = syntax_node.ListNode("number sequence")
            # A shortcut_phrase may already be a ListNode; reuse it as-is.
            if type(p[0]) == syntax_node.ListNode:
                return p[0]
            sequence.append(p[0])
        else:
            # Grow the existing sequence with the newly parsed phrase.
            sequence = p[0]
            if type(p[1]) == syntax_node.ListNode:
                for node in p[1].nodes:
                    sequence.append(node)
            else:
                sequence.append(p[1])
        return sequence

    @_(
        '"(" number_sequence ")"',
        '"(" number_sequence ")" padding',
        '"(" padding number_sequence ")" padding',
    )
    def number_sequence(self, p):
        """A parenthesized list of numbers, flattened into a ListNode.

        The parentheses and any padding are kept as nodes in the sequence.

        Returns
        -------
        ListNode
        """
        sequence = syntax_node.ListNode("parenthetical statement")
        sequence.append(p[0])
        for node in list(p)[1:]:
            if isinstance(node, syntax_node.ListNode):
                # Flatten the inner number_sequence into this one.
                for val in node.nodes:
                    sequence.append(val)
            elif isinstance(node, str):
                # Bare tokens like ")" arrive as strings; wrap as padding.
                sequence.append(syntax_node.PaddingNode(node))
            else:
                sequence.append(node)
        return sequence

    @_(
        "numerical_phrase numerical_phrase",
        "shortcut_phrase",
        "even_number_sequence numerical_phrase numerical_phrase",
        "even_number_sequence shortcut_phrase",
    )
    def even_number_sequence(self, p):
        """
        A list of numbers with an even number of elements*.

        * shortcuts will break this.

        Returns
        -------
        ListNode
        """
        if not hasattr(p, "even_number_sequence"):
            # Starting a new sequence from scratch.
            sequence = syntax_node.ListNode("number sequence")
            sequence.append(p[0])
        else:
            sequence = p[0]
        if len(p) > 1:
            for idx in range(1, len(p)):
                sequence.append(p[idx])
        return sequence

    @_("number_phrase", "null_phrase")
    def numerical_phrase(self, p):
        """Any number, including 0, with its padding.

        Returns
        -------
        ValueNode
            a float ValueNode
        """
        return p[0]

    @_("numerical_phrase", "shortcut_phrase")
    def shortcut_start(self, p):
        """The value that precedes (starts) a shortcut, e.g., the ``1`` in ``1 5r``.

        Returns
        -------
        ValueNode or ShortcutNode
        """
        return p[0]

    @_(
        "shortcut_start NUM_REPEAT",
        "shortcut_start REPEAT",
        "shortcut_start NUM_MULTIPLY",
        "shortcut_start MULTIPLY",
        "shortcut_start NUM_INTERPOLATE padding number_phrase",
        "shortcut_start INTERPOLATE padding number_phrase",
        "shortcut_start NUM_LOG_INTERPOLATE padding number_phrase",
        "shortcut_start LOG_INTERPOLATE padding number_phrase",
        "NUM_JUMP",
        "JUMP",
    )
    def shortcut_sequence(self, p):
        """A shortcut (repeat, multiply, interpolate, or jump).

        Returns
        -------
        ShortcutNode
            the parsed shortcut.
        """
        short_cut = syntax_node.ShortcutNode(p)
        # A shortcut chained off another shortcut yields both as a list.
        if isinstance(p[0], syntax_node.ShortcutNode):
            list_node = syntax_node.ListNode("next_shortcuts")
            list_node.append(p[0])
            list_node.append(short_cut)
            return list_node
        return short_cut

    @_("shortcut_sequence", "shortcut_sequence padding")
    def shortcut_phrase(self, p):
        """A complete shortcut, which should be used, and not shortcut_sequence.

        Returns
        -------
        ShortcutNode
            the parsed shortcut.
        """
        sequence = p.shortcut_sequence
        if len(p) == 2:
            sequence.end_padding = p.padding
        return sequence

    @_("NULL", "NULL padding")
    def null_phrase(self, p):
        """A zero number with or without its padding.

        Returns
        -------
        ValueNode
            a float ValueNode
        """
        return self._flush_phrase(p, float)

    @_("NULL", "NULL padding")
    def null_ident_phrase(self, p):
        """A zero number with or without its padding, for identification.

        Returns
        -------
        ValueNode
            an int ValueNode
        """
        return self._flush_phrase(p, int)

    @_("TEXT", "TEXT padding")
    def text_phrase(self, p):
        """A string with or without its padding.

        Returns
        -------
        ValueNode
            a str ValueNode.
        """
        return self._flush_phrase(p, str)

    def _flush_phrase(self, p, token_type):
        """Creates a ValueNode from a (token, optional padding) production.

        Parameters
        ----------
        p : YaccProduction
            the production; p[0] is the token, p[1] (if present) its padding.
        token_type : type
            the Python type the value should be interpreted as.

        Returns
        -------
        ValueNode
        """
        if len(p) > 1:
            padding = p[1]
        else:
            padding = None
        return syntax_node.ValueNode(p[0], token_type, padding)

    @_("SPACE", "DOLLAR_COMMENT", "COMMENT")
    def padding(self, p):
        """Anything that is not semantically significant: white space, and comments.

        Returns
        -------
        PaddingNode
            All sequential padding.
        """
        if hasattr(p, "DOLLAR_COMMENT") or hasattr(p, "COMMENT"):
            is_comment = True
        else:
            is_comment = False
        return syntax_node.PaddingNode(p[0], is_comment)

    @_("padding SPACE", "padding DOLLAR_COMMENT", "padding COMMENT", 'padding "&"')
    def padding(self, p):
        """Anything that is not semantically significant: white space, and comments.

        This rule accumulates additional padding onto an existing PaddingNode.

        Returns
        -------
        PaddingNode
            All sequential padding.
        """
        if hasattr(p, "DOLLAR_COMMENT") or hasattr(p, "COMMENT"):
            is_comment = True
        else:
            is_comment = False
        # Grow the existing PaddingNode in place rather than creating a new one.
        p[0].append(p[1], is_comment)
        return p[0]

    @_("parameter", "parameters parameter")
    def parameters(self, p):
        """A list of the parameters (key, value pairs) for this input.

        Returns
        -------
        ParametersNode
            all parameters
        """
        if len(p) == 1:
            params = syntax_node.ParametersNode()
            param = p[0]
        else:
            params = p[0]
            param = p[1]
        params.append(param)
        return params

    @_(
        "classifier param_seperator number_sequence",
        "classifier param_seperator text_phrase",
    )
    def parameter(self, p):
        """A singular Key-value pair.

        Returns
        -------
        SyntaxNode
            the parameter.
        """
        return syntax_node.SyntaxNode(
            p.classifier.prefix.value,
            {"classifier": p.classifier, "seperator": p.param_seperator, "data": p[2]},
        )

    @_("file_atom", "file_name file_atom")
    def file_name(self, p):
        """A file name.

        Built by concatenating successive file_atom tokens.

        Returns
        -------
        str
        """
        ret = p[0]
        if len(p) > 1:
            ret += p[1]
        return ret

    @_(
        "TEXT",
        "FILE_PATH",
        "NUMBER",
        "PARTICLE",
        "PARTICLE_SPECIAL",
        "INTERPOLATE",
        "JUMP",
        "KEYWORD",
        "LOG_INTERPOLATE",
        "NULL",
        "REPEAT",
        "SURFACE_TYPE",
        "THERMAL_LAW",
        "ZAID",
        "NUMBER_WORD",
    )
    def file_atom(self, p):
        """Any single token that may legally appear inside a file name.

        Returns
        -------
        str
        """
        return p[0]

    @_("file_name", "file_name padding")
    def file_phrase(self, p):
        """A file name with or without its padding.

        Returns
        -------
        ValueNode
            a str ValueNode.
        """
        return self._flush_phrase(p, str)

    @_("padding", "equals_sign", "padding equals_sign")
    def param_seperator(self, p):
        """The seperation between a key and value for a parameter.

        Returns
        -------
        ValueNode
            a str ValueNode
        """
        padding = p[0]
        if len(p) > 1:
            padding += p[1]
        return padding

    @_('"="', '"=" padding')
    def equals_sign(self, p):
        """The seperation between a key and value for a parameter.

        Returns
        -------
        ValueNode
            a str ValueNode
        """
        padding = syntax_node.PaddingNode(p[0])
        if hasattr(p, "padding"):
            padding += p.padding
        return padding

    @_('":" part', 'particle_type "," part')
    def particle_type(self, p):
        """A particle designator such as ``:n`` or ``:n,p``.

        Returns
        -------
        ParticleNode
        """
        if hasattr(p, "particle_type"):
            # Extend an existing designator with the next ",part" segment.
            token = p.particle_type.token + "".join(list(p)[1:])
            particle_node = syntax_node.ParticleNode("data particles", token)
        else:
            particle_node = syntax_node.ParticleNode("data particles", "".join(list(p)))

        return particle_node

    @_("PARTICLE", "PARTICLE_SPECIAL")
    def part(self, p):
        """A single particle token.

        Returns
        -------
        str
        """
        return p[0]

    @_(
        "TEXT",
        "KEYWORD",
        "PARTICLE",
        "SOURCE_COMMENT",
        "TALLY_COMMENT",
    )
    def data_prefix(self, p):
        """The textual prefix of a data input, e.g., the ``M`` of ``M4``.

        Returns
        -------
        ValueNode
            a str ValueNode
        """
        return syntax_node.ValueNode(p[0], str)

    @_(
        "modifier data_prefix",
        "data_prefix",
        "classifier NUMBER",
        "classifier NULL",
        "classifier particle_type",
    )
    def classifier(self, p):
        """The classifier of a data input.

        This represents the first word of the data input.
        E.g.: ``M4``, `IMP:N`, ``F104:p``

        Returns
        -------
        ClassifierNode
        """
        if hasattr(p, "classifier"):
            classifier = p.classifier
        else:
            classifier = syntax_node.ClassifierNode()

        if hasattr(p, "modifier"):
            classifier.modifier = syntax_node.ValueNode(p.modifier, str)
        if hasattr(p, "data_prefix"):
            classifier.prefix = p.data_prefix
        if hasattr(p, "NUMBER") or hasattr(p, "NULL"):
            if hasattr(p, "NUMBER"):
                num = p.NUMBER
            else:
                num = p.NULL
            classifier.number = syntax_node.ValueNode(num, int)
        if hasattr(p, "particle_type"):
            classifier.particles = p.particle_type
        return classifier

    @_("classifier padding", "classifier")
    def classifier_phrase(self, p):
        """A classifier with its padding.

        Returns
        -------
        ClassifierNode
        """
        classifier = p.classifier
        if len(p) > 1:
            classifier.padding = p.padding
        return classifier

    @_('"*"', "PARTICLE_SPECIAL")
    def modifier(self, p):
        """A character that modifies a classifier, e.g., ``*TR``.

        Returns
        -------
        str
            the modifier
        """
        # The lexer may tokenize "*" as PARTICLE_SPECIAL; normalize to "*".
        if hasattr(p, "PARTICLE_SPECIAL"):
            if p.PARTICLE_SPECIAL == "*":
                return "*"
        return p[0]

    @_('"("', '"(" padding')
    def lparen_phrase(self, p):
        """
        A left parenthesis "(" and its padding.

        Returns
        -------
        PaddingNode
        """
        pad = syntax_node.PaddingNode(p[0])
        if len(p) > 1:
            for node in p.padding.nodes:
                pad.append(node)
        return pad

    @_('")"', '")" padding')
    def rparen_phrase(self, p):
        """
        A right parenthesis ")" and its padding.

        Returns
        -------
        PaddingNode
        """
        pad = syntax_node.PaddingNode(p[0])
        if len(p) > 1:
            for node in p.padding.nodes:
                pad.append(node)
        return pad

    def error(self, token):
        """Default error handling.

        Puts the data into a queue that can be pulled out later for one final clear debug.

        Parameters
        ----------
        token : Token
            the token that broke the parsing rules.
        """
        # self._debug_parsing_error(token)
        if token:
            lineno = getattr(token, "lineno", 0)
            # Compute the column index only when the lexer text is available.
            if self._input and self._input.lexer:
                lexer = self._input.lexer
                index = lexer.find_column(lexer.text, token)
            else:
                index = 0
            if lineno:
                self.log.parse_error(
                    f"sly: Syntax error at line {lineno}, token={token.type}\n",
                    token,
                    lineno,
                    index,
                )
            else:
                self.log.parse_error(
                    f"sly: Syntax error, token={token.type}", token, lineno
                )
        else:
            # token is None at end of input: unexpected EOF.
            self.log.parse_error("sly: Parse error in input. EOF\n")

    def _debug_parsing_error(self, token):  # pragma: no cover
        """A function that should be called from error when debugging a parsing error.

        Call this from the method error. Also you will need the relevant debugfile to be set and saving the parser
        tables to file. e.g.,

        debugfile = 'parser.out'

        Parameters
        ----------
        token : Token
            the token being processed when the error occurred.
        """
        print(f"********* New Parsing Error from: {type(self)} ************ ")
        print(f"Token: {token}")
        print(f"State: {self.state}, statestack: {self.statestack}")
        print(f"Symstack: {self.symstack}")
        print(f"Log length: {len(self.log)}")
        print()
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc