• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

idaholab / MontePy / 13812715759

12 Mar 2025 01:36PM UTC coverage: 98.034%. First build
13812715759

Pull #668

github

web-flow
Merge 3ef80199c into f26e57899
Pull Request #668: Implemented clear for material

48 of 51 new or added lines in 5 files covered. (94.12%)

7679 of 7833 relevant lines covered (98.03%)

0.98 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

95.63
/montepy/input_parser/parser_base.py
1
# Copyright 2024, Battelle Energy Alliance, LLC All Rights Reserved.
2
from montepy.input_parser.tokens import MCNP_Lexer
1✔
3
from montepy.input_parser import syntax_node
1✔
4
from sly import Parser
1✔
5
import sly
1✔
6

7
# Alias for SLY's internal grammar-rule decorator (``sly.yacc._decorator``).
# Inside a SLY Parser class body the decorator is injected as ``_`` by the
# metaclass; this module-level alias makes it reachable outside that context.
# NOTE(review): not referenced in this file's visible code — presumably used
# by parser subclasses elsewhere; confirm before removing.
_dec = sly.yacc._decorator
8

9

10
class MetaBuilder(sly.yacc.ParserMeta):
    """Custom metaclass that makes ``MCNP_Parser`` subclassable.

    Before SLY builds the parser tables for a subclass, the public rule
    attributes of every base class are copied into the subclass namespace
    so the inherited grammar is visible to SLY.

    Note: overloading functions is not allowed.
    """

    # Parser-machinery attributes that must never be copied from a base
    # class into a subclass namespace.
    protected_names = {
        "debugfile",
        "errok",
        "error",
        "index_position",
        "line_position",
        "log",
        "parse",
        "restart",
        "tokens",
        "dont_copy",
    }

    def __new__(meta, classname, bases, attributes):
        # The root parser needs no flattening; only subclasses pull rules
        # down from their bases.
        if classname != "MCNP_Parser":
            for base in bases:
                MetaBuilder._flatten_rules(classname, base, attributes)
        return super().__new__(meta, classname, bases, attributes)

    @staticmethod
    def _flatten_rules(classname, basis, attributes):
        """Copy the eligible public attributes of *basis* into *attributes*.

        Skips private names, SLY machinery (``protected_names``), and any
        names the subclass listed in its ``dont_copy`` set.
        """
        machinery = MetaBuilder.protected_names
        excluded = attributes.get("dont_copy", set())
        for name in dir(basis):
            if name.startswith("_") or name in machinery or name in excluded:
                continue
            attributes[name] = getattr(basis, name)
        # Stop as soon as a non-Parser ancestor is seen; bases rooted
        # directly in Parser need no further handling.
        for ancestor in basis.__bases__:
            if ancestor != Parser:
                return
50

51

52
class SLY_Supressor:
    """A stand-in logger that makes SLY's warnings disappear.

    All standard logging calls are silent no-ops; parse errors are instead
    accumulated in an internal queue that callers drain via
    :func:`clear_queue`.
    """

    def __init__(self):
        # Pending parse failures, one dict per error.
        self._parse_fail_queue = []

    def debug(self, msg, *args, **kwargs):
        """Discard the message silently."""
        pass

    # Every logging level is suppressed identically.
    info = debug

    warning = debug

    error = debug

    critical = debug

    def parse_error(self, msg, token=None, lineno=0, index=0):
        """Adds a SLY parsing error to the error queue for being dumped later.

        Parameters
        ----------
        msg : str
            The message to display.
        token : Token
            the token that caused the error if any.
        lineno : int
            the current lineno of the error (from SLY not the file), if
            any.
        index : int
            the column index of the error, if any.
        """
        entry = {
            "message": msg,
            "token": token,
            "line": lineno,
            "index": index,
        }
        self._parse_fail_queue.append(entry)

    def clear_queue(self):
        """Clears the error queue and returns all errors.

        Returns
        -------
        list
            the errors since the queue was last cleared, as dictionaries
            with the keys: "message", "token", "line", and "index".
        """
        drained, self._parse_fail_queue = self._parse_fail_queue, []
        return drained

    def __len__(self):
        """Number of errors currently queued."""
        return len(self._parse_fail_queue)
102

103

104
class MCNP_Parser(Parser, metaclass=MetaBuilder):
    """Base class for all MCNP parsers that provides basics."""

    # Remove this if trying to see issues with parser
    log = SLY_Supressor()
    tokens = MCNP_Lexer.tokens
    debugfile = None

    def restart(self):
        """Clears internal state information about the current parse.

        Should be ran before a new object is parsed.
        """
        # drop any errors left over from a previous (failed) parse
        self.log.clear_queue()
        super().restart()

    def parse(self, token_generator, input=None):
        """Parses the token stream and returns a syntax tree.

        If the parsing fails None will be returned.
        The error queue can be retrieved from ``parser.log.clear_queue()``.

        Parameters
        ----------
        token_generator : generator
            the token generator from ``lexer.tokenize``.
        input : Input
            the input that is being lexed and parsed.

        Returns
        -------
        SyntaxNode
        """
        self._input = input

        # debug every time a token is taken
        def gen_wrapper():
            while True:
                token = next(token_generator, None)
                self._debug_parsing_error(token)
                yield token

        # change to using `gen_wrapper()` to debug
        tree = super().parse(token_generator)
        # treat any previous errors as being fatal even if it recovered.
        if len(self.log) > 0:
            return None
        # NOTE(review): shadows the class-level ``tokens`` set with an empty
        # dict on this instance — presumably to release token references after
        # a successful parse; confirm intent.
        self.tokens = {}
        return tree

    # SPACE and TEXT here are token names from ``tokens``; SLY resolves them
    # inside the class body.
    precedence = (("left", SPACE), ("left", TEXT))

    @_("NUMBER", "NUMBER padding")
    def number_phrase(self, p):
        """A non-zero number with or without padding.

        Returns
        -------
        ValueNode
            a float ValueNode
        """
        return self._flush_phrase(p, float)

    @_("NUMBER", "NUMBER padding")
    def identifier_phrase(self, p):
        """A non-zero number with or without padding converted to int.

        Returns
        -------
        ValueNode
            an int ValueNode
        """
        return self._flush_phrase(p, int)

    @_(
        "numerical_phrase",
        "shortcut_phrase",
        "number_sequence numerical_phrase",
        "number_sequence shortcut_phrase",
    )
    def number_sequence(self, p):
        """A list of numbers.

        Returns
        -------
        ListNode
        """
        if len(p) == 1:
            sequence = syntax_node.ListNode("number sequence")
            # a shortcut_phrase can already be a ListNode; reuse it directly
            # rather than nesting it inside a fresh list
            if type(p[0]) == syntax_node.ListNode:
                return p[0]
            sequence.append(p[0])
        else:
            sequence = p[0]
            if type(p[1]) == syntax_node.ListNode:
                # flatten the nested list into the running sequence
                for node in p[1].nodes:
                    sequence.append(node)
            else:
                sequence.append(p[1])
        return sequence

    @_(
        "numerical_phrase numerical_phrase",
        "shortcut_phrase",
        "even_number_sequence numerical_phrase numerical_phrase",
        "even_number_sequence shortcut_phrase",
    )
    def even_number_sequence(self, p):
        """A list of numbers with an even number of elements*.

        * shortcuts will break this.

        Returns
        -------
        ListNode
        """
        if not hasattr(p, "even_number_sequence"):
            # first production: start a fresh list
            sequence = syntax_node.ListNode("number sequence")
            if type(p[0]) == syntax_node.ListNode:
                return p[0]
            sequence.append(p[0])
        else:
            sequence = p[0]
        # NOTE(review): the lone ``shortcut_phrase`` production reaches p[1]
        # here when p[0] was not a ListNode — verify that case cannot occur
        # or is handled upstream.
        if type(p[1]) == syntax_node.ListNode:
            # flatten a nested shortcut list
            for node in p[1].nodes:
                sequence.append(node)
        else:
            # append the remaining phrase(s) of this production
            for idx in range(1, len(p)):
                sequence.append(p[idx])
        return sequence

    @_("number_phrase", "null_phrase")
    def numerical_phrase(self, p):
        """Any number, including 0, with its padding.

        Returns
        -------
        ValueNode
            a float ValueNode
        """
        return p[0]

    @_("numerical_phrase", "shortcut_phrase")
    def shortcut_start(self, p):
        """The value a shortcut starts from: a number or a prior shortcut.

        Returns
        -------
        ValueNode or ShortcutNode
        """
        return p[0]

    @_(
        "shortcut_start NUM_REPEAT",
        "shortcut_start REPEAT",
        "shortcut_start NUM_MULTIPLY",
        "shortcut_start MULTIPLY",
        "shortcut_start NUM_INTERPOLATE padding number_phrase",
        "shortcut_start INTERPOLATE padding number_phrase",
        "shortcut_start NUM_LOG_INTERPOLATE padding number_phrase",
        "shortcut_start LOG_INTERPOLATE padding number_phrase",
        "NUM_JUMP",
        "JUMP",
    )
    def shortcut_sequence(self, p):
        """A shortcut (repeat, multiply, interpolate, or jump).

        Returns
        -------
        ShortcutNode
            the parsed shortcut.
        """
        short_cut = syntax_node.ShortcutNode(p)
        # when the start was itself a shortcut, keep both in a list so
        # neither is lost
        if isinstance(p[0], syntax_node.ShortcutNode):
            list_node = syntax_node.ListNode("next_shortcuts")
            list_node.append(p[0])
            list_node.append(short_cut)
            return list_node
        return short_cut

    @_("shortcut_sequence", "shortcut_sequence padding")
    def shortcut_phrase(self, p):
        """A complete shortcut, which should be used, and not shortcut_sequence.

        Returns
        -------
        ShortcutNode
            the parsed shortcut.
        """
        sequence = p.shortcut_sequence
        if len(p) == 2:
            sequence.end_padding = p.padding
        return sequence

    @_("NULL", "NULL padding")
    def null_phrase(self, p):
        """A zero number with or without its padding.

        Returns
        -------
        ValueNode
            a float ValueNode
        """
        return self._flush_phrase(p, float)

    @_("NULL", "NULL padding")
    def null_ident_phrase(self, p):
        """A zero number with or without its padding, for identification.

        Returns
        -------
        ValueNode
            an int ValueNode
        """
        return self._flush_phrase(p, int)

    @_("TEXT", "TEXT padding")
    def text_phrase(self, p):
        """A string with or without its padding.

        Returns
        -------
        ValueNode
            a str ValueNode.
        """
        return self._flush_phrase(p, str)

    def _flush_phrase(self, p, token_type):
        """Creates a ValueNode from a token and its optional trailing padding.

        Parameters
        ----------
        p : YaccProduction
            a production whose first symbol is the value and whose optional
            second symbol is padding.
        token_type : type
            the type to interpret the value as (e.g. int, float, str).
        """
        if len(p) > 1:
            padding = p[1]
        else:
            padding = None
        return syntax_node.ValueNode(p[0], token_type, padding)

    @_("SPACE", "DOLLAR_COMMENT", "COMMENT")
    def padding(self, p):
        """Anything that is not semantically significant: white space, and comments.

        Returns
        -------
        PaddingNode
            All sequential padding.
        """
        if hasattr(p, "DOLLAR_COMMENT") or hasattr(p, "COMMENT"):
            is_comment = True
        else:
            is_comment = False
        return syntax_node.PaddingNode(p[0], is_comment)

    # SLY merges same-named rule functions, so this extends (rather than
    # replaces) the ``padding`` rule above to absorb continuations.
    @_("padding SPACE", "padding DOLLAR_COMMENT", "padding COMMENT", 'padding "&"')
    def padding(self, p):
        """Anything that is not semantically significant: white space, and comments.

        Returns
        -------
        PaddingNode
            All sequential padding.
        """
        if hasattr(p, "DOLLAR_COMMENT") or hasattr(p, "COMMENT"):
            is_comment = True
        else:
            is_comment = False
        p[0].append(p[1], is_comment)
        return p[0]

    @_("parameter", "parameters parameter")
    def parameters(self, p):
        """A list of the parameters (key, value pairs) for this input.

        Returns
        -------
        ParametersNode
            all parameters
        """
        if len(p) == 1:
            params = syntax_node.ParametersNode()
            param = p[0]
        else:
            params = p[0]
            param = p[1]
        params.append(param)
        return params

    @_(
        "classifier param_seperator number_sequence",
        "classifier param_seperator text_phrase",
    )
    def parameter(self, p):
        """A singular Key-value pair.

        Returns
        -------
        SyntaxNode
            the parameter.
        """
        return syntax_node.SyntaxNode(
            p.classifier.prefix.value,
            {"classifier": p.classifier, "seperator": p.param_seperator, "data": p[2]},
        )

    @_("file_atom", "file_name file_atom")
    def file_name(self, p):
        """A file name.

        Returns
        -------
        str
        """
        ret = p[0]
        if len(p) > 1:
            # concatenate successive atoms into one name
            ret += p[1]
        return ret

    @_(
        "TEXT",
        "FILE_PATH",
        "NUMBER",
        "PARTICLE",
        "INTERPOLATE",
        "JUMP",
        "KEYWORD",
        "LOG_INTERPOLATE",
        "NULL",
        "REPEAT",
        "SURFACE_TYPE",
        "THERMAL_LAW",
        "ZAID",
        "NUMBER_WORD",
    )
    def file_atom(self, p):
        """A single token that may appear as part of a file name.

        Returns
        -------
        str
        """
        return p[0]

    @_("file_name", "file_name padding")
    def file_phrase(self, p):
        """A file name with or without its padding.

        Returns
        -------
        ValueNode
            a str ValueNode.
        """
        return self._flush_phrase(p, str)

    @_("padding", "equals_sign", "padding equals_sign")
    def param_seperator(self, p):
        """The seperation between a key and value for a parameter.

        Returns
        -------
        ValueNode
            a str ValueNode
        """
        padding = p[0]
        if len(p) > 1:
            padding += p[1]
        return padding

    @_('"="', '"=" padding')
    def equals_sign(self, p):
        """The seperation between a key and value for a parameter.

        Returns
        -------
        ValueNode
            a str ValueNode
        """
        padding = syntax_node.PaddingNode(p[0])
        if hasattr(p, "padding"):
            padding += p.padding
        return padding

    @_('":" part', 'particle_type "," part')
    def particle_type(self, p):
        """A colon-prefixed, comma-separated list of particle designators.

        Returns
        -------
        ParticleNode
        """
        if hasattr(p, "particle_type"):
            # extend an existing particle list with the next ",part"
            token = p.particle_type.token + "".join(list(p)[1:])
            particle_node = syntax_node.ParticleNode("data particles", token)
        else:
            particle_node = syntax_node.ParticleNode("data particles", "".join(list(p)))

        return particle_node

    @_("PARTICLE", "PARTICLE_SPECIAL")
    def part(self, p):
        """A single particle designator token.

        Returns
        -------
        str
        """
        return p[0]

    @_(
        "TEXT",
        "KEYWORD",
        "PARTICLE",
        "SOURCE_COMMENT",
        "TALLY_COMMENT",
    )
    def data_prefix(self, p):
        """The leading word of a data-input classifier, as a str ValueNode.

        Returns
        -------
        ValueNode
        """
        return syntax_node.ValueNode(p[0], str)

    @_(
        "modifier data_prefix",
        "data_prefix",
        "classifier NUMBER",
        "classifier NULL",
        "classifier particle_type",
    )
    def classifier(self, p):
        """The classifier of a data input.

        This represents the first word of the data input.
        E.g.: ``M4``, `IMP:N`, ``F104:p``

        Returns
        -------
        ClassifierNode
        """
        # either grow the classifier built so far, or start a new one
        if hasattr(p, "classifier"):
            classifier = p.classifier
        else:
            classifier = syntax_node.ClassifierNode()

        if hasattr(p, "modifier"):
            classifier.modifier = syntax_node.ValueNode(p.modifier, str)
        if hasattr(p, "data_prefix"):
            classifier.prefix = p.data_prefix
        if hasattr(p, "NUMBER") or hasattr(p, "NULL"):
            if hasattr(p, "NUMBER"):
                num = p.NUMBER
            else:
                num = p.NULL
            classifier.number = syntax_node.ValueNode(num, int)
        if hasattr(p, "particle_type"):
            classifier.particles = p.particle_type
        return classifier

    @_("classifier padding", "classifier")
    def classifier_phrase(self, p):
        """A classifier with its padding.

        Returns
        -------
        ClassifierNode
        """
        classifier = p.classifier
        if len(p) > 1:
            classifier.padding = p.padding
        return classifier

    @_('"*"', "PARTICLE_SPECIAL")
    def modifier(self, p):
        """A character that modifies a classifier, e.g., ``*TR``.

        Returns
        -------
        str
            the modifier
        """
        # a PARTICLE_SPECIAL token can itself be the literal "*"
        if hasattr(p, "PARTICLE_SPECIAL"):
            if p.PARTICLE_SPECIAL == "*":
                return "*"
        return p[0]

    def error(self, token):
        """Default error handling.

        Puts the data into a queue that can be pulled out later for one final clear debug.

        Parameters
        ----------
        token : Token
            the token that broke the parsing rules.
        """
        if token:
            lineno = getattr(token, "lineno", 0)
            # locate the column of the offending token when the lexer is known
            if self._input and self._input.lexer:
                lexer = self._input.lexer
                index = lexer.find_column(lexer.text, token)
            else:
                index = 0
            if lineno:
                self.log.parse_error(
                    f"sly: Syntax error at line {lineno}, token={token.type}\n",
                    token,
                    lineno,
                    index,
                )
            else:
                self.log.parse_error(
                    f"sly: Syntax error, token={token.type}", token, lineno
                )
        else:
            # token is None: the parser hit end-of-input unexpectedly
            self.log.parse_error("sly: Parse error in input. EOF\n")

    def _debug_parsing_error(self, token):  # pragma: no cover
        """A function that should be called from error when debugging a parsing error.

        Call this from the method error. Also you will need the relevant debugfile to be set and saving the parser
        tables to file. e.g.,

        debugfile = 'parser.out'
        """
        print(f"********* New Parsing Error from: {type(self)} ************ ")
        print(f"Token: {token}")
        print(f"State: {self.state}, statestack: {self.statestack}")
        print(f"Symstack: {self.symstack}")
        print(f"Log length: {len(self.log)}")
        print()
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc