idaholab / MontePy, build 13678731217
05 Mar 2025 02:50PM UTC. Coverage: 98.028% (first build).

Pull Request #668: Implemented clear for material
Merge 0f28b6414 into 572214d91 (committed via GitHub web-flow)

38 of 41 new or added lines in 3 files covered (92.68%). The uncovered new lines marked in this listing are in MCNP_Parser.even_number_sequence.
7657 of 7811 relevant lines covered (98.03%); 0.98 hits per line.

Source file: /montepy/input_parser/parser_base.py (95.63% covered)
# Copyright 2024, Battelle Energy Alliance, LLC All Rights Reserved.
from montepy.input_parser.tokens import MCNP_Lexer
from montepy.input_parser import syntax_node
from sly import Parser
import sly

_dec = sly.yacc._decorator

class MetaBuilder(sly.yacc.ParserMeta):
    """
    Custom MetaClass for allowing subclassing of MCNP_Parser.

    Note: overloading functions is not allowed.
    """

    protected_names = {
        "debugfile",
        "errok",
        "error",
        "index_position",
        "line_position",
        "log",
        "parse",
        "restart",
        "tokens",
        "dont_copy",
    }

    def __new__(meta, classname, bases, attributes):
        if classname != "MCNP_Parser":
            for basis in bases:
                MetaBuilder._flatten_rules(classname, basis, attributes)
        cls = super().__new__(meta, classname, bases, attributes)
        return cls

    @staticmethod
    def _flatten_rules(classname, basis, attributes):
        for attr_name in dir(basis):
            if (
                not attr_name.startswith("_")
                and attr_name not in MetaBuilder.protected_names
                and attr_name not in attributes.get("dont_copy", set())
            ):
                func = getattr(basis, attr_name)
                attributes[attr_name] = func
        parent = basis.__bases__
        for par_basis in parent:
            if par_basis != Parser:
                return

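# Illustrative sketch (not part of the original module): MetaBuilder copies the
# grammar-rule methods of every base class into a subclass's own namespace, because
# SLY only inspects the class body when building its parse tables. A hypothetical
# subclass might look like the following; ``MyParser`` and ``my_rule`` are made-up
# names used only for illustration.
#
#     class MyParser(MCNP_Parser):
#         # rule names listed in ``dont_copy`` are NOT pulled in from the bases
#         dont_copy = {"even_number_sequence"}
#
#         @_("number_sequence padding")
#         def my_rule(self, p):
#             return p.number_sequence
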
class SLY_Supressor:
    """
    This is a fake logger meant to mostly make warnings disappear.
    """

    def __init__(self):
        self._parse_fail_queue = []

    def debug(self, msg, *args, **kwargs):
        pass

    info = debug

    warning = debug

    error = debug

    critical = debug

    def parse_error(self, msg, token=None, lineno=0, index=0):
        """
        Adds a SLY parsing error to the error queue for being dumped later.

        :param msg: The message to display.
        :type msg: str
        :param token: the token that caused the error, if any.
        :type token: Token
        :param lineno: the current lineno of the error (from SLY, not the file), if any.
        :type lineno: int
        :param index: the index in the line where the error occurred, if any.
        :type index: int
        """
        self._parse_fail_queue.append(
            {"message": msg, "token": token, "line": lineno, "index": index}
        )

    def clear_queue(self):
        """
        Clears the error queue and returns all errors.

        Returns a list of dictionaries. Each dictionary has the keys:
        "message", "token", "line", and "index".

        :returns: A list of the errors since the queue was last cleared.
        :rtype: list
        """
        ret = self._parse_fail_queue
        self._parse_fail_queue = []
        return ret

    def __len__(self):
        return len(self._parse_fail_queue)

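# Illustrative sketch (assumption, not original code): the suppressor silences SLY's
# normal logging but records every parse error so a caller can inspect them after a
# failed parse. Roughly:
#
#     log = SLY_Supressor()
#     log.parse_error("sly: Syntax error at line 3, token=TEXT", token=None, lineno=3)
#     len(log)              # -> 1
#     errors = log.clear_queue()
#     errors[0]["message"]  # -> "sly: Syntax error at line 3, token=TEXT"
#     len(log)              # -> 0; clear_queue() empties the queue
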
class MCNP_Parser(Parser, metaclass=MetaBuilder):
    """
    Base class for all MCNP parsers that provides basics.
    """

    # Remove this if trying to see issues with parser
    log = SLY_Supressor()
    tokens = MCNP_Lexer.tokens
    debugfile = None

    def restart(self):
        """
        Clears internal state information about the current parse.

        Should be run before a new object is parsed.
        """
        self.log.clear_queue()
        super().restart()

    def parse(self, token_generator, input=None):
        """
        Parses the token stream and returns a syntax tree.

        If the parsing fails, None will be returned.
        The error queue can be retrieved from ``parser.log.clear_queue()``.

        :param token_generator: the token generator from ``lexer.tokenize``.
        :type token_generator: generator
        :param input: the input that is being lexed and parsed.
        :type input: Input
        :rtype: SyntaxNode
        """
        self._input = input

        # debug every time a token is taken
        def gen_wrapper():
            while True:
                token = next(token_generator, None)
                self._debug_parsing_error(token)
                yield token

        # change to using `gen_wrapper()` to debug
        tree = super().parse(token_generator)
        # treat any previous errors as being fatal even if it recovered.
        if len(self.log) > 0:
            return None
        self.tokens = {}
        return tree

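    # Illustrative sketch (assumption, not original code): typical use of ``parse``
    # from a concrete subclass, following the docstring above. ``SomeParser`` and
    # ``source_text`` are hypothetical placeholders.
    #
    #     lexer = MCNP_Lexer()
    #     parser = SomeParser()
    #     parser.restart()
    #     tree = parser.parse(lexer.tokenize(source_text))
    #     if tree is None:
    #         # parsing failed; the queued errors explain why
    #         for err in parser.log.clear_queue():
    #             print(err["message"], err["line"])
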
    precedence = (("left", SPACE), ("left", TEXT))

    @_("NUMBER", "NUMBER padding")
    def number_phrase(self, p):
        """
        A non-zero number with or without padding.

        :returns: a float ValueNode
        :rtype: ValueNode
        """
        return self._flush_phrase(p, float)

    @_("NUMBER", "NUMBER padding")
    def identifier_phrase(self, p):
        """
        A non-zero number with or without padding converted to int.

        :returns: an int ValueNode
        :rtype: ValueNode
        """
        return self._flush_phrase(p, int)

    @_(
        "numerical_phrase",
        "shortcut_phrase",
        "number_sequence numerical_phrase",
        "number_sequence shortcut_phrase",
    )
    def number_sequence(self, p):
        """
        A list of numbers.

        :rtype: ListNode
        """
        if len(p) == 1:
            sequence = syntax_node.ListNode("number sequence")
            if type(p[0]) == syntax_node.ListNode:
                return p[0]
            sequence.append(p[0])
        else:
            sequence = p[0]
            if type(p[1]) == syntax_node.ListNode:
                for node in p[1].nodes:
                    sequence.append(node)
            else:
                sequence.append(p[1])
        return sequence

    @_(
        "numerical_phrase numerical_phrase",
        "shortcut_phrase",
        "even_number_sequence numerical_phrase numerical_phrase",
        "even_number_sequence shortcut_phrase",
    )
    def even_number_sequence(self, p):
        """
        A list of numbers with an even number of elements*.

        * shortcuts will break this.
        """
        if not hasattr(p, "even_number_sequence"):
            sequence = syntax_node.ListNode("number sequence")
            if type(p[0]) == syntax_node.ListNode:
                return p[0]
            sequence.append(p[0])
        else:
            sequence = p[0]
        if type(p[1]) == syntax_node.ListNode:
            for node in p[1].nodes:
                sequence.append(node)
        else:
            for idx in range(1, len(p)):
                sequence.append(p[idx])
        return sequence

    @_("number_phrase", "null_phrase")
    def numerical_phrase(self, p):
        """
        Any number, including 0, with its padding.

        :returns: a float ValueNode
        :rtype: ValueNode
        """
        return p[0]

    @_("numerical_phrase", "shortcut_phrase")
    def shortcut_start(self, p):
        return p[0]

    @_(
        "shortcut_start NUM_REPEAT",
        "shortcut_start REPEAT",
        "shortcut_start NUM_MULTIPLY",
        "shortcut_start MULTIPLY",
        "shortcut_start NUM_INTERPOLATE padding number_phrase",
        "shortcut_start INTERPOLATE padding number_phrase",
        "shortcut_start NUM_LOG_INTERPOLATE padding number_phrase",
        "shortcut_start LOG_INTERPOLATE padding number_phrase",
        "NUM_JUMP",
        "JUMP",
    )
    def shortcut_sequence(self, p):
        """
        A shortcut (repeat, multiply, interpolate, or jump).

        :returns: the parsed shortcut.
        :rtype: ShortcutNode
        """
        short_cut = syntax_node.ShortcutNode(p)
        if isinstance(p[0], syntax_node.ShortcutNode):
            list_node = syntax_node.ListNode("next_shortcuts")
            list_node.append(p[0])
            list_node.append(short_cut)
            return list_node
        return short_cut

    @_("shortcut_sequence", "shortcut_sequence padding")
    def shortcut_phrase(self, p):
        """
        A complete shortcut, which should be used, and not shortcut_sequence.

        :returns: the parsed shortcut.
        :rtype: ShortcutNode
        """
        sequence = p.shortcut_sequence
        if len(p) == 2:
            sequence.end_padding = p.padding
        return sequence

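    # Illustrative note (not original code): the rules above cover the MCNP-style
    # list shortcuts. Hedged examples of what such entries expand to (expansion is
    # handed off to ``syntax_node.ShortcutNode``; these rules only recognize the
    # token patterns):
    #
    #     1 3R     ->  1 1 1 1   (repeat the previous value 3 times)
    #     1 2I 4   ->  1 2 3 4   (2 linearly interpolated values between 1 and 4)
    #     2 2M     ->  2 4       (multiply the previous value by 2)
    #     2J       ->  _ _       (jump: leave 2 entries at their defaults)
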
    @_("NULL", "NULL padding")
    def null_phrase(self, p):
        """
        A zero number with or without its padding.

        :returns: a float ValueNode
        :rtype: ValueNode
        """
        return self._flush_phrase(p, float)

    @_("NULL", "NULL padding")
    def null_ident_phrase(self, p):
        """
        A zero number with or without its padding, for identification.

        :returns: an int ValueNode
        :rtype: ValueNode
        """
        return self._flush_phrase(p, int)

    @_("TEXT", "TEXT padding")
    def text_phrase(self, p):
        """
        A string with or without its padding.

        :returns: a str ValueNode.
        :rtype: ValueNode
        """
        return self._flush_phrase(p, str)

    def _flush_phrase(self, p, token_type):
        """
        Creates a ValueNode.
        """
        if len(p) > 1:
            padding = p[1]
        else:
            padding = None
        return syntax_node.ValueNode(p[0], token_type, padding)

    @_("SPACE", "DOLLAR_COMMENT", "COMMENT")
    def padding(self, p):
        """
        Anything that is not semantically significant: white space, and comments.

        :returns: All sequential padding.
        :rtype: PaddingNode
        """
        if hasattr(p, "DOLLAR_COMMENT") or hasattr(p, "COMMENT"):
            is_comment = True
        else:
            is_comment = False
        return syntax_node.PaddingNode(p[0], is_comment)

    @_("padding SPACE", "padding DOLLAR_COMMENT", "padding COMMENT", 'padding "&"')
    def padding(self, p):
        """
        Anything that is not semantically significant: white space, and comments.

        :returns: All sequential padding.
        :rtype: PaddingNode
        """
        if hasattr(p, "DOLLAR_COMMENT") or hasattr(p, "COMMENT"):
            is_comment = True
        else:
            is_comment = False
        p[0].append(p[1], is_comment)
        return p[0]

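    # Illustrative note (assumption, not original code): in MCNP input, padding
    # covers white space, "$ ..." inline comments (DOLLAR_COMMENT), presumably
    # "c ..." comment lines (COMMENT), and the "&" line-continuation character.
    # Everything is accumulated into one PaddingNode, presumably so the original
    # formatting can be reproduced when the input is written back out.
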
    @_("parameter", "parameters parameter")
    def parameters(self, p):
        """
        A list of the parameters (key, value pairs) for this input.

        :returns: all parameters
        :rtype: ParametersNode
        """
        if len(p) == 1:
            params = syntax_node.ParametersNode()
            param = p[0]
        else:
            params = p[0]
            param = p[1]
        params.append(param)
        return params

    @_(
        "classifier param_seperator number_sequence",
        "classifier param_seperator text_phrase",
    )
    def parameter(self, p):
        """
        A singular key-value pair.

        :returns: the parameter.
        :rtype: SyntaxNode
        """
        return syntax_node.SyntaxNode(
            p.classifier.prefix.value,
            {"classifier": p.classifier, "seperator": p.param_seperator, "data": p[2]},
        )

    @_("file_atom", "file_name file_atom")
    def file_name(self, p):
        """
        A file name.

        :rtype: str
        """
        ret = p[0]
        if len(p) > 1:
            ret += p[1]
        return ret

    @_(
        "TEXT",
        "FILE_PATH",
        "NUMBER",
        "PARTICLE",
        "INTERPOLATE",
        "JUMP",
        "KEYWORD",
        "LOG_INTERPOLATE",
        "NULL",
        "REPEAT",
        "SURFACE_TYPE",
        "THERMAL_LAW",
        "ZAID",
        "NUMBER_WORD",
    )
    def file_atom(self, p):
        return p[0]

    @_("file_name", "file_name padding")
    def file_phrase(self, p):
        """
        A file name with or without its padding.

        :returns: a str ValueNode.
        :rtype: ValueNode
        """
        return self._flush_phrase(p, str)

    @_("padding", "equals_sign", "padding equals_sign")
    def param_seperator(self, p):
        """
        The separation between a key and value for a parameter.

        :returns: a str ValueNode
        :rtype: ValueNode
        """
        padding = p[0]
        if len(p) > 1:
            padding += p[1]
        return padding

    @_('"="', '"=" padding')
    def equals_sign(self, p):
        """
        The separation between a key and value for a parameter.

        :returns: a str ValueNode
        :rtype: ValueNode
        """
        padding = syntax_node.PaddingNode(p[0])
        if hasattr(p, "padding"):
            padding += p.padding
        return padding

    @_('":" part', 'particle_type "," part')
    def particle_type(self, p):
        if hasattr(p, "particle_type"):
            token = p.particle_type.token + "".join(list(p)[1:])
            particle_node = syntax_node.ParticleNode("data particles", token)
        else:
            particle_node = syntax_node.ParticleNode("data particles", "".join(list(p)))

        return particle_node

    @_("PARTICLE", "PARTICLE_SPECIAL")
    def part(self, p):
        return p[0]

    @_(
        "TEXT",
        "KEYWORD",
        "PARTICLE",
        "SOURCE_COMMENT",
        "TALLY_COMMENT",
    )
    def data_prefix(self, p):
        return syntax_node.ValueNode(p[0], str)

    @_(
        "modifier data_prefix",
        "data_prefix",
        "classifier NUMBER",
        "classifier NULL",
        "classifier particle_type",
    )
    def classifier(self, p):
        """
        The classifier of a data input.

        This represents the first word of the data input.
        E.g.: ``M4``, ``IMP:N``, ``F104:p``

        :rtype: ClassifierNode
        """
        if hasattr(p, "classifier"):
            classifier = p.classifier
        else:
            classifier = syntax_node.ClassifierNode()

        if hasattr(p, "modifier"):
            classifier.modifier = syntax_node.ValueNode(p.modifier, str)
        if hasattr(p, "data_prefix"):
            classifier.prefix = p.data_prefix
        if hasattr(p, "NUMBER") or hasattr(p, "NULL"):
            if hasattr(p, "NUMBER"):
                num = p.NUMBER
            else:
                num = p.NULL
            classifier.number = syntax_node.ValueNode(num, int)
        if hasattr(p, "particle_type"):
            classifier.particles = p.particle_type
        return classifier

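    # Illustrative note (not original code): how the pieces of a classifier map onto
    # the attributes assigned above, using the examples from the docstring. The exact
    # attribute values are an assumption based on those assignments:
    #
    #     "M4"     -> prefix="M",   number=4
    #     "IMP:N"  -> prefix="IMP", particles=":N"
    #     "*TR5"   -> modifier="*", prefix="TR", number=5
    #
    # ``*TR5`` is a hypothetical example combining the ``modifier`` rule below with a
    # numbered prefix.
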
    @_("classifier padding", "classifier")
    def classifier_phrase(self, p):
        """
        A classifier with its padding.

        :rtype: ClassifierNode
        """
        classifier = p.classifier
        if len(p) > 1:
            classifier.padding = p.padding
        return classifier

    @_('"*"', "PARTICLE_SPECIAL")
    def modifier(self, p):
        """
        A character that modifies a classifier, e.g., ``*TR``.

        :returns: the modifier
        :rtype: str
        """
        if hasattr(p, "PARTICLE_SPECIAL"):
            if p.PARTICLE_SPECIAL == "*":
                return "*"
        return p[0]

    def error(self, token):
        """
        Default error handling.

        Puts the data into a queue that can be pulled out later for one final, clear debug.

        :param token: the token that broke the parsing rules.
        :type token: Token
        """
        if token:
            lineno = getattr(token, "lineno", 0)
            if self._input and self._input.lexer:
                lexer = self._input.lexer
                index = lexer.find_column(lexer.text, token)
            else:
                index = 0
            if lineno:
                self.log.parse_error(
                    f"sly: Syntax error at line {lineno}, token={token.type}\n",
                    token,
                    lineno,
                    index,
                )
            else:
                self.log.parse_error(
                    f"sly: Syntax error, token={token.type}", token, lineno
                )
        else:
            self.log.parse_error("sly: Parse error in input. EOF\n")

    def _debug_parsing_error(self, token):  # pragma: no cover
        """
        A function that should be called from ``error`` when debugging a parsing error.

        Call this from the ``error`` method. You will also need ``debugfile`` to be set
        so that the parser tables are saved to a file, e.g.::

            debugfile = 'parser.out'
        """
        print(f"********* New Parsing Error from: {type(self)} ************ ")
        print(f"Token: {token}")
        print(f"State: {self.state}, statestack: {self.statestack}")
        print(f"Symstack: {self.symstack}")
        print(f"Log length: {len(self.log)}")
        print()
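
# Illustrative sketch (assumption, not original code): to debug a parsing failure,
# the comments in this module point to three switches:
#
#     1. In ``MCNP_Parser``, set ``debugfile = "parser.out"`` (instead of ``None``)
#        so SLY writes its parser tables to a file.
#     2. In ``MCNP_Parser.parse``, pass ``gen_wrapper()`` to ``super().parse`` instead
#        of ``token_generator`` so ``_debug_parsing_error`` prints the parser state
#        for every token taken.
#     3. Remove the ``log = SLY_Supressor()`` line to restore SLY's normal warning
#        output, per the "Remove this if trying to see issues with parser" comment.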