• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

source-academy / py-slang / 23586367727

26 Mar 2026 09:12AM UTC coverage: 40.084% (-1.1%) from 41.233%
23586367727

Pull #117

github

web-flow
Merge adc811e75 into 553cb45dc
Pull Request #117: Add Pyodide implementation

293 of 1051 branches covered (27.88%)

Branch coverage included in aggregate %.

51 of 53 new or added lines in 2 files covered. (96.23%)

13 existing lines in 1 file now uncovered.

1138 of 2519 relevant lines covered (45.18%)

57.64 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

71.64
/src/parser/python-grammar.ts
1
// Generated automatically by nearley, version 2.20.1
2
// http://github.com/Hardmath123/nearley
3
/** nearley's default postprocessor: yield the first matched symbol unchanged. */
const id = (d) => d[0];
6

7
import { StmtNS, ExprNS } from "../ast-types";
8
import pythonLexer from "./lexer";
9
import { toAstToken } from "./token-bridge";
10

11
// Tiny postprocess helpers shared by the grammar rules below.
const nil = function () {
  return null; // match carries no AST value
};
const list = function (d) {
  return [d[0]]; // wrap the first child in a fresh array
};
const drop = function () {
  return []; // discard the match entirely (e.g. empty parameter list)
};
14

15
/**
 * Strip the surrounding quotes from a Python string literal lexeme and decode
 * its escape sequences.
 *
 * Handles triple-quoted (`"""…"""`, `'''…'''`) and single-quoted (`"…"`,
 * `'…'`) literals; any other input is returned unchanged. Recognized escapes:
 * \n \t \r \\ \' \" \/ \b \f \a \v \0, \xNN (2 hex digits) and \uNNNN
 * (4 hex digits). Unrecognized escapes are kept literally (backslash
 * preserved), matching the original grammar's behavior.
 *
 * @param {string} s - Raw lexeme, including its quote characters.
 * @returns {string} The decoded string value.
 */
function stripQuotes(s) {
  let inner;
  if (s.startsWith('"""') || s.startsWith("'''")) inner = s.slice(3, -3);
  else if (s.startsWith('"') || s.startsWith("'")) inner = s.slice(1, -1);
  else return s;
  return inner.replace(/\\(["'\\\/bfnrtav0]|x[0-9a-fA-F]{2}|u[0-9a-fA-F]{4}|.)/g, (_, ch) => {
    switch (ch[0]) {
      case "n":
        return "\n";
      case "t":
        return "\t";
      case "r":
        return "\r";
      case "\\":
        return "\\";
      case "'":
        return "'";
      case '"':
        return '"';
      case "/":
        return "/";
      case "b":
        return "\b";
      case "f":
        return "\f";
      case "a":
        return "\x07"; // BEL
      case "v":
        return "\x0B"; // vertical tab
      case "0":
        return "\0";
      case "x":
      case "u":
        // A bare "\x"/"\u" with missing or invalid hex digits is matched by
        // the "." fallback, so ch is just "x"/"u". Previously this fell
        // through to parseInt("", 16) === NaN and String.fromCharCode(NaN)
        // === "\0", silently injecting a NUL. Keep the escape literally
        // instead, consistent with the default branch.
        if (ch.length === 1) return "\\" + ch;
        return String.fromCharCode(parseInt(ch.slice(1), 16));
      default:
        return "\\" + ch; // unrecognized escapes kept literally
    }
  });
}
56

57
// ── Leaf AST constructors (token → node) ────────────────────────────────
// Each takes a one-element nearley match ([rawToken]) and builds the
// corresponding leaf expression node, reusing the converted token for both
// the start and end positions.
const astVariable = (d) => {
  const posTok = toAstToken(d[0]);
  return new ExprNS.Variable(posTok, posTok, posTok);
};
const astBigInt = (d) => {
  const posTok = toAstToken(d[0]);
  return new ExprNS.BigIntLiteral(posTok, posTok, d[0].value);
};
const astComplex = (d) => {
  const posTok = toAstToken(d[0]);
  return new ExprNS.Complex(posTok, posTok, d[0].value);
};
const astNone = (d) => {
  const posTok = toAstToken(d[0]);
  return new ExprNS.None(posTok, posTok);
};
const astString = (d) => {
  const posTok = toAstToken(d[0]);
  return new ExprNS.Literal(posTok, posTok, stripQuotes(d[0].value));
};
const astTrue = (d) => {
  const posTok = toAstToken(d[0]);
  return new ExprNS.Literal(posTok, posTok, true);
};
const astFalse = (d) => {
  const posTok = toAstToken(d[0]);
  return new ExprNS.Literal(posTok, posTok, false);
};
86

87
// ── Operator AST constructors (children → node) ────────────────────────
// `lhs`/`rhs`/`operand` are already-built expression nodes. astBinary and
// astCompare receive an operator that is already an AST token; the others
// receive a raw lexer token and convert it here.
const astBinary = ([lhs, opTok, rhs]) => {
  return new ExprNS.Binary(lhs.startToken, rhs.endToken, lhs, opTok, rhs);
};
const astBinaryTok = ([lhs, rawOp, rhs]) => {
  return new ExprNS.Binary(lhs.startToken, rhs.endToken, lhs, toAstToken(rawOp), rhs);
};
const astBoolOp = ([lhs, rawOp, rhs]) => {
  return new ExprNS.BoolOp(lhs.startToken, rhs.endToken, lhs, toAstToken(rawOp), rhs);
};
const astUnary = ([rawOp, operand]) => {
  // toAstToken is called twice on purpose (two distinct token objects),
  // matching the original behavior exactly.
  return new ExprNS.Unary(toAstToken(rawOp), operand.endToken, toAstToken(rawOp), operand);
};
const astCompare = ([lhs, opTok, rhs]) => {
  return new ExprNS.Compare(lhs.startToken, rhs.endToken, lhs, opTok, rhs);
};
94

95
// ── Token / list helpers ────────────────────────────────────────────────
// Convert a single raw lexer token into an AST token.
const tok = (d) => toAstToken(d[0]);
// Flatten a "first (sep item)*" match into [first, item, item, …].
const flatList = ([head, tail]) => {
  const items = [head];
  for (const pair of tail) {
    items.push(pair[1]); // pair is [separator, item]; keep the item only
  }
  return items;
};
// Same shape as flatList, but every element is converted to an AST token.
const tokList = ([head, tail]) => {
  const out = [toAstToken(head)];
  for (const pair of tail) {
    out.push(toAstToken(pair[1]));
  }
  return out;
};
99
// ── nearley exports: lexer, grammar table, start symbol ─────────────────
// NOTE(review): this table is machine-generated (see the nearley header at
// the top of the file). Regenerate it with nearleyc rather than editing the
// rule objects by hand. `$ebnf$N` / `$subexpression$N` names are nearley's
// internal expansions of repetition/grouping operators; the bare fallback
// identifiers in the `pythonLexer.has(...) ? ... : name` ternaries are
// generated dead code (never evaluated when the lexer defines the type).
let Lexer = pythonLexer;
let ParserRules = [
  // ── program: (import_stmt NEWLINE)* (statement | NEWLINE)* ───────────
  { name: "program$ebnf$1", symbols: [] },
  {
    name: "program$ebnf$1$subexpression$1",
    symbols: ["import_stmt", pythonLexer.has("newline") ? { type: "newline" } : newline],
  },
  {
    name: "program$ebnf$1",
    symbols: ["program$ebnf$1", "program$ebnf$1$subexpression$1"],
    postprocess: function arrpush(d) {
      return d[0].concat([d[1]]);
    },
  },
  { name: "program$ebnf$2", symbols: [] },
  { name: "program$ebnf$2$subexpression$1", symbols: ["statement"] },
  {
    name: "program$ebnf$2$subexpression$1",
    symbols: [pythonLexer.has("newline") ? { type: "newline" } : newline],
  },
  {
    name: "program$ebnf$2",
    symbols: ["program$ebnf$2", "program$ebnf$2$subexpression$1"],
    postprocess: function arrpush(d) {
      return d[0].concat([d[1]]);
    },
  },
  {
    name: "program",
    symbols: ["program$ebnf$1", "program$ebnf$2"],
    // Build the FileInput root: drop bare-newline matches, keep import and
    // statement nodes; synthesize a position token for an empty program.
    postprocess: ([imports, stmts]) => {
      const importNodes = imports.map(d => d[0]);
      const stmtNodes = stmts.map(d => d[0]).filter(s => s && s.startToken !== undefined);
      const filtered = [...importNodes, ...stmtNodes];
      const start = filtered[0]
        ? filtered[0].startToken
        : toAstToken({ type: "newline", value: "", line: 1, col: 1, offset: 0 });
      const end = filtered.length > 0 ? filtered[filtered.length - 1].endToken : start;
      return new StmtNS.FileInput(start, end, filtered, []);
    },
  },
  // ── imports: "from" dotted_name "import" clause ──────────────────────
  {
    name: "import_stmt",
    symbols: [{ literal: "from" }, "dotted_name", { literal: "import" }, "import_clause"],
    postprocess: ([kw, mod, , names]) => {
      const last = names[names.length - 1];
      const endTok = last.alias || last.name;
      return new StmtNS.FromImport(toAstToken(kw), endTok, mod, names);
    },
  },
  { name: "dotted_name$ebnf$1", symbols: [] },
  {
    name: "dotted_name$ebnf$1$subexpression$1",
    symbols: [{ literal: "." }, pythonLexer.has("name") ? { type: "name" } : name],
  },
  {
    name: "dotted_name$ebnf$1",
    symbols: ["dotted_name$ebnf$1", "dotted_name$ebnf$1$subexpression$1"],
    postprocess: function arrpush(d) {
      return d[0].concat([d[1]]);
    },
  },
  {
    name: "dotted_name",
    symbols: [pythonLexer.has("name") ? { type: "name" } : name, "dotted_name$ebnf$1"],
    // Collapse "a.b.c" into a single token whose lexeme is the dotted path.
    postprocess: ([first, rest]) => {
      let tok = toAstToken(first);
      for (const [, n] of rest) {
        const right = toAstToken(n);
        tok.lexeme = tok.lexeme + "." + right.lexeme;
      }
      return tok;
    },
  },
  { name: "import_clause", symbols: ["import_as_names"], postprocess: id },
  {
    name: "import_clause",
    symbols: [{ literal: "(" }, "import_as_names", { literal: ")" }],
    postprocess: ([, ns]) => ns,
  },
  { name: "import_as_names$ebnf$1", symbols: [] },
  { name: "import_as_names$ebnf$1$subexpression$1", symbols: [{ literal: "," }, "import_as_name"] },
  {
    name: "import_as_names$ebnf$1",
    symbols: ["import_as_names$ebnf$1", "import_as_names$ebnf$1$subexpression$1"],
    postprocess: function arrpush(d) {
      return d[0].concat([d[1]]);
    },
  },
  {
    name: "import_as_names",
    symbols: ["import_as_name", "import_as_names$ebnf$1"],
    postprocess: flatList,
  },
  {
    name: "import_as_name",
    symbols: [pythonLexer.has("name") ? { type: "name" } : name],
    postprocess: ([t]) => ({ name: toAstToken(t), alias: null }),
  },
  {
    name: "import_as_name",
    symbols: [
      pythonLexer.has("name") ? { type: "name" } : name,
      { literal: "as" },
      pythonLexer.has("name") ? { type: "name" } : name,
    ],
    postprocess: ([t, , a]) => ({ name: toAstToken(t), alias: toAstToken(a) }),
  },
  // ── statement: each simple-statement form followed by NEWLINE, plus
  //    the compound statements (if/while/for/def) which consume their own
  //    terminators via `block` ───────────────────────────────────────────
  {
    name: "statement",
    symbols: ["statementAssign", pythonLexer.has("newline") ? { type: "newline" } : newline],
    postprocess: id,
  },
  {
    name: "statement",
    symbols: ["statementAnnAssign", pythonLexer.has("newline") ? { type: "newline" } : newline],
    postprocess: id,
  },
  {
    name: "statement",
    symbols: [
      "statementSubscriptAssign",
      pythonLexer.has("newline") ? { type: "newline" } : newline,
    ],
    postprocess: id,
  },
  {
    name: "statement",
    symbols: ["statementReturn", pythonLexer.has("newline") ? { type: "newline" } : newline],
    postprocess: id,
  },
  {
    name: "statement",
    symbols: ["statementPass", pythonLexer.has("newline") ? { type: "newline" } : newline],
    postprocess: id,
  },
  {
    name: "statement",
    symbols: ["statementBreak", pythonLexer.has("newline") ? { type: "newline" } : newline],
    postprocess: id,
  },
  {
    name: "statement",
    symbols: ["statementContinue", pythonLexer.has("newline") ? { type: "newline" } : newline],
    postprocess: id,
  },
  {
    name: "statement",
    symbols: ["statementGlobal", pythonLexer.has("newline") ? { type: "newline" } : newline],
    postprocess: id,
  },
  {
    name: "statement",
    symbols: ["statementNonlocal", pythonLexer.has("newline") ? { type: "newline" } : newline],
    postprocess: id,
  },
  {
    name: "statement",
    symbols: ["statementAssert", pythonLexer.has("newline") ? { type: "newline" } : newline],
    postprocess: id,
  },
  {
    name: "statement",
    symbols: ["statementExpr", pythonLexer.has("newline") ? { type: "newline" } : newline],
    postprocess: id,
  },
  { name: "statement", symbols: ["if_statement"], postprocess: id },
  { name: "statement", symbols: ["statementWhile"], postprocess: id },
  { name: "statement", symbols: ["statementFor"], postprocess: id },
  { name: "statement", symbols: ["statementDef"], postprocess: id },
  // ── simple statements ────────────────────────────────────────────────
  {
    name: "statementAssign",
    symbols: [pythonLexer.has("name") ? { type: "name" } : name, { literal: "=" }, "expression"],
    postprocess: ([n, , v]) => {
      const tok = toAstToken(n);
      return new StmtNS.Assign(tok, v.endToken, new ExprNS.Variable(tok, tok, tok), v);
    },
  },
  {
    name: "statementAnnAssign",
    symbols: [
      pythonLexer.has("name") ? { type: "name" } : name,
      { literal: ":" },
      "expression",
      { literal: "=" },
      "expression",
    ],
    postprocess: ([n, , ann, , v]) => {
      const tok = toAstToken(n);
      return new StmtNS.AnnAssign(tok, v.endToken, new ExprNS.Variable(tok, tok, tok), v, ann);
    },
  },
  {
    name: "statementAnnAssign",
    // Bare annotation "x: T" — no value, so a None node stands in for it.
    symbols: [pythonLexer.has("name") ? { type: "name" } : name, { literal: ":" }, "expression"],
    postprocess: ([n, , ann]) => {
      const nameTok = toAstToken(n);
      const dummyVal = new ExprNS.None(ann.endToken, ann.endToken);
      return new StmtNS.AnnAssign(
        nameTok,
        ann.endToken,
        new ExprNS.Variable(nameTok, nameTok, nameTok),
        dummyVal,
        ann,
      );
    },
  },
  {
    name: "statementSubscriptAssign",
    // obj[index] = value
    symbols: [
      "expressionPost",
      pythonLexer.has("lsqb") ? { type: "lsqb" } : lsqb,
      "expression",
      pythonLexer.has("rsqb") ? { type: "rsqb" } : rsqb,
      { literal: "=" },
      "expression",
    ],
    postprocess: function (d) {
      var obj = d[0],
        idx = d[2],
        rsqb = d[3],
        val = d[5];
      var sub = new ExprNS.Subscript(obj.startToken, toAstToken(rsqb), obj, idx);
      return new StmtNS.Assign(obj.startToken, val.endToken, sub, val);
    },
  },
  {
    name: "statementReturn",
    symbols: [{ literal: "return" }, "expression"],
    postprocess: ([kw, expr]) => new StmtNS.Return(toAstToken(kw), expr.endToken, expr),
  },
  {
    name: "statementReturn",
    symbols: [{ literal: "return" }],
    postprocess: ([t]) => {
      const tok = toAstToken(t);
      return new StmtNS.Return(tok, tok, null);
    },
  },
  {
    name: "statementPass",
    symbols: [{ literal: "pass" }],
    postprocess: ([t]) => {
      const tok = toAstToken(t);
      return new StmtNS.Pass(tok, tok);
    },
  },
  {
    name: "statementBreak",
    symbols: [{ literal: "break" }],
    postprocess: ([t]) => {
      const tok = toAstToken(t);
      return new StmtNS.Break(tok, tok);
    },
  },
  {
    name: "statementContinue",
    symbols: [{ literal: "continue" }],
    postprocess: ([t]) => {
      const tok = toAstToken(t);
      return new StmtNS.Continue(tok, tok);
    },
  },
  {
    name: "statementGlobal",
    symbols: [{ literal: "global" }, pythonLexer.has("name") ? { type: "name" } : name],
    postprocess: ([kw, n]) => new StmtNS.Global(toAstToken(kw), toAstToken(n), toAstToken(n)),
  },
  {
    name: "statementNonlocal",
    symbols: [{ literal: "nonlocal" }, pythonLexer.has("name") ? { type: "name" } : name],
    postprocess: ([kw, n]) => new StmtNS.NonLocal(toAstToken(kw), toAstToken(n), toAstToken(n)),
  },
  {
    name: "statementAssert",
    symbols: [{ literal: "assert" }, "expression"],
    postprocess: ([kw, e]) => new StmtNS.Assert(toAstToken(kw), e.endToken, e),
  },
  {
    name: "statementExpr",
    symbols: ["expression"],
    postprocess: ([e]) => new StmtNS.SimpleExpr(e.startToken, e.endToken, e),
  },
  // ── compound statements ──────────────────────────────────────────────
  {
    name: "statementWhile",
    symbols: [{ literal: "while" }, "expression", { literal: ":" }, "block"],
    postprocess: ([kw, test, , body]) =>
      new StmtNS.While(toAstToken(kw), body[body.length - 1].endToken, test, body),
  },
  {
    name: "statementFor",
    symbols: [
      { literal: "for" },
      pythonLexer.has("name") ? { type: "name" } : name,
      { literal: "in" },
      "expression",
      { literal: ":" },
      "block",
    ],
    postprocess: ([kw, target, , iter, , body]) =>
      new StmtNS.For(
        toAstToken(kw),
        body[body.length - 1].endToken,
        toAstToken(target),
        iter,
        body,
      ),
  },
  {
    name: "statementDef",
    symbols: [
      { literal: "def" },
      pythonLexer.has("name") ? { type: "name" } : name,
      "params",
      { literal: ":" },
      "block",
    ],
    postprocess: ([kw, name, params, , body]) =>
      new StmtNS.FunctionDef(
        toAstToken(kw),
        body[body.length - 1].endToken,
        toAstToken(name),
        params,
        body,
        [],
      ),
  },
  // ── if / elif* / else? ───────────────────────────────────────────────
  { name: "if_statement$ebnf$1", symbols: [] },
  {
    name: "if_statement$ebnf$1$subexpression$1",
    symbols: [{ literal: "elif" }, "expression", { literal: ":" }, "block"],
  },
  {
    name: "if_statement$ebnf$1",
    symbols: ["if_statement$ebnf$1", "if_statement$ebnf$1$subexpression$1"],
    postprocess: function arrpush(d) {
      return d[0].concat([d[1]]);
    },
  },
  {
    name: "if_statement$ebnf$2$subexpression$1",
    symbols: [{ literal: "else" }, { literal: ":" }, "block"],
  },
  {
    name: "if_statement$ebnf$2",
    symbols: ["if_statement$ebnf$2$subexpression$1"],
    postprocess: id,
  },
  {
    name: "if_statement$ebnf$2",
    symbols: [],
    postprocess: function (d) {
      return null;
    },
  },
  {
    name: "if_statement",
    symbols: [
      { literal: "if" },
      "expression",
      { literal: ":" },
      "block",
      "if_statement$ebnf$1",
      "if_statement$ebnf$2",
    ],
    // Fold elif clauses right-to-left into nested If nodes so that
    // "if/elif/else" becomes "if { … } else { if { … } else … }".
    postprocess: ([kw, test, , body, elifs, elseBlock]) => {
      let else_ = elseBlock ? elseBlock[0][2] : null;
      for (let i = elifs.length - 1; i >= 0; i--) {
        const [ekw, etest, ecolon, ebody] = elifs[i];
        const endTok =
          else_ && else_.length > 0
            ? else_[else_.length - 1].endToken
            : ebody[ebody.length - 1].endToken;
        else_ = [new StmtNS.If(toAstToken(ekw), endTok, etest, ebody, else_)];
      }
      const endTok =
        else_ && else_.length > 0
          ? else_[else_.length - 1].endToken
          : body[body.length - 1].endToken;
      return new StmtNS.If(toAstToken(kw), endTok, test, body, else_);
    },
  },
  // ── comma-separated name lists (lambda parameters) ───────────────────
  { name: "names$ebnf$1", symbols: [] },
  {
    name: "names$ebnf$1$subexpression$1",
    symbols: [{ literal: "," }, pythonLexer.has("name") ? { type: "name" } : name],
  },
  {
    name: "names$ebnf$1",
    symbols: ["names$ebnf$1", "names$ebnf$1$subexpression$1"],
    postprocess: function arrpush(d) {
      return d[0].concat([d[1]]);
    },
  },
  {
    name: "names",
    symbols: [pythonLexer.has("name") ? { type: "name" } : name, "names$ebnf$1"],
    postprocess: tokList,
  },
  // ── block: either an inline simple statement or an indented suite ────
  {
    name: "block",
    symbols: ["blockInline", pythonLexer.has("newline") ? { type: "newline" } : newline],
    postprocess: list,
  },
  { name: "block$ebnf$1$subexpression$1", symbols: ["statement"] },
  {
    name: "block$ebnf$1$subexpression$1",
    symbols: [pythonLexer.has("newline") ? { type: "newline" } : newline],
  },
  { name: "block$ebnf$1", symbols: ["block$ebnf$1$subexpression$1"] },
  { name: "block$ebnf$1$subexpression$2", symbols: ["statement"] },
  {
    name: "block$ebnf$1$subexpression$2",
    symbols: [pythonLexer.has("newline") ? { type: "newline" } : newline],
  },
  {
    name: "block$ebnf$1",
    symbols: ["block$ebnf$1", "block$ebnf$1$subexpression$2"],
    postprocess: function arrpush(d) {
      return d[0].concat([d[1]]);
    },
  },
  {
    name: "block",
    symbols: [
      pythonLexer.has("newline") ? { type: "newline" } : newline,
      pythonLexer.has("indent") ? { type: "indent" } : indent,
      "block$ebnf$1",
      pythonLexer.has("dedent") ? { type: "dedent" } : dedent,
    ],
    // Keep statement nodes only; bare newlines produce tokens without a
    // startToken and are filtered out here.
    postprocess: ([, , stmts]) => stmts.map(d => d[0]).filter(s => s && s.startToken !== undefined),
  },
  { name: "blockInline", symbols: ["statementAssign"], postprocess: id },
  { name: "blockInline", symbols: ["statementAnnAssign"], postprocess: id },
  { name: "blockInline", symbols: ["statementSubscriptAssign"], postprocess: id },
  { name: "blockInline", symbols: ["statementReturn"], postprocess: id },
  { name: "blockInline", symbols: ["statementPass"], postprocess: id },
  { name: "blockInline", symbols: ["statementBreak"], postprocess: id },
  { name: "blockInline", symbols: ["statementContinue"], postprocess: id },
  { name: "blockInline", symbols: ["statementGlobal"], postprocess: id },
  { name: "blockInline", symbols: ["statementNonlocal"], postprocess: id },
  { name: "blockInline", symbols: ["statementAssert"], postprocess: id },
  { name: "blockInline", symbols: ["statementExpr"], postprocess: id },
  // ── def parameter lists; each token carries an isStarred flag ────────
  {
    name: "rest_names",
    symbols: [pythonLexer.has("name") ? { type: "name" } : name],
    postprocess: ([t]) => {
      const tok = toAstToken(t);
      tok.isStarred = false;
      return [tok];
    },
  },
  {
    name: "rest_names",
    symbols: [{ literal: "*" }, pythonLexer.has("name") ? { type: "name" } : name],
    postprocess: ([, t]) => {
      const tok = toAstToken(t);
      tok.isStarred = true;
      return [tok];
    },
  },
  {
    name: "rest_names",
    symbols: ["rest_names", { literal: "," }, pythonLexer.has("name") ? { type: "name" } : name],
    postprocess: ([params, , t]) => {
      const tok = toAstToken(t);
      tok.isStarred = false;
      return [...params, tok];
    },
  },
  {
    name: "rest_names",
    symbols: [
      "rest_names",
      { literal: "," },
      { literal: "*" },
      pythonLexer.has("name") ? { type: "name" } : name,
    ],
    postprocess: ([params, , , t]) => {
      const tok = toAstToken(t);
      tok.isStarred = true;
      return [...params, tok];
    },
  },
  { name: "params", symbols: [{ literal: "(" }, { literal: ")" }], postprocess: drop },
  {
    name: "params",
    symbols: [{ literal: "(" }, "rest_names", { literal: ")" }],
    postprocess: ([, ps]) => ps,
  },
  // ── expression precedence ladder: ternary → or → and → not → cmp →
  //    add → mul → unary → pow → postfix → atom ─────────────────────────
  {
    name: "expression",
    symbols: ["expressionOr", { literal: "if" }, "expressionOr", { literal: "else" }, "expression"],
    postprocess: ([cons, , test, , alt]) =>
      new ExprNS.Ternary(cons.startToken, alt.endToken, test, cons, alt),
  },
  { name: "expression", symbols: ["expressionOr"], postprocess: id },
  { name: "expression", symbols: ["lambda_expr"], postprocess: id },
  {
    name: "expressionOr",
    symbols: ["expressionOr", { literal: "or" }, "expressionAnd"],
    postprocess: astBoolOp,
  },
  { name: "expressionOr", symbols: ["expressionAnd"], postprocess: id },
  {
    name: "expressionAnd",
    symbols: ["expressionAnd", { literal: "and" }, "expressionNot"],
    postprocess: astBoolOp,
  },
  { name: "expressionAnd", symbols: ["expressionNot"], postprocess: id },
  { name: "expressionNot", symbols: [{ literal: "not" }, "expressionNot"], postprocess: astUnary },
  { name: "expressionNot", symbols: ["expressionCmp"], postprocess: id },
  {
    name: "expressionCmp",
    symbols: ["expressionCmp", "expressionCmpOp", "expressionAdd"],
    postprocess: astCompare,
  },
  { name: "expressionCmp", symbols: ["expressionAdd"], postprocess: id },
  {
    name: "expressionCmpOp",
    symbols: [pythonLexer.has("less") ? { type: "less" } : less],
    postprocess: tok,
  },
  {
    name: "expressionCmpOp",
    symbols: [pythonLexer.has("greater") ? { type: "greater" } : greater],
    postprocess: tok,
  },
  {
    name: "expressionCmpOp",
    symbols: [pythonLexer.has("doubleequal") ? { type: "doubleequal" } : doubleequal],
    postprocess: tok,
  },
  {
    name: "expressionCmpOp",
    symbols: [pythonLexer.has("greaterequal") ? { type: "greaterequal" } : greaterequal],
    postprocess: tok,
  },
  {
    name: "expressionCmpOp",
    symbols: [pythonLexer.has("lessequal") ? { type: "lessequal" } : lessequal],
    postprocess: tok,
  },
  {
    name: "expressionCmpOp",
    symbols: [pythonLexer.has("notequal") ? { type: "notequal" } : notequal],
    postprocess: tok,
  },
  { name: "expressionCmpOp", symbols: [{ literal: "in" }], postprocess: tok },
  {
    name: "expressionCmpOp",
    // Two-token operators fold into one token with a combined lexeme.
    symbols: [{ literal: "not" }, { literal: "in" }],
    postprocess: ([t]) => {
      const tok = toAstToken(t);
      tok.lexeme = "not in";
      return tok;
    },
  },
  { name: "expressionCmpOp", symbols: [{ literal: "is" }], postprocess: tok },
  {
    name: "expressionCmpOp",
    symbols: [{ literal: "is" }, { literal: "not" }],
    postprocess: ([t]) => {
      const tok = toAstToken(t);
      tok.lexeme = "is not";
      return tok;
    },
  },
  {
    name: "expressionAdd",
    symbols: ["expressionAdd", "expressionAddOp", "expressionMul"],
    postprocess: astBinary,
  },
  { name: "expressionAdd", symbols: ["expressionMul"], postprocess: id },
  {
    name: "expressionAddOp",
    symbols: [pythonLexer.has("plus") ? { type: "plus" } : plus],
    postprocess: tok,
  },
  {
    name: "expressionAddOp",
    symbols: [pythonLexer.has("minus") ? { type: "minus" } : minus],
    postprocess: tok,
  },
  {
    name: "expressionMul",
    symbols: ["expressionMul", "expressionMulOp", "expressionUnary"],
    postprocess: astBinary,
  },
  { name: "expressionMul", symbols: ["expressionUnary"], postprocess: id },
  {
    name: "expressionMulOp",
    symbols: [pythonLexer.has("star") ? { type: "star" } : star],
    postprocess: tok,
  },
  {
    name: "expressionMulOp",
    symbols: [pythonLexer.has("slash") ? { type: "slash" } : slash],
    postprocess: tok,
  },
  {
    name: "expressionMulOp",
    symbols: [pythonLexer.has("percent") ? { type: "percent" } : percent],
    postprocess: tok,
  },
  {
    name: "expressionMulOp",
    symbols: [pythonLexer.has("doubleslash") ? { type: "doubleslash" } : doubleslash],
    postprocess: tok,
  },
  {
    name: "expressionUnary",
    symbols: [pythonLexer.has("plus") ? { type: "plus" } : plus, "expressionUnary"],
    postprocess: astUnary,
  },
  {
    name: "expressionUnary",
    symbols: [pythonLexer.has("minus") ? { type: "minus" } : minus, "expressionUnary"],
    postprocess: astUnary,
  },
  { name: "expressionUnary", symbols: ["expressionPow"], postprocess: id },
  {
    name: "expressionPow",
    // Right-associative: base is postfix, exponent recurses through unary.
    symbols: [
      "expressionPost",
      pythonLexer.has("doublestar") ? { type: "doublestar" } : doublestar,
      "expressionUnary",
    ],
    postprocess: astBinaryTok,
  },
  { name: "expressionPow", symbols: ["expressionPost"], postprocess: id },
  // ── postfix: subscripts and calls ────────────────────────────────────
  {
    name: "expressionPost",
    symbols: [
      "expressionPost",
      pythonLexer.has("lsqb") ? { type: "lsqb" } : lsqb,
      "expression",
      pythonLexer.has("rsqb") ? { type: "rsqb" } : rsqb,
    ],
    postprocess: ([obj, , idx, rsqb]) =>
      new ExprNS.Subscript(obj.startToken, toAstToken(rsqb), obj, idx),
  },
  {
    name: "expressionPost",
    symbols: ["expressionPost", { literal: "(" }, "expressions", { literal: ")" }],
    postprocess: ([callee, , args, rparen]) =>
      new ExprNS.Call(callee.startToken, toAstToken(rparen), callee, args),
  },
  {
    name: "expressionPost",
    symbols: ["expressionPost", { literal: "(" }, { literal: ")" }],
    postprocess: ([callee, , rparen]) =>
      new ExprNS.Call(callee.startToken, toAstToken(rparen), callee, []),
  },
  { name: "expressionPost", symbols: ["atom"], postprocess: id },
  // ── atoms: groupings, list displays, names, literals ─────────────────
  {
    name: "atom",
    symbols: [{ literal: "(" }, "expression", { literal: ")" }],
    postprocess: ([, e]) => new ExprNS.Grouping(e.startToken, e.endToken, e),
  },
  {
    name: "atom",
    symbols: [
      pythonLexer.has("lsqb") ? { type: "lsqb" } : lsqb,
      pythonLexer.has("rsqb") ? { type: "rsqb" } : rsqb,
    ],
    postprocess: ([l, r]) => new ExprNS.List(toAstToken(l), toAstToken(r), []),
  },
  {
    name: "atom",
    symbols: [
      pythonLexer.has("lsqb") ? { type: "lsqb" } : lsqb,
      "expressions",
      pythonLexer.has("rsqb") ? { type: "rsqb" } : rsqb,
    ],
    postprocess: ([l, elems, r]) => new ExprNS.List(toAstToken(l), toAstToken(r), elems),
  },
  {
    name: "atom",
    symbols: [pythonLexer.has("name") ? { type: "name" } : name],
    postprocess: astVariable,
  },
  {
    name: "atom",
    symbols: [pythonLexer.has("number_float") ? { type: "number_float" } : number_float],
    postprocess: ([t]) => {
      const tok = toAstToken(t);
      return new ExprNS.Literal(tok, tok, parseFloat(t.value));
    },
  },
  {
    name: "atom",
    symbols: [pythonLexer.has("number_int") ? { type: "number_int" } : number_int],
    postprocess: astBigInt,
  },
  {
    name: "atom",
    symbols: [pythonLexer.has("number_hex") ? { type: "number_hex" } : number_hex],
    postprocess: astBigInt,
  },
  {
    name: "atom",
    symbols: [pythonLexer.has("number_oct") ? { type: "number_oct" } : number_oct],
    postprocess: astBigInt,
  },
  {
    name: "atom",
    symbols: [pythonLexer.has("number_bin") ? { type: "number_bin" } : number_bin],
    postprocess: astBigInt,
  },
  {
    name: "atom",
    symbols: [pythonLexer.has("number_complex") ? { type: "number_complex" } : number_complex],
    postprocess: astComplex,
  },
  { name: "atom", symbols: ["stringLit"], postprocess: id },
  { name: "atom", symbols: [{ literal: "None" }], postprocess: astNone },
  { name: "atom", symbols: [{ literal: "True" }], postprocess: astTrue },
  { name: "atom", symbols: [{ literal: "False" }], postprocess: astFalse },
  // ── lambdas: expression body with ":", block body with "::" ──────────
  {
    name: "lambda_expr",
    symbols: [{ literal: "lambda" }, "names", { literal: ":" }, "expression"],
    postprocess: ([kw, params, , body]) =>
      new ExprNS.Lambda(toAstToken(kw), body.endToken, params, body),
  },
  {
    name: "lambda_expr",
    symbols: [
      { literal: "lambda" },
      "names",
      pythonLexer.has("doublecolon") ? { type: "doublecolon" } : doublecolon,
      "block",
    ],
    postprocess: ([kw, params, , body]) =>
      new ExprNS.MultiLambda(toAstToken(kw), body[body.length - 1].endToken, params, body, []),
  },
  {
    name: "lambda_expr",
    symbols: [{ literal: "lambda" }, { literal: ":" }, "expression"],
    postprocess: ([kw, , body]) => new ExprNS.Lambda(toAstToken(kw), body.endToken, [], body),
  },
  {
    name: "lambda_expr",
    symbols: [
      { literal: "lambda" },
      pythonLexer.has("doublecolon") ? { type: "doublecolon" } : doublecolon,
      "block",
    ],
    postprocess: ([kw, , body]) =>
      new ExprNS.MultiLambda(toAstToken(kw), body[body.length - 1].endToken, [], body, []),
  },
  // ── comma-separated expression lists (optionally trailing comma) ─────
  { name: "expressions$ebnf$1", symbols: [] },
  { name: "expressions$ebnf$1$subexpression$1", symbols: [{ literal: "," }, "expression"] },
  {
    name: "expressions$ebnf$1",
    symbols: ["expressions$ebnf$1", "expressions$ebnf$1$subexpression$1"],
    postprocess: function arrpush(d) {
      return d[0].concat([d[1]]);
    },
  },
  { name: "expressions$ebnf$2$subexpression$1", symbols: [{ literal: "," }] },
  { name: "expressions$ebnf$2", symbols: ["expressions$ebnf$2$subexpression$1"], postprocess: id },
  {
    name: "expressions$ebnf$2",
    symbols: [],
    postprocess: function (d) {
      return null;
    },
  },
  {
    name: "expressions",
    symbols: ["expression", "expressions$ebnf$1", "expressions$ebnf$2"],
    postprocess: flatList,
  },
  // ── string literal variants ──────────────────────────────────────────
  {
    name: "stringLit",
    symbols: [
      pythonLexer.has("string_triple_double")
        ? { type: "string_triple_double" }
        : string_triple_double,
    ],
    postprocess: astString,
  },
  {
    name: "stringLit",
    symbols: [
      pythonLexer.has("string_triple_single")
        ? { type: "string_triple_single" }
        : string_triple_single,
    ],
    postprocess: astString,
  },
  {
    name: "stringLit",
    symbols: [pythonLexer.has("string_double") ? { type: "string_double" } : string_double],
    postprocess: astString,
  },
  {
    name: "stringLit",
    symbols: [pythonLexer.has("string_single") ? { type: "string_single" } : string_single],
    postprocess: astString,
  },
];
let ParserStart = "program";
export default { Lexer, ParserRules, ParserStart };
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc