Permify / permify / 18341735804

08 Oct 2025 10:29AM UTC coverage: 86.447% (+0.3%) from 86.148%

Pull Request #2532: refactor: clean up imports and comments in various files
Commit (github, tolgaozen): feat(otlp): add support for custom headers in OTLP exporters

59 of 71 new or added lines in 4 files covered. (83.1%)

2 existing lines in 2 files now uncovered.

9332 of 10795 relevant lines covered (86.45%)

204.91 hits per line

Source File: /pkg/dsl/parser/parser.go (80.84% covered)
1
package parser
2

3
import (
4
        "errors"
5
        "fmt"
6
        "strings"
7

8
        "github.com/Permify/permify/pkg/dsl/ast"
9
        "github.com/Permify/permify/pkg/dsl/lexer"
10
        "github.com/Permify/permify/pkg/dsl/token"
11
        "github.com/Permify/permify/pkg/dsl/utils"
12
)
13

14
const (
15
        // iota is a special identifier that is automatically set to 0 in this case, and increments by 1 for each subsequent constant declaration. By assigning the value to the blank identifier _, it is effectively ignored.
16
        _ int = iota
17

18
        // LOWEST is the lowest precedence level
19
        LOWEST
20
        // AND_OR_NOT precedence level for the logical operators AND, OR and NOT
21
        AND_OR_NOT
22
)
23

24
var precedences = map[token.Type]int{ // a map that assigns precedence levels to different token types
25
        token.AND: AND_OR_NOT,
26
        token.OR:  AND_OR_NOT,
27
        token.NOT: AND_OR_NOT,
28
}
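// Note: AND, OR and NOT all share the single AND_OR_NOT precedence level, so
// parseExpression folds operators left-to-right. For example, a hypothetical
// permission expression
//
//	owner or editor and viewer
//
// is grouped as ((owner or editor) and viewer); only explicit parentheses in
// the schema change that grouping.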
29

30
// Parser is a struct that contains information and functions related to parsing
31
type Parser struct {
32
        // a pointer to a Lexer object that will provide tokens for parsing
33
        l *lexer.Lexer
34
        // the current token being processed
35
        currentToken token.Token
36
        // the previous token (token before currentToken) for lookahead parsing and multi-line expression handling
37
        previousToken token.Token // stores last processed token
38
        // the next token after currentToken
39
        peekToken token.Token
40
        // a slice of error messages that are generated during parsing
41
        errors []string
42
        // a map that associates prefix parsing functions with token types
43
        prefixParseFns map[token.Type]prefixParseFn
44
        // a map that associates infix parsing functions with token types
45
        infixParseFunc map[token.Type]infixParseFn
46
        // references to entities, rules, relations, attributes, and permissions
47
        references *ast.References
48
}
49

50
type (
51
        // a function that parses prefix expressions and returns an ast.Expression and error
52
        prefixParseFn func() (ast.Expression, error)
53

54
        // a function that parses infix expressions and returns an ast.Expression and error
55
        infixParseFn func(ast.Expression) (ast.Expression, error)
56
)
57

58
// NewParser creates a new Parser object with the given input string
59
func NewParser(str string) (p *Parser) {
32✔
60
        // initialize a new Parser object with the given input string and default values for other fields
32✔
61
        p = &Parser{
32✔
62
                l:          lexer.NewLexer(str), // create a new Lexer object with the input string
32✔
63
                errors:     []string{},          // initialize an empty slice of error messages
32✔
64
                references: ast.NewReferences(), // initialize an empty references collection (entities, rules, relations, attributes, permissions)
32✔
65
        }
32✔
66

32✔
67
        // register the prefix parsing function for the IDENT token type
32✔
68
        p.prefixParseFns = make(map[token.Type]prefixParseFn)  // initialize an empty map for prefix parsing functions
32✔
69
        p.registerPrefix(token.IDENT, p.parseIdentifierOrCall) // associate the parseIdentifierOrCall function with the IDENT token type
32✔
70

32✔
71
        // register infix parsing functions for token types AND, OR, NOT
32✔
72
        p.infixParseFunc = make(map[token.Type]infixParseFn) // initialize an empty map for infix parsing functions
32✔
73
        p.registerInfix(token.AND, p.parseInfixExpression)   // associate the parseInfixExpression function with the AND token type
32✔
74
        p.registerInfix(token.OR, p.parseInfixExpression)    // associate the parseInfixExpression function with the OR token type
32✔
75
        p.registerInfix(token.NOT, p.parseInfixExpression)   // associate the parseInfixExpression function with the NOT token type
32✔
76

32✔
77
        return p // return the newly created Parser object
32✔
78
}
32✔
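// Illustrative usage sketch (hypothetical, not part of the covered file):
// a schema string is handed to NewParser and the resulting AST is obtained
// via Parse; the entity, relation and permission names below are invented.
func exampleNewParserUsage() (*ast.Schema, error) {
        p := NewParser(`
entity user {}

entity document {
    relation owner @user
    permission edit = owner
}
`)
        // Parse builds the schema AST, or returns the first error collected
        // while parsing (see Error below).
        return p.Parse()
}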
79

80
// next retrieves the next non-ignored token from the Parser's lexer and updates the Parser's currentToken and peekToken fields
81
func (p *Parser) next() {
1,134✔
82
        for {
3,551✔
83
                // retrieve the next token from the lexer
2,417✔
84
                peek := p.l.NextToken()
2,417✔
85
                // if the token is not an ignored token (e.g. whitespace or comments), update the currentToken and peekToken fields and exit the loop
2,417✔
86
                if !token.IsIgnores(peek.Type) {
3,551✔
87
                        // store the current token as previous before advancing
1,134✔
88
                        p.previousToken = p.currentToken // save current token for lookahead
1,134✔
89
                        // set the currentToken field to the previous peekToken value
1,134✔
90
                        p.currentToken = p.peekToken
1,134✔
91
                        // set the peekToken field to the new peek value
1,134✔
92
                        p.peekToken = peek
1,134✔
93
                        // exit the loop
1,134✔
94
                        break
1,134✔
95
                }
96
        }
97
}
98

99
// nextWithIgnores advances the parser's token stream by one position.
100
// It updates the currentToken and peekToken of the Parser.
101
func (p *Parser) nextWithIgnores() {
98✔
102
        // Get the next token in the lexer's token stream and store it in the variable peek.
98✔
103
        peek := p.l.NextToken()
98✔
104

98✔
105
        // Update the currentToken with the value of peekToken.
98✔
106
        p.currentToken = p.peekToken
98✔
107

98✔
108
        // Update the peekToken with the value of peek (the new next token in the lexer's stream).
98✔
109
        p.peekToken = peek
98✔
110
}
98✔
111

112
// currentTokenIs checks if the Parser's currentToken is any of the given token types
113
func (p *Parser) currentTokenIs(tokens ...token.Type) bool {
1,439✔
114
        // iterate through the given token types and check if any of them match the currentToken's type
1,439✔
115
        for _, t := range tokens {
2,878✔
116
                if p.currentToken.Type == t {
1,894✔
117
                        // if a match is found, return true
455✔
118
                        return true
455✔
119
                }
455✔
120
        }
121
        // if no match is found, return false
122
        return false
984✔
123
}
124

125
// previousTokenIs checks if the Parser's previousToken type matches any of the given types
126
func (p *Parser) previousTokenIs(tokens ...token.Type) bool { // Check if previous token matches any type
10✔
127
        for _, tokenType := range tokens { // Iterate through token types
40✔
128
                if p.previousToken.Type == tokenType { // Check for match
40✔
129
                        return true // Match found
10✔
130
                } // Continue if no match
10✔
131
        } // All types checked
132
        return false // No match found
×
133
} // End previousTokenIs
134
// peekTokenIs checks if the Parser's peekToken is any of the given token types
135
func (p *Parser) peekTokenIs(tokens ...token.Type) bool {
1,270✔
136
        // iterate through the given token types and check if any of them match the peekToken's type
1,270✔
137
        for _, t := range tokens {
2,540✔
138
                if p.peekToken.Type == t {
1,927✔
139
                        // if a match is found, return true
657✔
140
                        return true
657✔
141
                }
657✔
142
        }
143
        // if no match is found, return false
144
        return false
613✔
145
}
146

147
// Error returns an error if there are any errors in the Parser's errors slice
148
func (p *Parser) Error() error {
31✔
149
        // if there are no errors, return nil
31✔
150
        if len(p.errors) == 0 {
31✔
151
                return nil
×
152
        }
×
153
        // if there are errors, return the first error message in the errors slice as an error type
154
        return errors.New(p.errors[0])
31✔
155
}
156

157
// Parse reads and parses the input string and returns an AST representation of the schema, along with any errors encountered during parsing
158
func (p *Parser) Parse() (*ast.Schema, error) {
29✔
159
        // create a new Schema object to store the parsed statements
29✔
160
        schema := ast.NewSchema()
29✔
161
        schema.Statements = []ast.Statement{}
29✔
162

29✔
163
        // loop through the input string until the end is reached
29✔
164
        for !p.currentTokenIs(token.EOF) {
210✔
165
                // parse the next statement in the input string
181✔
166
                stmt, err := p.parseStatement()
181✔
167
                if err != nil {
193✔
168
                        // if there was an error parsing the statement, return the error message
12✔
169
                        return nil, p.Error()
12✔
170
                }
12✔
171
                if stmt != nil {
203✔
172
                        // add the parsed statement to the schema's Statements field if it is not nil
34✔
173
                        schema.Statements = append(schema.Statements, stmt)
34✔
174
                }
34✔
175

176
                // move to the next token in the input string
177
                p.next()
169✔
178
        }
179

180
        schema.SetReferences(p.references)
17✔
181

17✔
182
        // return the parsed schema object and nil to indicate that there were no errors
17✔
183
        return schema, nil
17✔
184
}
185

186
func (p *Parser) ParsePartial(entityName string) (ast.Statement, error) {
3✔
187
        for !p.currentTokenIs(token.EOF) {
15✔
188
                // parse the next statement in the input string
12✔
189
                stmt, err := p.parsePartialStatement(entityName)
12✔
190
                if err != nil {
12✔
191
                        return nil, p.Error()
×
192
                }
×
193
                if stmt != nil {
15✔
194
                        return stmt, nil
3✔
195
                }
3✔
196
                p.next()
9✔
197
        }
198
        return nil, errors.New("no valid statement found")
×
199
}
200

201
func (p *Parser) parsePartialStatement(entityName string) (ast.Statement, error) {
12✔
202
        switch p.currentToken.Type {
12✔
203
        case token.ATTRIBUTE:
×
204
                return p.parseAttributeStatement(entityName)
×
205
        case token.RELATION:
2✔
206
                return p.parseRelationStatement(entityName)
2✔
207
        case token.PERMISSION:
1✔
208
                return p.parsePermissionStatement(entityName)
1✔
209
        default:
9✔
210
                return nil, nil
9✔
211
        }
212
}
213

214
// parseStatement method parses the current statement based on its defined token types
215
func (p *Parser) parseStatement() (ast.Statement, error) {
181✔
216
        // switch on the currentToken's type to determine which type of statement to parse
181✔
217
        switch p.currentToken.Type {
181✔
218
        case token.ENTITY:
38✔
219
                // if the currentToken is ENTITY, parse an EntityStatement
38✔
220
                return p.parseEntityStatement()
38✔
221
        case token.RULE:
8✔
222
                // if the currentToken is RULE, parse a RuleStatement
8✔
223
                return p.parseRuleStatement()
8✔
224
        default:
135✔
225
                return nil, nil
135✔
226
        }
227
}
228

229
// parseEntityStatement method parses an ENTITY statement and returns an EntityStatement AST node
230
func (p *Parser) parseEntityStatement() (*ast.EntityStatement, error) {
38✔
231
        // create a new EntityStatement object and set its Entity field to the currentToken
38✔
232
        stmt := &ast.EntityStatement{Entity: p.currentToken}
38✔
233
        // expect the next token to be an identifier token, and set the EntityStatement's Name field to the identifier's value
38✔
234
        if !p.expectAndNext(token.IDENT) {
39✔
235
                return nil, p.Error()
1✔
236
        }
1✔
237
        stmt.Name = p.currentToken
37✔
238

37✔
239
        // add the entity reference to the Parser's entityReferences map
37✔
240
        err := p.references.AddEntityReference(stmt.Name.Literal)
37✔
241
        if err != nil {
38✔
242
                p.duplicationError(stmt.Name.Literal) // Generate an error message indicating a duplication error
1✔
243
                return nil, p.Error()
1✔
244
        }
1✔
245

246
        // expect the next token to be a left brace token, indicating the start of the entity's body
247
        if !p.expectAndNext(token.LCB) {
37✔
248
                return nil, p.Error()
1✔
249
        }
1✔
250

251
        // loop through the entity's body until a right brace token is encountered
252
        for !p.currentTokenIs(token.RCB) {
322✔
253
                // if the currentToken is EOF, raise an error and return nil for both the statement and error values
287✔
254
                if p.currentTokenIs(token.EOF) {
288✔
255
                        p.currentError(token.RCB)
1✔
256
                        return nil, p.Error()
1✔
257
                }
1✔
258
                // based on the currentToken's type, parse a RelationStatement or PermissionStatement and add it to the EntityStatement's corresponding field
259
                switch p.currentToken.Type {
286✔
260
                case token.RELATION:
45✔
261
                        relation, err := p.parseRelationStatement(stmt.Name.Literal)
45✔
262
                        if err != nil {
48✔
263
                                return nil, p.Error()
3✔
264
                        }
3✔
265
                        stmt.RelationStatements = append(stmt.RelationStatements, relation)
42✔
266
                case token.ATTRIBUTE:
8✔
267
                        attribute, err := p.parseAttributeStatement(stmt.Name.Literal)
8✔
268
                        if err != nil {
9✔
269
                                return nil, p.Error()
1✔
270
                        }
1✔
271
                        stmt.AttributeStatements = append(stmt.AttributeStatements, attribute)
7✔
272
                case token.PERMISSION:
31✔
273
                        action, err := p.parsePermissionStatement(stmt.Name.Literal)
31✔
274
                        if err != nil {
33✔
275
                                return nil, p.Error()
2✔
276
                        }
2✔
277
                        stmt.PermissionStatements = append(stmt.PermissionStatements, action)
29✔
278
                default:
202✔
279
                        // if the currentToken is not recognized, check if it is a newline, left brace, or right brace token, and skip it if it is
202✔
280
                        if !p.currentTokenIs(token.NEWLINE) && !p.currentTokenIs(token.LCB) && !p.currentTokenIs(token.RCB) {
204✔
281
                                // if the currentToken is not recognized and not a newline, left brace, or right brace token, raise an error and return nil for both the statement and error values
2✔
282
                                p.currentError(token.RELATION, token.PERMISSION, token.ATTRIBUTE)
2✔
283
                                return nil, p.Error()
2✔
284
                        }
2✔
285
                }
286
                // move to the next token in the input string
287
                p.next()
278✔
288
        }
289

290
        // return the parsed EntityStatement and nil for the error value
291
        return stmt, nil
26✔
292
}
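// Illustrative sketch (hypothetical schema fragment, not taken from the
// covered source) of an entity body containing the three statement kinds the
// switch above dispatches on:
//
//	entity document {
//	    relation owner @user
//	    attribute is_public boolean
//	    permission view = owner
//	}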
293

294
// parseRuleStatement is responsible for parsing a rule statement in the form:
295
//
296
//        rule name(typ1 string, typ2 boolean) {
297
//            EXPRESSION
298
//        }
299
//
300
// This method assumes the current token points to the 'rule' token when it is called.
301
func (p *Parser) parseRuleStatement() (*ast.RuleStatement, error) {
8✔
302
        // Create a new RuleStatement
8✔
303
        stmt := &ast.RuleStatement{Rule: p.currentToken}
8✔
304

8✔
305
        // Expect the next token to be an identifier (the name of the rule).
8✔
306
        // If it's not an identifier, return an error.
8✔
307
        if !p.expectAndNext(token.IDENT) {
8✔
308
                return nil, p.Error()
×
309
        }
×
310
        stmt.Name = p.currentToken
8✔
311

8✔
312
        // Expect the next token to be a left parenthesis '(' starting the argument list.
8✔
313
        if !p.expectAndNext(token.LP) {
8✔
314
                return nil, p.Error()
×
315
        }
×
316

317
        arguments := map[token.Token]ast.AttributeTypeStatement{}
8✔
318
        args := map[string]string{}
8✔
319

8✔
320
        // Loop over the tokens until a right parenthesis ')' is encountered.
8✔
321
        // In each iteration, two tokens are processed: an identifier (arg name) and its type.
8✔
322
        for !p.peekTokenIs(token.RP) {
20✔
323
                // Expect the first token to be the parameter's identifier.
12✔
324
                if !p.expectAndNext(token.IDENT) {
12✔
325
                        return nil, p.Error()
×
326
                }
×
327
                argument := p.currentToken
12✔
328
                arg := p.currentToken.Literal
12✔
329

12✔
330
                // Expect the second token to be the parameter's type.
12✔
331
                if !p.expectAndNext(token.IDENT) {
12✔
332
                        return nil, p.Error()
×
333
                }
×
334

335
                if p.peekTokenIs(token.LSB) { // Check if the next token is '['
13✔
336
                        arguments[argument] = ast.AttributeTypeStatement{
1✔
337
                                Type:    p.currentToken,
1✔
338
                                IsArray: true, // Marking the type as an array
1✔
339
                        }
1✔
340
                        args[arg] = p.currentToken.Literal + "[]" // Store the argument type as string with "[]" suffix
1✔
341
                        p.next()                                  // Move to the '[' token
1✔
342
                        if !p.expectAndNext(token.RSB) {          // Expect and move to the ']' token
1✔
343
                                return nil, p.Error()
×
344
                        }
×
345
                } else {
11✔
346
                        arguments[argument] = ast.AttributeTypeStatement{
11✔
347
                                Type:    p.currentToken,
11✔
348
                                IsArray: false, // Marking the type as not an array
11✔
349
                        }
11✔
350
                        args[arg] = p.currentToken.Literal // Store the regular argument type
11✔
351
                }
11✔
352

353
                // If the next token is a comma, there are more parameters to parse.
354
                // Continue to the next iteration.
355
                if p.peekTokenIs(token.COMMA) {
16✔
356
                        p.next()
4✔
357
                        continue
4✔
358
                } else if !p.peekTokenIs(token.RP) {
8✔
359
                        // If the next token is not a comma, it must be a closing parenthesis.
×
360
                        // If it's not, return an error.
×
361
                        p.peekError(token.RP)
×
362
                        return nil, p.Error()
×
363
                }
×
364
        }
365

366
        // Save parsed arguments to the statement
367
        stmt.Arguments = arguments
8✔
368

8✔
369
        // Consume the right parenthesis.
8✔
370
        p.next()
8✔
371

8✔
372
        // Expect the next token to be a left curly bracket '{' starting the body.
8✔
373
        if !p.expectAndNext(token.LCB) {
8✔
374
                return nil, p.Error()
×
375
        }
×
376

377
        p.next()
8✔
378

8✔
379
        // Collect tokens for the body until a closing curly bracket '}' is encountered.
8✔
380
        var bodyTokens []token.Token
8✔
381
        for !p.peekTokenIs(token.RCB) {
106✔
382
                // If there's no closing bracket, return an error.
98✔
383
                if p.peekTokenIs(token.EOF) {
98✔
384
                        p.peekError(token.RCB)
×
385
                        return nil, p.Error()
×
386
                }
×
387

388
                bodyTokens = append(bodyTokens, p.currentToken)
98✔
389
                p.nextWithIgnores()
98✔
390
        }
391

392
        // Combine all the body tokens into a single string
393
        var bodyStr strings.Builder
8✔
394
        for _, t := range bodyTokens {
106✔
395
                bodyStr.WriteString(t.Literal)
98✔
396
        }
98✔
397
        stmt.Expression = bodyStr.String()
8✔
398

8✔
399
        // Expect and consume the closing curly bracket '}'.
8✔
400
        if !p.expectAndNext(token.RCB) {
8✔
401
                return nil, p.Error()
×
402
        }
×
403

404
        // Register the parsed rule in the parser's references.
405
        err := p.references.AddRuleReference(stmt.Name.Literal, args)
8✔
406
        if err != nil {
8✔
407
                // If there's an error (e.g., a duplicate rule), return an error.
×
408
                p.duplicationError(stmt.Name.Literal)
×
409
                return nil, p.Error()
×
410
        }
×
411

412
        // Return the successfully parsed RuleStatement.
413
        return stmt, nil
8✔
414
}
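// Illustrative sketch (hypothetical declaration, not taken from the covered
// source) of the rule form handled above; the body is collected as raw
// tokens, so its exact syntax is left to the lexer:
//
//	rule check_balance(balance integer) {
//	    balance >= 100
//	}
//
// The argument list is recorded as {balance: "integer"} and the body tokens
// between the braces are concatenated into stmt.Expression.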
415

416
// parseAttributeStatement method parses an ATTRIBUTE statement and returns an AttributeStatement AST node
417
func (p *Parser) parseAttributeStatement(entityName string) (*ast.AttributeStatement, error) {
8✔
418
        // create a new AttributeStatement object and set its Attribute field to the currentToken
8✔
419
        stmt := &ast.AttributeStatement{Attribute: p.currentToken}
8✔
420

8✔
421
        // expect the next token to be an identifier token, and set the AttributeStatement's Name field to the identifier's value
8✔
422
        if !p.expectAndNext(token.IDENT) {
9✔
423
                return nil, p.Error()
1✔
424
        }
1✔
425
        stmt.Name = p.currentToken
7✔
426

7✔
427
        if !p.expectAndNext(token.IDENT) {
7✔
428
                return nil, p.Error()
×
429
        }
×
430

431
        atstmt := ast.AttributeTypeStatement{Type: p.currentToken}
7✔
432
        atstmt.IsArray = false
7✔
433

7✔
434
        if p.peekTokenIs(token.LSB) {
8✔
435
                p.next()
1✔
436
                if !p.expectAndNext(token.RSB) {
1✔
437
                        return nil, p.Error()
×
438
                }
×
439
                atstmt.IsArray = true
1✔
440
        }
441

442
        stmt.AttributeType = atstmt
7✔
443

7✔
444
        key := utils.Key(entityName, stmt.Name.Literal)
7✔
445
        // add the attribute reference to the Parser's references
7✔
446
        err := p.references.AddAttributeReferences(key, atstmt)
7✔
447
        if err != nil {
7✔
448
                p.duplicationError(key) // Generate an error message indicating a duplication error
×
449
                return nil, p.Error()
×
450
        }
×
451

452
        // return the parsed AttributeStatement and nil for the error value
453
        return stmt, nil
7✔
454
}
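// Illustrative sketch (hypothetical attribute lines, not taken from the
// covered source) showing the scalar and array forms handled above:
//
//	attribute is_public boolean
//	attribute tags string[]
//
// The trailing "[]" sets IsArray on the AttributeTypeStatement, and the type
// is stored under the key built by utils.Key from the entity and attribute
// names.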
455

456
// parseRelationStatement method parses a RELATION statement and returns a RelationStatement AST node
457
func (p *Parser) parseRelationStatement(entityName string) (*ast.RelationStatement, error) {
47✔
458
        // create a new RelationStatement object and set its Relation field to the currentToken
47✔
459
        stmt := &ast.RelationStatement{Relation: p.currentToken}
47✔
460

47✔
461
        // expect the next token to be an identifier token, and set the RelationStatement's Name field to the identifier's value
47✔
462
        if !p.expectAndNext(token.IDENT) {
47✔
463
                return nil, p.Error()
×
464
        }
×
465
        stmt.Name = p.currentToken
47✔
466
        relationName := stmt.Name.Literal
47✔
467

47✔
468
        // expect the next token to be a SIGN token, indicating the start of the relation type(s)
47✔
469
        if !p.expect(token.SIGN) {
48✔
470
                return nil, p.Error()
1✔
471
        }
1✔
472

473
        // loop through the relation types until no more SIGN tokens are encountered
474
        for p.peekTokenIs(token.SIGN) {
95✔
475
                // parse a RelationTypeStatement and append it to the RelationStatement's RelationTypes field
49✔
476
                relationStatement, err := p.parseRelationTypeStatement()
49✔
477
                if err != nil {
50✔
478
                        return nil, p.Error()
1✔
479
                }
1✔
480
                stmt.RelationTypes = append(stmt.RelationTypes, *relationStatement)
48✔
481
        }
482

483
        key := utils.Key(entityName, relationName)
45✔
484

45✔
485
        // add the relation reference to the Parser's references
45✔
486
        err := p.references.AddRelationReferences(key, stmt.RelationTypes)
45✔
487
        if err != nil {
46✔
488
                p.duplicationError(key) // Generate an error message indicating a duplication error
1✔
489
                return nil, p.Error()
1✔
490
        }
1✔
491

492
        // return the parsed RelationStatement and nil for the error value
493
        return stmt, nil
44✔
494
}
495

496
// parseRelationTypeStatement method parses a single relation type within a RELATION statement and returns a RelationTypeStatement AST node
497
func (p *Parser) parseRelationTypeStatement() (*ast.RelationTypeStatement, error) {
49✔
498
        // expect the currentToken to be a SIGN token, indicating the start of the relation type
49✔
499
        if !p.expectAndNext(token.SIGN) {
49✔
500
                return nil, p.Error()
×
501
        }
×
502
        // create a new RelationTypeStatement object and set its Sign field to the SIGN token
503
        stmt := &ast.RelationTypeStatement{Sign: p.currentToken}
49✔
504

49✔
505
        // expect the next token to be an identifier token, and set the RelationTypeStatement's Type field to the identifier's value
49✔
506
        if !p.expectAndNext(token.IDENT) {
50✔
507
                return nil, p.Error()
1✔
508
        }
1✔
509
        stmt.Type = p.currentToken
48✔
510

48✔
511
        // if the next token is a HASH token, indicating that a specific relation within the relation type is being referenced, parse it and set the RelationTypeStatement's Relation field to the identifier's value
48✔
512
        if p.peekTokenIs(token.HASH) {
51✔
513
                p.next()
3✔
514
                if !p.expectAndNext(token.IDENT) {
3✔
515
                        return nil, p.Error()
×
516
                }
×
517
                stmt.Relation = p.currentToken
3✔
518
        }
519

520
        // return the parsed RelationTypeStatement and nil for the error value
521
        return stmt, nil
48✔
522
}
523

524
// parsePermissionStatement method parses a PERMISSION statement and returns a PermissionStatement AST node
525
func (p *Parser) parsePermissionStatement(entityName string) (ast.Statement, error) {
32✔
526
        // create a new PermissionStatement object and set its Permission field to the currentToken
32✔
527
        stmt := &ast.PermissionStatement{Permission: p.currentToken}
32✔
528

32✔
529
        // expect the next token to be an identifier token, and set the PermissionStatement's Name field to the identifier's value
32✔
530
        if !p.expectAndNext(token.IDENT) {
33✔
531
                return nil, p.Error()
1✔
532
        }
1✔
533
        stmt.Name = p.currentToken
31✔
534

31✔
535
        key := utils.Key(entityName, stmt.Name.Literal)
31✔
536
        // add the permission reference to the Parser's references
31✔
537
        err := p.references.AddPermissionReference(key)
31✔
538
        if err != nil {
32✔
539
                p.duplicationError(key) // Generate an error message indicating a duplication error
1✔
540
                return nil, p.Error()
1✔
541
        }
1✔
542

543
        // expect the next token to be an ASSIGN token, indicating the start of the expression to be assigned to the action
544
        if !p.expectAndNext(token.ASSIGN) {
30✔
545
                return nil, p.Error()
×
546
        }
×
547

548
        p.next()
30✔
549

30✔
550
        // parse the expression statement and set it as the PermissionStatement's ExpressionStatement field
30✔
551
        ex, err := p.parseExpressionStatement()
30✔
552
        if err != nil {
30✔
553
                return nil, p.Error()
×
554
        }
×
555
        stmt.ExpressionStatement = ex
30✔
556

30✔
557
        // return the parsed PermissionStatement and nil for the error value
30✔
558
        return stmt, nil
30✔
559
}
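// Illustrative sketch (hypothetical permission line, not taken from the
// covered source): a declaration such as
//
//	permission edit = owner or org.admin
//
// yields a PermissionStatement whose ExpressionStatement wraps an
// InfixExpression (Operator "or") with two Identifier operands,
// "owner" and "org.admin".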
560

561
// parseExpressionStatement method parses an expression statement and returns an ExpressionStatement AST node
562
func (p *Parser) parseExpressionStatement() (*ast.ExpressionStatement, error) {
30✔
563
        // create a new ExpressionStatement object
30✔
564
        stmt := &ast.ExpressionStatement{}
30✔
565
        var err error
30✔
566
        // parse the expression using the lowest precedence value as the initial precedence level
30✔
567
        stmt.Expression, err = p.parseExpression(LOWEST)
30✔
568
        if err != nil {
30✔
569
                return nil, p.Error()
×
570
        }
×
571

572
        // return the parsed ExpressionStatement and nil for the error value
573
        return stmt, nil
30✔
574
}
575

576
// expectAndNext method checks if the next token is of the expected type and advances the lexer to the next token if it is. It returns true if the next token is of the expected type, and false otherwise.
577
func (p *Parser) expectAndNext(t token.Type) bool {
413✔
578
        // if the next token is of the expected type, advance the lexer to the next token and return true
413✔
579
        if p.peekTokenIs(t) {
821✔
580
                p.next()
408✔
581
                return true
408✔
582
        }
408✔
583
        // otherwise, generate an error message indicating that the expected token type was not found and return false
584
        p.peekError(t)
5✔
585
        return false
5✔
586
}
587

588
// expect method checks if the next token is of the expected type, without advancing the lexer. It returns true if the next token is of the expected type, and false otherwise.
589
func (p *Parser) expect(t token.Type) bool {
78✔
590
        // if the next token is of the expected type, return true
78✔
591
        if p.peekTokenIs(t) {
155✔
592
                return true
77✔
593
        }
77✔
594
        // otherwise, generate an error message indicating that the expected token type was not found and return false
595
        p.peekError(t)
1✔
596
        return false
1✔
597
}
598

599
// parseExpression method parses an expression with a given precedence level and returns the parsed expression as an AST node. It takes an integer value indicating the precedence level.
600
func (p *Parser) parseExpression(precedence int) (ast.Expression, error) {
109✔
601
        var exp ast.Expression
109✔
602
        var err error
109✔
603

109✔
604
        if p.currentTokenIs(token.NEWLINE) && p.previousTokenIs(token.LP, token.AND, token.OR, token.NOT, token.ASSIGN) {
119✔
605
                p.next() // skip newline after operators
10✔
606
        } // Newline handling complete
10✔
607
        if p.currentTokenIs(token.LP) {
140✔
608
                p.next() // Consume the left parenthesis.
31✔
609
                exp, err = p.parseExpression(LOWEST)
31✔
610
                if err != nil {
31✔
611
                        return nil, err
×
612
                }
×
613

614
                if !p.expect(token.RP) {
31✔
615
                        return nil, p.Error()
×
616
                }
×
617
                p.next() // Consume the right parenthesis.
31✔
618
        } else {
78✔
619
                // get the prefix parsing function for the current token type
78✔
620
                prefix := p.prefixParseFns[p.currentToken.Type]
78✔
621
                if prefix == nil {
78✔
622
                        p.noPrefixParseFnError(p.currentToken.Type)
×
623
                        return nil, p.Error()
×
624
                }
×
625

626
                // parse the prefix expression
627
                exp, err = prefix()
78✔
628
                if err != nil {
78✔
629
                        return nil, p.Error()
×
630
                }
×
631
        }
632

633
        // continue parsing the expression while the next token has a higher precedence level than the current precedence level
634
        for !p.peekTokenIs(token.NEWLINE) && precedence < p.peekPrecedence() {
157✔
635
                // get the infix parsing function for the next token type
48✔
636
                infix := p.infixParseFunc[p.peekToken.Type]
48✔
637
                if infix == nil {
48✔
638
                        return exp, nil
×
639
                }
×
640
                p.next()
48✔
641
                // parse the infix expression with the current expression as its left-hand side
48✔
642
                exp, err = infix(exp)
48✔
643
                if err != nil {
48✔
644
                        return nil, p.Error()
×
645
                }
×
646
        }
647

648
        // return the parsed expression and nil for the error value
649
        return exp, nil
109✔
650
}
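// Illustrative sketch (hypothetical expression, not taken from the covered
// source): given
//
//	owner or (editor and viewer)
//
// the LP branch above recursively parses "editor and viewer" at LOWEST
// precedence and consumes the closing parenthesis, so the parenthesised
// group becomes the right operand of the "or" InfixExpression.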
651

652
// parseInfixExpression parses an infix expression that has a left operand and an operator followed by
653
// a right operand, such as "a or b" or "x and y".
654
// It takes the left operand as an argument, constructs an InfixExpression with the current operator
655
// and left operand, and parses the right operand with a higher precedence to construct the final
656
// expression tree.
657
// It returns the resulting InfixExpression and any error encountered.
658
func (p *Parser) parseInfixExpression(left ast.Expression) (ast.Expression, error) {
48✔
659
        // Ensure the current token is a valid infix operator before proceeding.
48✔
660
        if !p.isInfixOperator(p.currentToken.Type) {
48✔
661
                p.currentError(token.AND, token.OR, token.NOT) // report the token types that are valid infix operators
×
662
                return nil, p.Error()
×
663
        }
×
664

665
        // Create a new InfixExpression with the left operand and the current operator.
666
        expression := &ast.InfixExpression{
48✔
667
                Op:       p.currentToken,
48✔
668
                Left:     left,
48✔
669
                Operator: ast.Operator(p.currentToken.Literal),
48✔
670
        }
48✔
671

48✔
672
        // Get the precedence of the current operator and consume the operator token.
48✔
673
        precedence := p.currentPrecedence()
48✔
674
        p.next()
48✔
675

48✔
676
        // Parse the right operand with a higher precedence to construct the final expression tree.
48✔
677
        right, err := p.parseExpression(precedence)
48✔
678
        if err != nil {
48✔
679
                return nil, err
×
680
        }
×
681

682
        // Ensure the right operand is not nil.
683
        if right == nil {
48✔
684
                p.currentError(token.IDENT, token.LP) // report the token types that can start a right operand
×
685
                return nil, p.Error()
×
686
        }
×
687

688
        // Set the right operand of the InfixExpression and return it.
689
        expression.Right = right
48✔
690
        return expression, nil
48✔
691
}
692

693
// isInfixOperator reports whether the given token type is one of the valid infix operators (AND, OR, NOT).
694
func (p *Parser) isInfixOperator(tokenType token.Type) bool {
48✔
695
        return tokenType == token.AND || tokenType == token.OR || tokenType == token.NOT
48✔
696
}
48✔
697

698
// peekPrecedence returns the precedence of the next token in the input, if it is a known
699
// operator, or the lowest precedence otherwise.
700
func (p *Parser) peekPrecedence() int {
114✔
701
        if pr, ok := precedences[p.peekToken.Type]; ok {
169✔
702
                return pr
55✔
703
        }
55✔
704
        return LOWEST
59✔
705
}
706

707
// currentPrecedence returns the precedence of the current token in the input, if it is a known
708
// operator, or the lowest precedence otherwise.
709
func (p *Parser) currentPrecedence() int {
48✔
710
        if pr, ok := precedences[p.currentToken.Type]; ok {
96✔
711
                return pr
48✔
712
        }
48✔
713
        return LOWEST
×
714
}
715

716
func (p *Parser) parseIdentifierOrCall() (ast.Expression, error) {
78✔
717
        // Ensure the current token is a valid identifier before proceeding.
78✔
718
        if !p.currentTokenIs(token.IDENT) {
78✔
719
                return nil, fmt.Errorf("unexpected token type for identifier expression: %s", p.currentToken.Type)
×
720
        }
×
721

722
        if p.peekTokenIs(token.LP) {
87✔
723
                return p.parseCallExpression()
9✔
724
        }
9✔
725

726
        return p.parseIdentifierExpression()
69✔
727
}
728

729
// parseIdentifierExpression parses an identifier expression that may consist of one or more dot-separated
730
// identifiers, such as "x", "foo.bar", or "a.b.c.d".
731
// It constructs a new Identifier expression with the first token as the prefix and subsequent
732
// tokens as identifiers, and returns the resulting expression and any error encountered.
733
func (p *Parser) parseIdentifierExpression() (ast.Expression, error) {
83✔
734
        // Ensure the current token is a valid identifier before proceeding.
83✔
735
        if !p.currentTokenIs(token.IDENT) {
83✔
736
                p.currentError(token.IDENT)
×
737
                return nil, p.Error()
×
738
        }
×
739

740
        // Create a new Identifier expression with the first token as the prefix.
741
        ident := &ast.Identifier{Idents: []token.Token{p.currentToken}}
83✔
742

83✔
743
        // If the next token is a dot, consume it and continue parsing the next identifier.
83✔
744
        for p.peekTokenIs(token.DOT) {
116✔
745
                p.next() // Consume the dot token
33✔
746

33✔
747
                // Check if the next token after the dot is a valid identifier
33✔
748
                if !p.expectAndNext(token.IDENT) {
33✔
749
                        return nil, p.Error()
×
750
                }
×
751

752
                ident.Idents = append(ident.Idents, p.currentToken)
33✔
753
        }
754

755
        // Return the resulting Identifier expression.
756
        return ident, nil
83✔
757
}
758

759
// parseCallExpression parses a call expression of the form call_func(variable1, variable2) and returns the resulting Call expression.
760
func (p *Parser) parseCallExpression() (ast.Expression, error) {
9✔
761
        // Ensure the current token is a valid identifier before proceeding.
9✔
762
        if !p.currentTokenIs(token.IDENT) {
9✔
763
                p.currentError(token.IDENT)
×
764
                return nil, p.Error()
×
765
        }
×
766

767
        // Create a new Call expression named after the current token.
768
        call := &ast.Call{Name: p.currentToken}
9✔
769

9✔
770
        if !p.expectAndNext(token.LP) {
9✔
771
                return nil, p.Error()
×
772
        }
×
773

774
        // Check if there are no arguments
775
        if p.peekTokenIs(token.RP) {
9✔
776
                p.next() // consume the RP token
×
777
                return call, nil
×
778
        }
×
779

780
        p.next()
9✔
781

9✔
782
        // Parse the first argument
9✔
783
        ident, err := p.parseIdentifierExpression()
9✔
784
        if err != nil {
9✔
785
                return nil, err
×
786
        }
×
787

788
        i, ok := ident.(*ast.Identifier)
9✔
789
        if !ok {
9✔
790
                return nil, fmt.Errorf("expected identifier, got %T", ident)
×
791
        }
×
792
        call.Arguments = append(call.Arguments, *i)
9✔
793

9✔
794
        // Parse remaining arguments
9✔
795
        for p.peekTokenIs(token.COMMA) {
14✔
796
                p.next()
5✔
797

5✔
798
                if !p.expectAndNext(token.IDENT) {
5✔
799
                        return nil, p.Error()
×
800
                }
×
801

802
                ident, err = p.parseIdentifierExpression()
5✔
803
                if err != nil {
5✔
804
                        return nil, err
×
805
                }
×
806

807
                i, ok = ident.(*ast.Identifier)
5✔
808
                if !ok {
5✔
809
                        return nil, fmt.Errorf("expected identifier, got %T", ident)
×
810
                }
×
811
                call.Arguments = append(call.Arguments, *i)
5✔
812
        }
813

814
        if !p.expectAndNext(token.RP) {
9✔
815
                return nil, p.Error()
×
816
        }
×
817

818
        // Return the resulting Call expression.
819
        return call, nil
9✔
820
}
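// Illustrative sketch (hypothetical permission line, not taken from the
// covered source): a call such as
//
//	permission view = check_region(request.region)
//
// is parsed by parseCallExpression into a Call named "check_region" whose
// single argument is the dot-separated Identifier "request.region".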
821

822
// registerPrefix safely registers a parsing function for a prefix token type in the parser's prefixParseFns map.
823
// It takes a token type and a prefix parsing function as arguments, and stores the function in the map
824
// under the given token type key.
825
func (p *Parser) registerPrefix(tokenType token.Type, fn prefixParseFn) {
32✔
826
        if fn == nil {
32✔
827
                p.duplicationError(fmt.Sprintf("registerPrefix: nil function for token type %s", tokenType))
×
828
                return
×
829
        }
×
830

831
        if _, exists := p.prefixParseFns[tokenType]; exists {
32✔
832
                p.duplicationError(fmt.Sprintf("registerPrefix: token type %s already registered", tokenType))
×
833
                return
×
834
        }
×
835

836
        p.prefixParseFns[tokenType] = fn
32✔
837
}
838

839
// registerInfix safely registers a parsing function for an infix token type in the parser's infixParseFunc map.
840
// It takes a token type and an infix parsing function as arguments, and stores the function in the map
841
// under the given token type key.
842
func (p *Parser) registerInfix(tokenType token.Type, fn infixParseFn) {
96✔
843
        if fn == nil {
96✔
844
                p.duplicationError(fmt.Sprintf("registerInfix: nil function for token type %s", tokenType))
×
845
                return
×
846
        }
×
847

848
        if _, exists := p.infixParseFunc[tokenType]; exists {
96✔
849
                p.duplicationError(fmt.Sprintf("registerInfix: token type %s already registered", tokenType))
×
850
                return
×
851
        }
×
852

853
        p.infixParseFunc[tokenType] = fn
96✔
854
}
855

856
// duplicationError adds an error message to the parser's error list indicating that a duplication was found.
857
// It takes a key string as an argument that is used to identify the source of the duplication in the input.
858
func (p *Parser) duplicationError(key string) {
3✔
859
        msg := fmt.Sprintf("%v:%v:duplication found for %s", p.l.GetLinePosition(), p.l.GetColumnPosition(), key)
3✔
860
        p.errors = append(p.errors, msg)
3✔
861
}
3✔
862

863
// noPrefixParseFnError adds an error message to the parser's error list indicating that no prefix parsing
864
// function was found for a given token type.
865
// It takes a token type as an argument that indicates the type of the token for which a parsing function is missing.
866
func (p *Parser) noPrefixParseFnError(t token.Type) {
×
867
        msg := fmt.Sprintf("%v:%v:no prefix parse function for %s found", p.l.GetLinePosition(), p.l.GetColumnPosition(), t)
×
868
        p.errors = append(p.errors, msg)
×
869
}
×
870

871
// peekError adds an error message to the parser's error list indicating that the next token in the input
872
// did not match the expected type(s).
873
// It takes one or more token types as arguments that indicate the expected types.
874
func (p *Parser) peekError(t ...token.Type) {
6✔
875
        expected := strings.Join(tokenTypesToStrings(t), ", ")
6✔
876
        msg := fmt.Sprintf("%v:%v:expected next token to be %s, got %s instead", p.l.GetLinePosition(), p.l.GetColumnPosition(), expected, p.peekToken.Type)
6✔
877
        p.errors = append(p.errors, msg)
6✔
878
}
6✔
879

880
// currentError adds an error message to the parser's error list indicating that the current token in the input
881
// did not match the expected type(s).
882
// It takes one or more token types as arguments that indicate the expected types.
883
func (p *Parser) currentError(t ...token.Type) {
3✔
884
        expected := strings.Join(tokenTypesToStrings(t), ", ")
3✔
885
        msg := fmt.Sprintf("%v:%v:expected token to be %s, got %s instead", p.l.GetLinePosition(),
3✔
886
                p.l.GetColumnPosition(), expected, p.currentToken.Type)
3✔
887
        p.errors = append(p.errors, msg)
3✔
888
}
3✔
889

890
// tokenTypesToStrings converts a slice of token types to a slice of their string representations.
891
func tokenTypesToStrings(types []token.Type) []string {
9✔
892
        strs := make([]string, len(types))
9✔
893
        for i, t := range types {
22✔
894
                strs[i] = t.String()
13✔
895
        }
13✔
896
        return strs
9✔
897
}