
psf / black / 7868199971

12 Feb 2024 06:45AM UTC. Coverage: 96.387% (remained the same).

Pull #4225 · github · web-flow

Bump pre-commit/action from 3.0.0 to 3.0.1

Bumps [pre-commit/action](https://github.com/pre-commit/action) from 3.0.0 to 3.0.1.
- [Release notes](https://github.com/pre-commit/action/releases)
- [Commits](https://github.com/pre-commit/action/compare/v3.0.0...v3.0.1)

---
updated-dependencies:
- dependency-name: pre-commit/action
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Pull Request #4225: Bump pre-commit/action from 3.0.0 to 3.0.1

3080 of 3301 branches covered (93.31%)

7256 of 7528 relevant lines covered (96.39%)

4.81 hits per line

Source File: /src/black/parsing.py (89.47% covered)

Uncovered in this build: the IndexError fallback in lib2to3_parse, the bodies of matches_grammar and lib2to3_unparse, and the AttributeError fallback in stringify_ast.
"""
Parse Python code and perform AST validation.
"""

import ast
import sys
import warnings
from typing import Iterable, Iterator, List, Set, Tuple

from black.mode import VERSION_TO_FEATURES, Feature, TargetVersion, supports_feature
from black.nodes import syms
from blib2to3 import pygram
from blib2to3.pgen2 import driver
from blib2to3.pgen2.grammar import Grammar
from blib2to3.pgen2.parse import ParseError
from blib2to3.pgen2.tokenize import TokenError
from blib2to3.pytree import Leaf, Node


class InvalidInput(ValueError):
    """Raised when input source code fails all parse attempts."""


def get_grammars(target_versions: Set[TargetVersion]) -> List[Grammar]:
    if not target_versions:
        # No target_version specified, so try all grammars.
        return [
            # Python 3.7-3.9
            pygram.python_grammar_async_keywords,
            # Python 3.0-3.6
            pygram.python_grammar,
            # Python 3.10+
            pygram.python_grammar_soft_keywords,
        ]

    grammars = []
    # If we have to parse both, try to parse async as a keyword first
    if not supports_feature(
        target_versions, Feature.ASYNC_IDENTIFIERS
    ) and not supports_feature(target_versions, Feature.PATTERN_MATCHING):
        # Python 3.7-3.9
        grammars.append(pygram.python_grammar_async_keywords)
    if not supports_feature(target_versions, Feature.ASYNC_KEYWORDS):
        # Python 3.0-3.6
        grammars.append(pygram.python_grammar)
    if any(Feature.PATTERN_MATCHING in VERSION_TO_FEATURES[v] for v in target_versions):
        # Python 3.10+
        grammars.append(pygram.python_grammar_soft_keywords)

    # At least one of the above branches must have been taken, because every Python
    # version has exactly one of the two 'ASYNC_*' flags
    return grammars

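Not part of parsing.py, but a minimal sketch of the selection logic above (assuming black is importable and that the TargetVersion members used here exist in the installed version):

# Illustrative sketch, not part of parsing.py.
from black.mode import TargetVersion
from black.parsing import get_grammars

# No target versions: all three grammars are tried, async-as-keyword first.
assert len(get_grammars(set())) == 3

# 3.8 only: async is a keyword and pattern matching is unavailable,
# so just the async-keyword grammar is selected.
assert len(get_grammars({TargetVersion.PY38})) == 1

# 3.11 only: the soft-keyword grammar is selected for match statements.
assert len(get_grammars({TargetVersion.PY311})) == 1
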
def lib2to3_parse(src_txt: str, target_versions: Iterable[TargetVersion] = ()) -> Node:
    """Given a string with source, return the lib2to3 Node."""
    if not src_txt.endswith("\n"):
        src_txt += "\n"

    grammars = get_grammars(set(target_versions))
    errors = {}
    for grammar in grammars:
        drv = driver.Driver(grammar)
        try:
            result = drv.parse_string(src_txt, True)
            break

        except ParseError as pe:
            lineno, column = pe.context[1]
            lines = src_txt.splitlines()
            try:
                faulty_line = lines[lineno - 1]
            except IndexError:
                faulty_line = "<line number missing in source>"
            errors[grammar.version] = InvalidInput(
                f"Cannot parse: {lineno}:{column}: {faulty_line}"
            )

        except TokenError as te:
            # In edge cases these are raised; and typically don't have a "faulty_line".
            lineno, column = te.args[1]
            errors[grammar.version] = InvalidInput(
                f"Cannot parse: {lineno}:{column}: {te.args[0]}"
            )

    else:
        # Choose the latest version when raising the actual parsing error.
        assert len(errors) >= 1
        exc = errors[max(errors)]
        raise exc from None

    if isinstance(result, Leaf):
        result = Node(syms.file_input, [result])
    return result


def matches_grammar(src_txt: str, grammar: Grammar) -> bool:
    drv = driver.Driver(grammar)
    try:
        drv.parse_string(src_txt, True)
    except (ParseError, TokenError, IndentationError):
        return False
    else:
        return True


def lib2to3_unparse(node: Node) -> str:
    """Given a lib2to3 node, return its string representation."""
    code = str(node)
    return code

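Again illustrative rather than part of the file: a hedged sketch of the lib2to3 round trip. Because the concrete syntax tree keeps the source text verbatim, lib2to3_unparse(lib2to3_parse(src)) returns src unchanged, and unparseable input surfaces as InvalidInput (the exact column in the message depends on the parser):

# Illustrative sketch, not part of parsing.py.
from black.mode import TargetVersion
from black.parsing import InvalidInput, lib2to3_parse, lib2to3_unparse

src = "async def f():\n    await g()\n"
node = lib2to3_parse(src, {TargetVersion.PY38})
assert lib2to3_unparse(node) == src  # the CST keeps the source text byte for byte

try:
    lib2to3_parse("def broken(:\n")
except InvalidInput as exc:
    print(exc)  # e.g. "Cannot parse: 1:12: def broken(:"
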
def _parse_single_version(
    src: str, version: Tuple[int, int], *, type_comments: bool
) -> ast.AST:
    filename = "<unknown>"
    with warnings.catch_warnings():
        warnings.simplefilter("ignore", SyntaxWarning)
        warnings.simplefilter("ignore", DeprecationWarning)
        return ast.parse(
            src, filename, feature_version=version, type_comments=type_comments
        )


def parse_ast(src: str) -> ast.AST:
    # TODO: support Python 4+ ;)
    versions = [(3, minor) for minor in range(3, sys.version_info[1] + 1)]

    first_error = ""
    for version in sorted(versions, reverse=True):
        try:
            return _parse_single_version(src, version, type_comments=True)
        except SyntaxError as e:
            if not first_error:
                first_error = str(e)

    # Try to parse without type comments
    for version in sorted(versions, reverse=True):
        try:
            return _parse_single_version(src, version, type_comments=False)
        except SyntaxError:
            pass

    raise SyntaxError(first_error)

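A hedged sketch of how parse_ast degrades, not part of parsing.py. It assumes ast.parse rejects the malformed type comment on the type_comments=True pass, which is exactly the case the fallback loop above exists for:

# Illustrative sketch, not part of parsing.py.
import ast
from black.parsing import parse_ast

# Well-formed code (type comment included) parses on the first pass.
tree = parse_ast("x = []  # type: list[int]\n")
assert isinstance(tree, ast.Module)

# A malformed type comment only breaks the type_comments=True pass;
# the type_comments=False retry still succeeds.
parse_ast("x = []  # type: (\n")

# Genuinely invalid syntax exhausts both passes and re-raises the
# first SyntaxError that was seen.
try:
    parse_ast("def f(:\n")
except SyntaxError as exc:
    print(exc)
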
def _normalize(lineend: str, value: str) -> str:
    # To normalize, we strip any leading and trailing space from
    # each line...
    stripped: List[str] = [i.strip() for i in value.splitlines()]
    normalized = lineend.join(stripped)
    # ...and remove any blank lines at the beginning and end of
    # the whole string
    return normalized.strip()


def stringify_ast(node: ast.AST, depth: int = 0) -> Iterator[str]:
    """Simple visitor generating strings to compare ASTs by content."""

    if (
        isinstance(node, ast.Constant)
        and isinstance(node.value, str)
        and node.kind == "u"
    ):
        # It's a quirk of history that we strip the u prefix over here. We used to
        # rewrite the AST nodes for Python version compatibility and we never copied
        # over the kind
        node.kind = None

    yield f"{'  ' * depth}{node.__class__.__name__}("

    for field in sorted(node._fields):  # noqa: F402
        # TypeIgnore has only one field 'lineno' which breaks this comparison
        if isinstance(node, ast.TypeIgnore):
            break

        try:
            value: object = getattr(node, field)
        except AttributeError:
            continue

        yield f"{'  ' * (depth + 1)}{field}="

        if isinstance(value, list):
            for item in value:
                # Ignore nested tuples within del statements, because we may insert
                # parentheses and they change the AST.
                if (
                    field == "targets"
                    and isinstance(node, ast.Delete)
                    and isinstance(item, ast.Tuple)
                ):
                    for elt in item.elts:
                        yield from stringify_ast(elt, depth + 2)

                elif isinstance(item, ast.AST):
                    yield from stringify_ast(item, depth + 2)

        elif isinstance(value, ast.AST):
            yield from stringify_ast(value, depth + 2)

        else:
            normalized: object
            if (
                isinstance(node, ast.Constant)
                and field == "value"
                and isinstance(value, str)
            ):
                # Constant strings may be indented across newlines, if they are
                # docstrings; fold spaces after newlines when comparing. Similarly,
                # trailing and leading space may be removed.
                normalized = _normalize("\n", value)
            elif field == "type_comment" and isinstance(value, str):
                # Trailing whitespace in type comments is removed.
                normalized = value.rstrip()
            else:
                normalized = value
            yield f"{'  ' * (depth + 2)}{normalized!r},  # {value.__class__.__name__}"

    yield f"{'  ' * depth})  # /{node.__class__.__name__}"
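
Finally, a sketch (not part of parsing.py) of how parse_ast and stringify_ast combine into the kind of AST-equivalence check Black runs after reformatting; asts_match is a hypothetical helper, not a black API:

# Illustrative sketch, not part of parsing.py; asts_match is a hypothetical helper.
from black.parsing import parse_ast, stringify_ast


def asts_match(src_a: str, src_b: str) -> bool:
    """Compare two sources by their normalized AST dumps."""
    dump_a = "\n".join(stringify_ast(parse_ast(src_a)))
    dump_b = "\n".join(stringify_ast(parse_ast(src_b)))
    return dump_a == dump_b


# Layout-only changes leave the dump identical...
assert asts_match("x=(1,2)", "x = (1, 2)\n")

# ...while a behavioural change does not.
assert not asts_match("x = 1", "x = 2")

Because stringify_ast walks only _fields (positions live in _attributes) and normalizes docstrings and type comments, purely cosmetic differences produce identical dumps.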