• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

pantsbuild / pants / 19015773527

02 Nov 2025 05:33PM UTC coverage: 17.872% (-62.4%) from 80.3%
19015773527

Pull #22816

github

web-flow
Merge a12d75757 into 6c024e162
Pull Request #22816: Update Pants internal Python to 3.14

4 of 5 new or added lines in 3 files covered. (80.0%)

28452 existing lines in 683 files now uncovered.

9831 of 55007 relevant lines covered (17.87%)

0.18 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

0.0
/src/python/pants/backend/shell/dependency_inference.py
1
# Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).
2
# Licensed under the Apache License, Version 2.0 (see LICENSE).
3

UNCOV
4
from __future__ import annotations
×
5

UNCOV
6
import json
×
UNCOV
7
import logging
×
UNCOV
8
import os
×
UNCOV
9
import re
×
UNCOV
10
from collections import defaultdict
×
UNCOV
11
from collections.abc import Iterable
×
UNCOV
12
from dataclasses import dataclass
×
UNCOV
13
from typing import DefaultDict
×
14

UNCOV
15
from pants.backend.shell.lint.shellcheck.subsystem import Shellcheck
×
UNCOV
16
from pants.backend.shell.subsystems.shell_setup import ShellSetup
×
UNCOV
17
from pants.backend.shell.target_types import ShellDependenciesField, ShellSourceField
×
UNCOV
18
from pants.core.util_rules.external_tool import download_external_tool
×
UNCOV
19
from pants.engine.addresses import Address
×
UNCOV
20
from pants.engine.collection import DeduplicatedCollection
×
UNCOV
21
from pants.engine.fs import Digest
×
UNCOV
22
from pants.engine.internals.graph import determine_explicitly_provided_dependencies, hydrate_sources
×
UNCOV
23
from pants.engine.intrinsics import execute_process
×
UNCOV
24
from pants.engine.platform import Platform
×
UNCOV
25
from pants.engine.process import Process, ProcessCacheScope
×
UNCOV
26
from pants.engine.rules import Rule, collect_rules, concurrently, implicitly, rule
×
UNCOV
27
from pants.engine.target import (
×
28
    AllTargets,
29
    DependenciesRequest,
30
    FieldSet,
31
    HydrateSourcesRequest,
32
    InferDependenciesRequest,
33
    InferredDependencies,
34
    Targets,
35
)
UNCOV
36
from pants.engine.unions import UnionRule
×
UNCOV
37
from pants.util.frozendict import FrozenDict
×
UNCOV
38
from pants.util.logging import LogLevel
×
UNCOV
39
from pants.util.ordered_set import OrderedSet
×
40

UNCOV
41
# Module-level logger; used below to report unexpected Shellcheck output during inference.
logger = logging.getLogger(__name__)
×
42

43

UNCOV
44
class AllShellTargets(Targets):
    """Every target in the project that has a `ShellSourceField`."""
×
46

47

UNCOV
48
@rule(desc="Find all Shell targets in project", level=LogLevel.DEBUG)
async def find_all_shell_targets(all_tgts: AllTargets) -> AllShellTargets:
    """Filter the project's targets down to those carrying a `ShellSourceField`."""
    shell_tgts = (tgt for tgt in all_tgts if tgt.has_field(ShellSourceField))
    return AllShellTargets(shell_tgts)
×
51

52

UNCOV
53
@dataclass(frozen=True)
class ShellMapping:
    """A mapping of Shell file names to their owning file address.

    Files claimed by exactly one target appear in `mapping`; files claimed by more than
    one target are excluded from `mapping` and recorded in `ambiguous_modules` instead.
    """

    mapping: FrozenDict[str, Address]
    ambiguous_modules: FrozenDict[str, tuple[Address, ...]]
×
59

60

UNCOV
61
@rule(desc="Creating map of Shell file names to Shell targets", level=LogLevel.DEBUG)
async def map_shell_files(tgts: AllShellTargets) -> ShellMapping:
    """Build a `ShellMapping` from file path to owning address, tracking ambiguity.

    The first target seen for a path claims it; any later claimant moves the path into
    the ambiguous bucket together with every address that claimed it.
    """
    owner_by_path: dict[str, Address] = {}
    contested: DefaultDict[str, set[Address]] = defaultdict(set)
    for tgt in tgts:
        path = tgt[ShellSourceField].file_path
        if path not in owner_by_path:
            owner_by_path[path] = tgt.address
        else:
            contested[path].update({owner_by_path[path], tgt.address})

    # Contested paths must not be inferred as unambiguous owners.
    for path in contested:
        owner_by_path.pop(path)

    return ShellMapping(
        mapping=FrozenDict(sorted(owner_by_path.items())),
        ambiguous_modules=FrozenDict(
            (path, tuple(sorted(addrs))) for path, addrs in sorted(contested.items())
        ),
    )
82

83

UNCOV
84
class ParsedShellImports(DeduplicatedCollection):
    """The de-duplicated paths that a Shell script `source`s, kept sorted."""

    sort_input = True
×
86

87

UNCOV
88
@dataclass(frozen=True)
class ParseShellImportsRequest:
    """A request to extract the files `source`d by a single Shell script."""

    # Digest containing the script to be parsed.
    digest: Digest
    # Path of the script within `digest`.
    fp: str
×
92

93

UNCOV
94
# Extracts the `source`d path from Shellcheck's SC1091 message (see `parse_shell_imports`).
PATH_FROM_SHELLCHECK_ERROR = re.compile(r"Not following: (.+) was not specified as input")
×
95

96

UNCOV
97
@rule
async def parse_shell_imports(
    request: ParseShellImportsRequest, shellcheck: Shellcheck, platform: Platform
) -> ParsedShellImports:
    """Detect the files `source`d by a Shell script by running Shellcheck on it.

    We use Shellcheck to parse for us by running it against each file in isolation, which
    means that all `source` statements will error (code 1091). We then extract the
    problematic paths from the JSON output. Returns an empty collection (after logging an
    error) if Shellcheck's output cannot be interpreted.
    """
    downloaded_shellcheck = await download_external_tool(shellcheck.get_request(platform))

    # Mount the tool via an immutable input so the script's digest stays separate from the binary.
    immutable_input_key = "__shellcheck_tool"
    exe_path = os.path.join(immutable_input_key, downloaded_shellcheck.exe)

    process_result = await execute_process(
        Process(
            # NB: We do not load up `[shellcheck].{args,config}` because it would risk breaking
            # determinism of dependency inference in an unexpected way.
            [exe_path, "--format=json", request.fp],
            input_digest=request.digest,
            immutable_input_digests={immutable_input_key: downloaded_shellcheck.digest},
            description=f"Detect Shell imports for {request.fp}",
            level=LogLevel.DEBUG,
            # We expect this to always fail, but it should still be cached because the process is
            # deterministic.
            cache_scope=ProcessCacheScope.ALWAYS,
        ),
        **implicitly(),
    )

    try:
        output = json.loads(process_result.stdout)
    except json.JSONDecodeError:
        logger.error(
            f"Parsing {request.fp} for dependency inference failed because Shellcheck's output "
            f"could not be loaded as JSON. Please open a GitHub issue at "
            f"https://github.com/pantsbuild/pants/issues/new with this error message attached.\n\n"
            f"\nshellcheck version: {shellcheck.version}\n"
            f"process_result.stdout: {process_result.stdout.decode()}"
        )
        return ParsedShellImports()

    paths = set()
    for error in output:
        # Code 1091 is "Not following: ... was not specified as input", i.e. a `source`
        # statement whose target was not provided — exactly the imports we want.
        # (Was `if not error.get("code", "") == 1091:` — `!=` with no dummy default is
        # the idiomatic, type-consistent form; a missing key still skips the entry.)
        if error.get("code") != 1091:
            continue
        msg = error.get("message", "")
        matches = PATH_FROM_SHELLCHECK_ERROR.match(msg)
        if matches:
            paths.add(matches.group(1))
        else:
            logger.error(
                f"Parsing {request.fp} for dependency inference failed because Shellcheck's error "
                f"message was not in the expected format. Please open a GitHub issue at "
                f"https://github.com/pantsbuild/pants/issues/new with this error message "
                f"attached.\n\n\nshellcheck version: {shellcheck.version}\n"
                f"error JSON entry: {error}"
            )
    return ParsedShellImports(paths)
×
154

155

UNCOV
156
@dataclass(frozen=True)
class ShellDependenciesInferenceFieldSet(FieldSet):
    """The fields needed to infer dependencies for one Shell source target."""

    required_fields = (ShellSourceField, ShellDependenciesField)

    # The Shell script whose `source` statements are analyzed.
    source: ShellSourceField
    # Explicitly declared dependencies, consulted to resolve ambiguous owners.
    dependencies: ShellDependenciesField
×
162

163

UNCOV
164
class InferShellDependencies(InferDependenciesRequest):
    """Registers Shell dependency inference against `ShellDependenciesInferenceFieldSet`."""

    infer_from = ShellDependenciesInferenceFieldSet
×
166

167

UNCOV
168
@rule(desc="Inferring Shell dependencies by analyzing imports")
async def infer_shell_dependencies(
    request: InferShellDependencies, shell_mapping: ShellMapping, shell_setup: ShellSetup
) -> InferredDependencies:
    """Infer dependencies for a Shell target from the files its script `source`s.

    Each detected import path is resolved against `shell_mapping`: unambiguous owners
    are added directly, while ambiguous ones are warned about and only added when the
    explicitly provided dependencies disambiguate them.
    """
    if not shell_setup.dependency_inference:
        return InferredDependencies([])

    address = request.field_set.address
    explicitly_provided_deps, hydrated_sources = await concurrently(
        determine_explicitly_provided_dependencies(
            **implicitly(DependenciesRequest(request.field_set.dependencies))
        ),
        hydrate_sources(HydrateSourcesRequest(request.field_set.source), **implicitly()),
    )
    # A `ShellSourceField` target hydrates to exactly one file.
    assert len(hydrated_sources.snapshot.files) == 1

    detected_imports = await parse_shell_imports(
        ParseShellImportsRequest(
            hydrated_sources.snapshot.digest, hydrated_sources.snapshot.files[0]
        ),
        **implicitly(),
    )

    inferred: OrderedSet[Address] = OrderedSet()
    for import_path in detected_imports:
        sole_owner = shell_mapping.mapping.get(import_path)
        if sole_owner:
            inferred.add(sole_owner)
            continue
        candidates = shell_mapping.ambiguous_modules.get(import_path)
        if not candidates:
            continue
        explicitly_provided_deps.maybe_warn_of_ambiguous_dependency_inference(
            candidates,
            address,
            import_reference="file",
            context=f"The target {address} sources `{import_path}`",
        )
        disambiguated = explicitly_provided_deps.disambiguated(candidates)
        if disambiguated:
            inferred.add(disambiguated)
    return InferredDependencies(sorted(inferred))
×
207

208

UNCOV
209
def rules() -> Iterable[Rule | UnionRule]:
    """Return this module's rules plus the dependency-inference union registration."""
    return (
        *collect_rules(),
        UnionRule(InferDependenciesRequest, InferShellDependencies),
    )
×
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2025 Coveralls, Inc