• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

pantsbuild / pants / 22285099215

22 Feb 2026 08:52PM UTC coverage: 75.854% (-17.1%) from 92.936%
22285099215

Pull #23121

github

web-flow
Merge c7299df9c into ba8359840
Pull Request #23121: fix issue with optional fields in dependency validator

28 of 29 new or added lines in 2 files covered. (96.55%)

11174 existing lines in 400 files now uncovered.

53694 of 70786 relevant lines covered (75.85%)

1.88 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

54.29
/src/python/pants/backend/shell/dependency_inference.py
1
# Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).
2
# Licensed under the Apache License, Version 2.0 (see LICENSE).
3

4
from __future__ import annotations
1✔
5

6
import json
1✔
7
import logging
1✔
8
import os
1✔
9
import re
1✔
10
from collections import defaultdict
1✔
11
from collections.abc import Iterable
1✔
12
from dataclasses import dataclass
1✔
13
from typing import DefaultDict
1✔
14

15
from pants.backend.shell.lint.shellcheck.subsystem import Shellcheck
1✔
16
from pants.backend.shell.subsystems.shell_setup import ShellSetup
1✔
17
from pants.backend.shell.target_types import ShellDependenciesField, ShellSourceField
1✔
18
from pants.core.util_rules.external_tool import download_external_tool
1✔
19
from pants.engine.addresses import Address
1✔
20
from pants.engine.collection import DeduplicatedCollection
1✔
21
from pants.engine.fs import Digest
1✔
22
from pants.engine.internals.graph import determine_explicitly_provided_dependencies, hydrate_sources
1✔
23
from pants.engine.intrinsics import execute_process
1✔
24
from pants.engine.platform import Platform
1✔
25
from pants.engine.process import Process, ProcessCacheScope
1✔
26
from pants.engine.rules import Rule, collect_rules, concurrently, implicitly, rule
1✔
27
from pants.engine.target import (
1✔
28
    AllTargets,
29
    DependenciesRequest,
30
    FieldSet,
31
    HydrateSourcesRequest,
32
    InferDependenciesRequest,
33
    InferredDependencies,
34
    Targets,
35
)
36
from pants.engine.unions import UnionRule
1✔
37
from pants.util.frozendict import FrozenDict
1✔
38
from pants.util.logging import LogLevel
1✔
39
from pants.util.ordered_set import OrderedSet
1✔
40

41
logger = logging.getLogger(__name__)
1✔
42

43

44
class AllShellTargets(Targets):
    """All targets in the project that have a `ShellSourceField`.

    Populated by `find_all_shell_targets`.
    """

    pass
46

47

48
@rule(desc="Find all Shell targets in project", level=LogLevel.DEBUG)
1✔
49
async def find_all_shell_targets(all_tgts: AllTargets) -> AllShellTargets:
1✔
UNCOV
50
    return AllShellTargets(tgt for tgt in all_tgts if tgt.has_field(ShellSourceField))
×
51

52

53
@dataclass(frozen=True)
class ShellMapping:
    """A mapping of Shell file names to their owning file address."""

    # File path -> address of the single unambiguous owning target.
    mapping: FrozenDict[str, Address]
    # File path -> all candidate owners, for files claimed by more than one target.
    # Such files are excluded from `mapping`.
    ambiguous_modules: FrozenDict[str, tuple[Address, ...]]
59

60

61
@rule(desc="Creating map of Shell file names to Shell targets", level=LogLevel.DEBUG)
1✔
62
async def map_shell_files(tgts: AllShellTargets) -> ShellMapping:
1✔
UNCOV
63
    files_to_addresses: dict[str, Address] = {}
×
UNCOV
64
    files_with_multiple_owners: DefaultDict[str, set[Address]] = defaultdict(set)
×
UNCOV
65
    for tgt in tgts:
×
UNCOV
66
        fp = tgt[ShellSourceField].file_path
×
UNCOV
67
        if fp in files_to_addresses:
×
UNCOV
68
            files_with_multiple_owners[fp].update({files_to_addresses[fp], tgt.address})
×
69
        else:
UNCOV
70
            files_to_addresses[fp] = tgt.address
×
71

72
    # Remove files with ambiguous owners.
UNCOV
73
    for ambiguous_f in files_with_multiple_owners:
×
UNCOV
74
        files_to_addresses.pop(ambiguous_f)
×
75

UNCOV
76
    return ShellMapping(
×
77
        mapping=FrozenDict(sorted(files_to_addresses.items())),
78
        ambiguous_modules=FrozenDict(
79
            (k, tuple(sorted(v))) for k, v in sorted(files_with_multiple_owners.items())
80
        ),
81
    )
82

83

84
class ParsedShellImports(DeduplicatedCollection):
    """The de-duplicated file paths `source`d by a single Shell file.

    Produced by `parse_shell_imports`, which extracts the paths from Shellcheck output.
    """

    sort_input = True
86

87

88
@dataclass(frozen=True)
class ParseShellImportsRequest:
    """Request to detect the `source` statements in one Shell file."""

    # Digest containing the file to parse (passed as the process's input digest).
    digest: Digest
    # Path of the file within the digest.
    fp: str
92

93

94
# Extracts the `source`d path from Shellcheck's "Not following" message (error code 1091,
# matched against in `parse_shell_imports`).
PATH_FROM_SHELLCHECK_ERROR = re.compile(r"Not following: (.+) was not specified as input")
95

96

97
@rule
async def parse_shell_imports(
    request: ParseShellImportsRequest, shellcheck: Shellcheck, platform: Platform
) -> ParsedShellImports:
    """Detect the files `source`d by `request.fp` by running Shellcheck on it.

    We use Shellcheck to parse for us by running it against each file in isolation, which means
    that all `source` statements will error. Then, we can extract the problematic paths from the
    JSON output.

    Returns an empty collection (after logging an error) if Shellcheck's output cannot be
    parsed as expected.
    """
    downloaded_shellcheck = await download_external_tool(shellcheck.get_request(platform))

    immutable_input_key = "__shellcheck_tool"
    exe_path = os.path.join(immutable_input_key, downloaded_shellcheck.exe)

    process_result = await execute_process(
        Process(
            # NB: We do not load up `[shellcheck].{args,config}` because it would risk breaking
            # determinism of dependency inference in an unexpected way.
            [exe_path, "--format=json", request.fp],
            input_digest=request.digest,
            immutable_input_digests={immutable_input_key: downloaded_shellcheck.digest},
            description=f"Detect Shell imports for {request.fp}",
            level=LogLevel.DEBUG,
            # We expect this to always fail, but it should still be cached because the process is
            # deterministic.
            cache_scope=ProcessCacheScope.ALWAYS,
        ),
        **implicitly(),
    )

    try:
        output = json.loads(process_result.stdout)
    except json.JSONDecodeError:
        logger.error(
            f"Parsing {request.fp} for dependency inference failed because Shellcheck's output "
            f"could not be loaded as JSON. Please open a GitHub issue at "
            f"https://github.com/pantsbuild/pants/issues/new with this error message attached.\n\n"
            f"\nshellcheck version: {shellcheck.version}\n"
            f"process_result.stdout: {process_result.stdout.decode()}"
        )
        return ParsedShellImports()

    paths = set()
    for error in output:
        # Only error code 1091 ("Not following: ...") identifies a `source`d file.
        # NB: `!=` against a default of `None` is equivalent to, and clearer than, the
        # previous `not error.get("code", "") == 1091` — a missing `code` is skipped.
        if error.get("code") != 1091:
            continue
        msg = error.get("message", "")
        matches = PATH_FROM_SHELLCHECK_ERROR.match(msg)
        if matches:
            paths.add(matches.group(1))
        else:
            logger.error(
                f"Parsing {request.fp} for dependency inference failed because Shellcheck's error "
                f"message was not in the expected format. Please open a GitHub issue at "
                f"https://github.com/pantsbuild/pants/issues/new with this error message "
                f"attached.\n\n\nshellcheck version: {shellcheck.version}\n"
                f"error JSON entry: {error}"
            )
    return ParsedShellImports(paths)
154

155

156
@dataclass(frozen=True)
class ShellDependenciesInferenceFieldSet(FieldSet):
    """The fields needed to infer dependencies for one Shell target."""

    required_fields = (ShellSourceField, ShellDependenciesField)

    # The single Shell source file owned by the target.
    source: ShellSourceField
    # The target's explicitly declared dependencies.
    dependencies: ShellDependenciesField
162

163

164
class InferShellDependencies(InferDependenciesRequest):
    """Request to infer a Shell target's dependencies from its `source` statements."""

    infer_from = ShellDependenciesInferenceFieldSet
166

167

168
@rule(desc="Inferring Shell dependencies by analyzing imports")
1✔
169
async def infer_shell_dependencies(
1✔
170
    request: InferShellDependencies, shell_mapping: ShellMapping, shell_setup: ShellSetup
171
) -> InferredDependencies:
UNCOV
172
    if not shell_setup.dependency_inference:
×
173
        return InferredDependencies([])
×
174

UNCOV
175
    address = request.field_set.address
×
UNCOV
176
    explicitly_provided_deps, hydrated_sources = await concurrently(
×
177
        determine_explicitly_provided_dependencies(
178
            **implicitly(DependenciesRequest(request.field_set.dependencies))
179
        ),
180
        hydrate_sources(HydrateSourcesRequest(request.field_set.source), **implicitly()),
181
    )
UNCOV
182
    assert len(hydrated_sources.snapshot.files) == 1
×
183

UNCOV
184
    detected_imports = await parse_shell_imports(
×
185
        ParseShellImportsRequest(
186
            hydrated_sources.snapshot.digest, hydrated_sources.snapshot.files[0]
187
        ),
188
        **implicitly(),
189
    )
UNCOV
190
    result: OrderedSet[Address] = OrderedSet()
×
UNCOV
191
    for import_path in detected_imports:
×
UNCOV
192
        unambiguous = shell_mapping.mapping.get(import_path)
×
UNCOV
193
        ambiguous = shell_mapping.ambiguous_modules.get(import_path)
×
UNCOV
194
        if unambiguous:
×
UNCOV
195
            result.add(unambiguous)
×
UNCOV
196
        elif ambiguous:
×
UNCOV
197
            explicitly_provided_deps.maybe_warn_of_ambiguous_dependency_inference(
×
198
                ambiguous,
199
                address,
200
                import_reference="file",
201
                context=f"The target {address} sources `{import_path}`",
202
            )
UNCOV
203
            maybe_disambiguated = explicitly_provided_deps.disambiguated(ambiguous)
×
UNCOV
204
            if maybe_disambiguated:
×
UNCOV
205
                result.add(maybe_disambiguated)
×
UNCOV
206
    return InferredDependencies(sorted(result))
×
207

208

209
def rules() -> Iterable[Rule | UnionRule]:
    """Return this module's rules plus the union registration for dependency inference."""
    all_rules: list[Rule | UnionRule] = list(collect_rules())
    all_rules.append(UnionRule(InferDependenciesRequest, InferShellDependencies))
    return tuple(all_rules)
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc