pantsbuild / pants · build 19381742489
15 Nov 2025 12:52AM UTC · coverage: 49.706% (-30.6% from 80.29%)

Pull Request #22890 (merge d961abf79 into 42e1ebd41, via GitHub web-flow):
"Updated all python subsystem constraints to 3.14"

4 of 5 new or added lines in 5 files covered (80.0%).
14659 existing lines in 485 files are now uncovered.
31583 of 63540 relevant lines covered (49.71%), at 0.79 hits per line.
Source file: /src/python/pants/backend/scala/dependency_inference/scala_parser.py (55.93% of lines covered)

# Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).

from __future__ import annotations

import json
import logging
import os
from collections.abc import Iterator, Mapping
from dataclasses import dataclass
from typing import Any

from pants.backend.scala.subsystems.scala import ScalaSubsystem
from pants.backend.scala.subsystems.scalac import Scalac
from pants.backend.scala.util_rules.versions import (
    ScalaArtifactsForVersionRequest,
    ScalaVersion,
    _resolve_scala_artifacts_for_version,
    resolve_scala_artifacts_for_version,
)
from pants.core.goals.resolves import ExportableTool
from pants.core.util_rules.source_files import (
    SourceFiles,
    SourceFilesRequest,
    determine_source_files,
)
from pants.engine.fs import (
    AddPrefix,
    CreateDigest,
    Directory,
    FileContent,
    MergeDigests,
    RemovePrefix,
)
from pants.engine.internals.graph import resolve_target
from pants.engine.internals.selectors import concurrently
from pants.engine.intrinsics import (
    add_prefix,
    create_digest,
    execute_process,
    get_digest_contents,
    merge_digests,
    remove_prefix,
)
from pants.engine.process import (
    FallibleProcessResult,
    ProductDescription,
    execute_process_or_raise,
    fallible_to_exec_result_or_raise,
)
from pants.engine.rules import collect_rules, implicitly, rule
from pants.engine.target import WrappedTargetRequest
from pants.engine.unions import UnionRule
from pants.jvm.compile import ClasspathEntry
from pants.jvm.jdk_rules import InternalJdk, JvmProcess
from pants.jvm.jdk_rules import rules as jdk_rules
from pants.jvm.resolve.common import ArtifactRequirements
from pants.jvm.resolve.coursier_fetch import ToolClasspathRequest, materialize_classpath_for_tool
from pants.jvm.resolve.jvm_tool import GenerateJvmLockfileFromTool, JvmToolBase
from pants.jvm.subsystems import JvmSubsystem
from pants.jvm.target_types import JvmResolveField
from pants.util.frozendict import FrozenDict
from pants.util.logging import LogLevel
from pants.util.ordered_set import FrozenOrderedSet
from pants.util.resources import read_resource

logger = logging.getLogger(__name__)


_PARSER_SCALA_VERSION = ScalaVersion.parse("2.13.8")
_PARSER_SCALA_BINARY_VERSION = _PARSER_SCALA_VERSION.binary
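# The Scala binary (major.minor) version, e.g. "2.13" for 2.13.8; it is
# interpolated below into artifact names such as scalameta_2.13.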


class ScalaParser(JvmToolBase):
    options_scope = "scala-parser"
    help = "Internal tool for parsing Scala sources to identify dependencies"

    default_artifacts = (
        f"org.scalameta:scalameta_{_PARSER_SCALA_BINARY_VERSION}:4.8.7",
        f"io.circe:circe-generic_{_PARSER_SCALA_BINARY_VERSION}:0.14.1",
        _resolve_scala_artifacts_for_version(
            _PARSER_SCALA_VERSION
        ).library_coordinate.to_coord_str(),
    )
    default_lockfile_resource = (
        "pants.backend.scala.dependency_inference",
        "scala_parser.lock",
    )


@dataclass(frozen=True)
class ScalaImport:
    name: str
    alias: str | None
    is_wildcard: bool

    @classmethod
    def from_json_dict(cls, data: Mapping[str, Any]):
        return cls(name=data["name"], alias=data.get("alias"), is_wildcard=data["isWildcard"])

    def to_debug_json_dict(self) -> dict[str, Any]:
        return {
            "name": self.name,
            "alias": self.alias,
            "is_wildcard": self.is_wildcard,
        }


@dataclass(frozen=True)
class ScalaProvidedSymbol:
    name: str
    recursive: bool

    @classmethod
    def from_json_dict(cls, data: Mapping[str, Any]):
        return cls(name=data["name"], recursive=data["recursive"])

    def to_debug_json_dict(self) -> dict[str, Any]:
        return {
            "name": self.name,
            "recursive": self.recursive,
        }


@dataclass(frozen=True)
class ScalaConsumedSymbol:
    name: str
    is_absolute: bool

    @classmethod
    def from_json_dict(cls, data: Mapping[str, Any]):
        return cls(name=data["name"], is_absolute=data["isAbsolute"])

    @property
    def is_qualified(self) -> bool:
        # TODO: Similar to #13545: we assume that a symbol containing a dot might already
        # be fully qualified.
        return "." in self.name

    def split(self) -> tuple[str, str]:
        """Splits the symbol name in its relative prefix and the rest of the symbol name."""
        symbol_rel_prefix, _, symbol_rel_suffix = self.name.partition(".")
        return (symbol_rel_prefix, symbol_rel_suffix)

    def to_debug_json_dict(self) -> dict[str, Any]:
        return {"name": self.name, "isAbsolute": self.is_absolute}


@dataclass(frozen=True)
class ScalaSourceDependencyAnalysis:
    provided_symbols: FrozenOrderedSet[ScalaProvidedSymbol]
    provided_symbols_encoded: FrozenOrderedSet[ScalaProvidedSymbol]
    imports_by_scope: FrozenDict[str, tuple[ScalaImport, ...]]
    _consumed_symbols_by_scope: FrozenDict[str, FrozenOrderedSet[ScalaConsumedSymbol]]
    scopes: FrozenOrderedSet[str]

    def all_imports(self) -> Iterator[str]:
        # TODO: This might also be an import relative to its scope.
        for imports in self.imports_by_scope.values():
            for imp in imports:
                yield imp.name

    def fully_qualified_consumed_symbols(self) -> Iterator[str]:
        """Consumed symbols qualified in various ways.

        This method _will_ introduce false-positives, because we will assume that the symbol could
        have been provided by any wildcard import in scope, as well as being declared in the current
        package.
        """

        def scope_and_parents(scope: str) -> Iterator[str]:
            while True:
                yield scope
                if scope == "":
                    break
                scope, _, _ = scope.rpartition(".")
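        # For example, scope_and_parents("a.b.c") yields "a.b.c", "a.b", "a",
        # and finally "" (the root scope).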

        for consumption_scope, consumed_symbols in self._consumed_symbols_by_scope.items():
            parent_scopes = tuple(scope_and_parents(consumption_scope))
            for symbol in consumed_symbols:
                if not self.scopes or symbol.is_qualified or symbol.is_absolute:
                    yield symbol.name

                if symbol.is_absolute:
                    # We do not need to qualify this symbol any further as we know its
                    # name is the actual fully qualified name
                    continue

                for parent_scope in parent_scopes:
                    if parent_scope in self.scopes:
                        # A package declaration is a parent of this scope, and any of its symbols
                        # could be in scope.
                        yield f"{parent_scope}.{symbol.name}"

                    for imp in self.imports_by_scope.get(parent_scope, ()):
                        if imp.is_wildcard:
                            # There is a wildcard import in a parent scope.
                            yield f"{imp.name}.{symbol.name}"
                        if symbol.is_qualified:
                            # If the parent scope has an import which defines the first token of the
                            # symbol, then it might be a relative usage of an import.
                            symbol_rel_prefix, symbol_rel_suffix = symbol.split()
                            if imp.alias:
                                if imp.alias == symbol_rel_prefix:
                                    yield f"{imp.name}.{symbol_rel_suffix}"
                            elif imp.name.endswith(f".{symbol_rel_prefix}"):
                                yield f"{imp.name}.{symbol_rel_suffix}"

    @property
    def consumed_symbols_by_scope(self) -> FrozenDict[str, FrozenOrderedSet[str]]:
        return FrozenDict(
            {
                key: FrozenOrderedSet(v.name for v in values)
                for key, values in self._consumed_symbols_by_scope.items()
            }
        )

    @classmethod
    def from_json_dict(cls, d: dict) -> ScalaSourceDependencyAnalysis:
        return cls(
            provided_symbols=FrozenOrderedSet(
                ScalaProvidedSymbol.from_json_dict(v) for v in d["providedSymbols"]
            ),
            provided_symbols_encoded=FrozenOrderedSet(
                ScalaProvidedSymbol.from_json_dict(v) for v in d["providedSymbolsEncoded"]
            ),
            imports_by_scope=FrozenDict(
                {
                    key: tuple(ScalaImport.from_json_dict(v) for v in values)
                    for key, values in d["importsByScope"].items()
                }
            ),
            _consumed_symbols_by_scope=FrozenDict(
                {
                    key: FrozenOrderedSet(ScalaConsumedSymbol.from_json_dict(v) for v in values)
                    for key, values in d["consumedSymbolsByScope"].items()
                }
            ),
            scopes=FrozenOrderedSet(d["scopes"]),
        )
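    # A sketch of the parser's JSON output shape, as implied by the keys read
    # above (values illustrative):
    #   {
    #     "providedSymbols": [{"name": "...", "recursive": false}, ...],
    #     "providedSymbolsEncoded": [{"name": "...", "recursive": false}, ...],
    #     "importsByScope": {"<scope>": [{"name": "...", "alias": null, "isWildcard": false}, ...]},
    #     "consumedSymbolsByScope": {"<scope>": [{"name": "...", "isAbsolute": false}, ...]},
    #     "scopes": ["...", ...]
    #   }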

    def to_debug_json_dict(self) -> dict[str, Any]:
        return {
            "provided_symbols": [v.to_debug_json_dict() for v in self.provided_symbols],
            "provided_symbols_encoded": [
                v.to_debug_json_dict() for v in self.provided_symbols_encoded
            ],
            "imports_by_scope": {
                key: [v.to_debug_json_dict() for v in values]
                for key, values in self.imports_by_scope.items()
            },
            "consumed_symbols_by_scope": {
                key: [v.to_debug_json_dict() for v in values]
                for key, values in self._consumed_symbols_by_scope.items()
            },
            "scopes": list(self.scopes),
        }


@dataclass(frozen=True)
class FallibleScalaSourceDependencyAnalysisResult:
    process_result: FallibleProcessResult


class ScalaParserCompiledClassfiles(ClasspathEntry):
    pass


@dataclass(frozen=True)
class AnalyzeScalaSourceRequest:
    source_files: SourceFiles
    scala_version: ScalaVersion
    source3: bool


@rule(level=LogLevel.DEBUG)
async def create_analyze_scala_source_request(
    scala_subsystem: ScalaSubsystem, jvm: JvmSubsystem, scalac: Scalac, request: SourceFilesRequest
) -> AnalyzeScalaSourceRequest:
    address = request.sources_fields[0].address

    wrapped_tgt, source_files = await concurrently(
        resolve_target(
            WrappedTargetRequest(
                address, description_of_origin="<the Scala analyze request setup rule>"
            ),
            **implicitly(),
        ),
        determine_source_files(request),
    )

    tgt = wrapped_tgt.target
    resolve = tgt[JvmResolveField].normalized_value(jvm)
    scala_version = scala_subsystem.version_for_resolve(resolve)
    source3 = "-Xsource:3" in scalac.parsed_args_for_resolve(resolve)

    return AnalyzeScalaSourceRequest(source_files, scala_version, source3)


@rule(level=LogLevel.DEBUG)
async def analyze_scala_source_dependencies(
    jdk: InternalJdk,
    processor_classfiles: ScalaParserCompiledClassfiles,
    tool: ScalaParser,
    request: AnalyzeScalaSourceRequest,
) -> FallibleScalaSourceDependencyAnalysisResult:
    source_files = request.source_files

    if len(source_files.files) > 1:
        raise ValueError(
            f"analyze_scala_source_dependencies expects sources with exactly 1 source file, but found {len(source_files.files)}."
        )
    elif len(source_files.files) == 0:
        raise ValueError(
            "analyze_scala_source_dependencies expects sources with exactly 1 source file, but found none."
        )
    source_prefix = "__source_to_analyze"
    source_path = os.path.join(source_prefix, source_files.files[0])
    processorcp_relpath = "__processorcp"
    toolcp_relpath = "__toolcp"

    tool_classpath, prefixed_source_files_digest = await concurrently(
        materialize_classpath_for_tool(
            ToolClasspathRequest(lockfile=GenerateJvmLockfileFromTool.create(tool))
        ),
        add_prefix(AddPrefix(source_files.snapshot.digest, source_prefix)),
    )

    extra_immutable_input_digests = {
        toolcp_relpath: tool_classpath.digest,
        processorcp_relpath: processor_classfiles.digest,
    }

    analysis_output_path = "__source_analysis.json"

    process_result = await execute_process(
        **implicitly(
            JvmProcess(
                jdk=jdk,
                classpath_entries=[
                    *tool_classpath.classpath_entries(toolcp_relpath),
                    processorcp_relpath,
                ],
                argv=[
                    "org.pantsbuild.backend.scala.dependency_inference.ScalaParser",
                    analysis_output_path,
                    source_path,
                    str(request.scala_version),
                    str(request.source3),
                ],
                input_digest=prefixed_source_files_digest,
                extra_immutable_input_digests=extra_immutable_input_digests,
                output_files=(analysis_output_path,),
                extra_nailgun_keys=extra_immutable_input_digests,
                description=f"Analyzing {source_files.files[0]}",
                level=LogLevel.DEBUG,
            )
        )
    )

    return FallibleScalaSourceDependencyAnalysisResult(process_result=process_result)


@rule(level=LogLevel.DEBUG)
async def resolve_fallible_result_to_analysis(
    fallible_result: FallibleScalaSourceDependencyAnalysisResult,
) -> ScalaSourceDependencyAnalysis:
    description = ProductDescription("Scala source dependency analysis failed.")
    result = await fallible_to_exec_result_or_raise(
        **implicitly(
            {
                fallible_result.process_result: FallibleProcessResult,
                description: ProductDescription,
            }
        )
    )
    analysis_contents = await get_digest_contents(result.output_digest)
    analysis = json.loads(analysis_contents[0].content)
    return ScalaSourceDependencyAnalysis.from_json_dict(analysis)


# TODO(13879): Consolidate compilation of wrapper binaries to common rules.
@rule
async def setup_scala_parser_classfiles(
    jdk: InternalJdk, tool: ScalaParser
) -> ScalaParserCompiledClassfiles:
    dest_dir = "classfiles"

    parser_source_content = read_resource(
        "pants.backend.scala.dependency_inference", "ScalaParser.scala"
    )
    if not parser_source_content:
        raise AssertionError("Unable to find ScalaParser.scala resource.")

    parser_source = FileContent("ScalaParser.scala", parser_source_content)

    scala_artifacts = await resolve_scala_artifacts_for_version(
        ScalaArtifactsForVersionRequest(_PARSER_SCALA_VERSION)
    )

    tool_classpath, parser_classpath, source_digest = await concurrently(
        materialize_classpath_for_tool(
            ToolClasspathRequest(
                prefix="__toolcp",
                artifact_requirements=ArtifactRequirements.from_coordinates(
                    scala_artifacts.all_coordinates
                ),
            )
        ),
        materialize_classpath_for_tool(
            ToolClasspathRequest(
                prefix="__parsercp", lockfile=(GenerateJvmLockfileFromTool.create(tool))
            )
        ),
        create_digest(CreateDigest([parser_source, Directory(dest_dir)])),
    )

    merged_digest = await merge_digests(
        MergeDigests(
            (
                tool_classpath.digest,
                parser_classpath.digest,
                source_digest,
            )
        )
    )

    process_result = await execute_process_or_raise(
        **implicitly(
            JvmProcess(
                jdk=jdk,
                classpath_entries=tool_classpath.classpath_entries(),
                argv=[
                    "scala.tools.nsc.Main",
                    "-bootclasspath",
                    ":".join(tool_classpath.classpath_entries()),
                    "-classpath",
                    ":".join(parser_classpath.classpath_entries()),
                    "-d",
                    dest_dir,
                    parser_source.path,
                ],
                input_digest=merged_digest,
                output_directories=(dest_dir,),
                description="Compile Scala parser for dependency inference with scalac",
                level=LogLevel.DEBUG,
                # NB: We do not use nailgun for this process, since it is launched exactly once.
                use_nailgun=False,
            )
        )
    )
    stripped_classfiles_digest = await remove_prefix(
        RemovePrefix(process_result.output_digest, dest_dir)
    )
    return ScalaParserCompiledClassfiles(digest=stripped_classfiles_digest)


def rules():
    return (
        *collect_rules(),
        *jdk_rules(),
        UnionRule(ExportableTool, ScalaParser),
    )
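
A minimal sketch of how these rules are typically activated, assuming the usual
Pants plugin convention of a backend register.py that re-exports them (the
module shown below is a hypothetical example, not part of this file):

    # register.py of a hypothetical backend (illustrative)
    from pants.backend.scala.dependency_inference import scala_parser

    def rules():
        return [*scala_parser.rules()]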