• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

pantsbuild / pants / 19686705658

25 Nov 2025 11:02PM UTC coverage: 80.288%. Remained the same
19686705658

Pull #22906

github

web-flow
Merge 5cfee237d into 7c08ed5e3
Pull Request #22906: Update Coursier default version to v2.1.24

3 of 3 new or added lines in 2 files covered. (100.0%)

80 existing lines in 1 file now uncovered.

78386 of 97631 relevant lines covered (80.29%)

3.36 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

44.71
/src/python/pants/jvm/resolve/coursier_fetch.py
1
# Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).
2
# Licensed under the Apache License, Version 2.0 (see LICENSE).
3

4
from __future__ import annotations
12✔
5

6
import dataclasses
12✔
7
import importlib.resources
12✔
8
import itertools
12✔
9
import json
12✔
10
import logging
12✔
11
import os
12✔
12
from collections import defaultdict
12✔
13
from collections.abc import Iterable, Iterator
12✔
14
from dataclasses import dataclass
12✔
15
from itertools import chain
12✔
16
from typing import TYPE_CHECKING, Any
12✔
17

18
import toml
12✔
19

20
from pants.base.glob_match_error_behavior import GlobMatchErrorBehavior
12✔
21
from pants.core.goals.generate_lockfiles import DEFAULT_TOOL_LOCKFILE, GenerateLockfilesSubsystem
12✔
22
from pants.core.util_rules.source_files import SourceFilesRequest, determine_source_files
12✔
23
from pants.engine.addresses import UnparsedAddressInputs
12✔
24
from pants.engine.collection import Collection
12✔
25
from pants.engine.fs import (
12✔
26
    AddPrefix,
27
    CreateDigest,
28
    Digest,
29
    DigestSubset,
30
    FileContent,
31
    FileDigest,
32
    MergeDigests,
33
    PathGlobs,
34
    RemovePrefix,
35
    Snapshot,
36
)
37
from pants.engine.internals.graph import resolve_targets
12✔
38
from pants.engine.internals.native_engine import EMPTY_DIGEST
12✔
39
from pants.engine.intrinsics import (
12✔
40
    create_digest,
41
    digest_subset_to_digest,
42
    digest_to_snapshot,
43
    get_digest_contents,
44
    merge_digests,
45
    path_globs_to_digest,
46
    remove_prefix,
47
)
48
from pants.engine.process import fallible_to_exec_result_or_raise
12✔
49
from pants.engine.rules import collect_rules, concurrently, implicitly, rule
12✔
50
from pants.engine.target import CoarsenedTargets, Target
12✔
51
from pants.engine.unions import UnionRule
12✔
52
from pants.jvm.compile import (
12✔
53
    ClasspathEntry,
54
    ClasspathEntryRequest,
55
    CompileResult,
56
    FallibleClasspathEntry,
57
)
58
from pants.jvm.resolve import coursier_setup
12✔
59
from pants.jvm.resolve.common import (
12✔
60
    ArtifactRequirement,
61
    ArtifactRequirements,
62
    GatherJvmCoordinatesRequest,
63
)
64
from pants.jvm.resolve.coordinate import Coordinate, Coordinates
12✔
65
from pants.jvm.resolve.coursier_setup import Coursier, CoursierFetchProcess
12✔
66
from pants.jvm.resolve.jvm_tool import gather_coordinates_for_jvm_lockfile
12✔
67
from pants.jvm.resolve.key import CoursierResolveKey
12✔
68
from pants.jvm.resolve.lockfile_metadata import JVMLockfileMetadata, LockfileContext
12✔
69
from pants.jvm.subsystems import JvmSubsystem
12✔
70
from pants.jvm.target_types import (
12✔
71
    JvmArtifactFieldSet,
72
    JvmArtifactJarSourceField,
73
    JvmArtifactTarget,
74
    JvmResolveField,
75
)
76
from pants.jvm.util_rules import ExtractFileDigest, digest_to_file_digest
12✔
77
from pants.util.docutil import bin_name, doc_url
12✔
78
from pants.util.logging import LogLevel
12✔
79
from pants.util.ordered_set import FrozenOrderedSet, OrderedSet
12✔
80
from pants.util.strutil import bullet_list, pluralize
12✔
81

82
if TYPE_CHECKING:
83
    from pants.jvm.resolve.jvm_tool import GenerateJvmLockfileFromTool
84

85
# Module-scoped logger, named after this module per the stdlib convention.
logger = logging.getLogger(__name__)
86

87

88
class CoursierFetchRequest(ClasspathEntryRequest):
    """A `ClasspathEntryRequest` for targets matching `JvmArtifactFieldSet`.

    Handled by the `fetch_with_coursier` rule, which materializes the artifact (and its
    transitive lockfile entries) into a classpath entry.
    """

    field_sets = (JvmArtifactFieldSet,)
90

91

92
class CoursierError(Exception):
    """An exception relating to invoking Coursier or processing its output.

    Raised for stale-lockfile lookups, empty or multi-entry fetch reports, coordinate
    mismatches, and artifact digest mismatches.
    """
94

95

96
class NoCompatibleResolve(Exception):
    """No compatible resolve could be found for a set of targets."""

    def __init__(self, jvm: JvmSubsystem, msg_prefix: str, relevant_targets: Iterable[Target]):
        # Group the offending target addresses by the resolve each one declares.
        resolves_to_addresses: dict[str, list[str]] = {}
        for tgt in relevant_targets:
            if not tgt.has_field(JvmResolveField):
                continue
            resolve = tgt[JvmResolveField].normalized_value(jvm)
            resolves_to_addresses.setdefault(resolve, []).append(tgt.address.spec)

        # Render one bullet list per resolve, sorted for deterministic output.
        formatted_resolve_lists = "\n\n".join(
            f"{resolve}:\n{bullet_list(sorted(addresses))}"
            for resolve, addresses in sorted(resolves_to_addresses.items())
        )
        message = (
            f"{msg_prefix}:\n\n"
            f"{formatted_resolve_lists}\n\n"
            "Targets which will be merged onto the same classpath must share a resolve (from the "
            f"[resolve]({doc_url('reference/targets/deploy_jar#resolve')}) field)."
        )
        super().__init__(message)
116

117

118
@dataclass(frozen=True)
class CoursierLockfileEntry:
    """One resolved artifact from a Coursier-generated lockfile.

    The fields mirror the objects found under "dependencies" in Coursier's
    --json-output-file report, with one addition: `file_digest` records the
    content address of the artifact as fetched by Coursier and ingested by
    Pants, so later fetches can be verified byte-for-byte.

    For a report entry with `"coord": "com.chuusai:shapeless_2.13:2.3.3"` and a
    `"file"` path ending in `shapeless_2.13-2.3.3.jar`, the corresponding
    CoursierLockfileEntry carries the same coordinate, the bare file name, the
    parsed direct/transitive dependency coordinates, and the sha256-based
    `FileDigest` of the jar.

    `remote_url` and `pants_address` are set by Pants when `coord` matches a
    `jvm_artifact` target that had the `url` or `jar` field set, respectively.
    """

    coord: Coordinate
    file_name: str
    direct_dependencies: Coordinates
    dependencies: Coordinates
    file_digest: FileDigest
    remote_url: str | None = None
    pants_address: str | None = None

    @classmethod
    def from_json_dict(cls, entry) -> CoursierLockfileEntry:
        """Construct a CoursierLockfileEntry from its JSON dictionary representation."""
        digest_json = entry["file_digest"]
        return cls(
            coord=Coordinate.from_json_dict(entry["coord"]),
            file_name=entry["file_name"],
            direct_dependencies=Coordinates(
                Coordinate.from_json_dict(dep) for dep in entry["directDependencies"]
            ),
            dependencies=Coordinates(
                Coordinate.from_json_dict(dep) for dep in entry["dependencies"]
            ),
            file_digest=FileDigest(
                fingerprint=digest_json["fingerprint"],
                serialized_bytes_length=digest_json["serialized_bytes_length"],
            ),
            remote_url=entry.get("remote_url"),
            pants_address=entry.get("pants_address"),
        )

    def to_json_dict(self) -> dict[str, Any]:
        """Export this CoursierLockfileEntry to a JSON object."""
        return {
            "coord": self.coord.to_json_dict(),
            "directDependencies": [c.to_json_dict() for c in self.direct_dependencies],
            "dependencies": [c.to_json_dict() for c in self.dependencies],
            "file_name": self.file_name,
            "file_digest": {
                "fingerprint": self.file_digest.fingerprint,
                "serialized_bytes_length": self.file_digest.serialized_bytes_length,
            },
            "remote_url": self.remote_url,
            "pants_address": self.pants_address,
        }
201

202

203
@dataclass(frozen=True)
class CoursierResolvedLockfile:
    """An in-memory representation of Pants' Coursier lockfile format.

    All coordinates in the resolved lockfile will be compatible, so we do not need to do version
    testing when looking up coordinates.
    """

    # The resolved artifacts, in lockfile order.
    entries: tuple[CoursierLockfileEntry, ...]
    # Validation metadata parsed from the lockfile's header comment, if present.
    metadata: JVMLockfileMetadata | None = None

    def _entries_by_key(self) -> dict[Any, CoursierLockfileEntry]:
        """Index entries by (group, artifact, classifier).

        The version is deliberately excluded from the key: coordinate matches are not exact
        (see https://github.com/pantsbuild/pants/issues/13496).
        """
        return {
            (entry.coord.group, entry.coord.artifact, entry.coord.classifier): entry
            for entry in self.entries
        }

    @classmethod
    def _coordinate_not_found(cls, key: CoursierResolveKey, coord: Coordinate) -> CoursierError:
        # TODO: After fixing https://github.com/pantsbuild/pants/issues/13496, coordinate matches
        # should become exact, and this error message will capture all cases of stale lockfiles.
        return CoursierError(
            f"{coord} was not present in resolve `{key.name}` at `{key.path}`.\n"
            f"If you have recently added new `{JvmArtifactTarget.alias}` targets, you might "
            f"need to update your lockfile by running `coursier-resolve --names={key.name}`."
        )

    def direct_dependencies(
        self, key: CoursierResolveKey, coord: Coordinate
    ) -> tuple[CoursierLockfileEntry, tuple[CoursierLockfileEntry, ...]]:
        """Return the entry for the given Coordinate, and for its direct dependencies.

        Raises:
            CoursierError: if `coord` has no entry in this lockfile.
        """
        entries = self._entries_by_key()
        entry = entries.get((coord.group, coord.artifact, coord.classifier))
        if entry is None:
            raise self._coordinate_not_found(key, coord)

        return (
            entry,
            tuple(entries[(i.group, i.artifact, i.classifier)] for i in entry.direct_dependencies),
        )

    def dependencies(
        self, key: CoursierResolveKey, coord: Coordinate
    ) -> tuple[CoursierLockfileEntry, tuple[CoursierLockfileEntry, ...]]:
        """Return the entry for the given Coordinate, and for its transitive dependencies.

        Raises:
            CoursierError: if `coord` has no entry in this lockfile.
        """
        entries = self._entries_by_key()
        entry = entries.get((coord.group, coord.artifact, coord.classifier))
        if entry is None:
            raise self._coordinate_not_found(key, coord)

        return (
            entry,
            tuple(
                dep_entry
                for d in entry.dependencies
                # Coursier will pass "pom" coords through to us. These coords don't have
                # a coords entry, but all of their relevant dependencies have already been
                # taken into account and will appear in the dependencies list.
                if d.classifier != "pom"
                and (dep_entry := entries.get((d.group, d.artifact, d.classifier))) is not None
            ),
        )

    @classmethod
    def from_toml(cls, lockfile: str | bytes) -> CoursierResolvedLockfile:
        """Construct a CoursierResolvedLockfile from its TOML + metadata comment representation.

        The toml file should consist of an `[entries]` block, followed by several entries.
        """
        lockfile_str: str
        lockfile_bytes: bytes
        if isinstance(lockfile, str):
            lockfile_str = lockfile
            lockfile_bytes = lockfile.encode("utf-8")
        else:
            lockfile_str = lockfile.decode("utf-8")
            lockfile_bytes = lockfile

        contents = toml.loads(lockfile_str)
        entries = tuple(
            CoursierLockfileEntry.from_json_dict(entry) for entry in contents["entries"]
        )
        # NOTE: `delimeter` (sic) is the keyword accepted by `JVMLockfileMetadata.from_lockfile`;
        # do not "fix" the spelling here.
        metadata = JVMLockfileMetadata.from_lockfile(lockfile_bytes, delimeter="#")

        return cls(
            entries=entries,
            metadata=metadata,
        )

    @classmethod
    def from_serialized(cls, lockfile: str | bytes) -> CoursierResolvedLockfile:
        """Construct a CoursierResolvedLockfile from its serialized representation (either TOML with
        attached metadata, or old-style JSON.)."""
        return cls.from_toml(lockfile)

    def to_serialized(self) -> bytes:
        """Export this CoursierResolvedLockfile to a human-readable serialized form.

        This serialized form is intended to be checked in to the user's repo as a hermetic snapshot
        of a Coursier resolved JVM classpath.
        """
        lockfile = {
            "entries": [entry.to_json_dict() for entry in self.entries],
        }
        return toml.dumps(lockfile).encode("utf-8")
3✔
306

307

308
def classpath_dest_filename(coord: str, src_filename: str) -> str:
    """Calculates the destination filename on the classpath for the given source filename and coord.

    The coordinate's `:` separators become `_`, and the source file's extension is kept.

    TODO: This is duplicated in `COURSIER_POST_PROCESSING_SCRIPT`.
    """
    extension = os.path.splitext(src_filename)[1]
    return coord.replace(":", "_") + extension
×
316

317

318
@dataclass(frozen=True)
class CoursierResolveInfo:
    # Coordinate argument strings to pass to `coursier fetch`.
    coord_arg_strings: FrozenOrderedSet[str]
    # Coordinates whose versions are pinned via `--force-version`.
    force_version_coord_arg_strings: FrozenOrderedSet[str]
    # Additional command-line flags for the fetch invocation.
    extra_args: tuple[str, ...]
    # Input files the invocation needs (must accompany `argv`).
    digest: Digest

    @property
    def argv(self) -> Iterable[str]:
        """Return coursier arguments that can be used to compute or fetch this resolve.

        Must be used in concert with `digest`.
        """
        # Each forced coordinate becomes a `--force-version <coord>` pair.
        force_version_flags = (
            arg
            for coord in self.force_version_coord_arg_strings
            for arg in ("--force-version", coord)
        )
        return itertools.chain(self.coord_arg_strings, force_version_flags, self.extra_args)
338

339

340
@rule
async def prepare_coursier_resolve_info(
    artifact_requirements: ArtifactRequirements,
) -> CoursierResolveInfo:
    """Compute the Coursier invocation (argument strings + input digest) for the requirements.

    Local-jar requirements are rewritten into `file:` URLs under Coursier's working directory,
    per-coordinate exclusions are written to a local exclude file, and any non-jar packagings
    are requested via Coursier's `-A` flag.
    """
    # Transform requirements that correspond to local JAR files into coordinates with `file:/`
    # URLs, and put the files in the place specified by the URLs.
    no_jars: list[ArtifactRequirement] = []
    jars: list[tuple[ArtifactRequirement, JvmArtifactJarSourceField]] = []
    extra_args: list[str] = []

    # Name of the file passed via `--local-exclude-file` (only created if there are excludes).
    LOCAL_EXCLUDE_FILE = "PANTS_RESOLVE_EXCLUDES"

    # Partition requirements by whether they are backed by a local jar source field.
    for req in artifact_requirements:
        jar = req.jar
        if not jar:
            no_jars.append(req)
        else:
            jars.append((req, jar))

    excludes = [
        (req.coordinate, exclude)
        for req in artifact_requirements
        for exclude in (req.excludes or [])
    ]

    excludes_digest = EMPTY_DIGEST
    if excludes:
        # One `group:artifact--exclude` line per exclusion.
        excludes_file_content = FileContent(
            LOCAL_EXCLUDE_FILE,
            "\n".join(
                f"{coord.group}:{coord.artifact}--{exclude}" for (coord, exclude) in excludes
            ).encode("utf-8"),
        )
        excludes_digest = await create_digest(CreateDigest([excludes_file_content]))
        extra_args += ["--local-exclude-file", LOCAL_EXCLUDE_FILE]

    # Materialize each local jar's source file so it can be addressed via a `file:` URL.
    jar_file_sources = await concurrently(
        determine_source_files(SourceFilesRequest([jar_source_field]))
        for _, jar_source_field in jars
    )
    jar_file_paths = [jar_file_source.snapshot.files[0] for jar_file_source in jar_file_sources]

    resolvable_jar_requirements = [
        dataclasses.replace(
            req, jar=None, url=f"file:{Coursier.working_directory_placeholder}/{path}"
        )
        for (req, _), path in zip(jars, jar_file_paths)
    ]

    # Coursier only fetches non-jar artifact types ("packaging" in Pants parlance) if passed an `-A` option
    # explicitly requesting that the non-jar artifact(s) be fetched. This is an addition to passing the coordinate
    # with the desired type (packaging) value.
    extra_types: set[str] = set()
    for no_jar in no_jars:
        if no_jar.coordinate.packaging != "jar":
            extra_types.add(no_jar.coordinate.packaging)
    if extra_types:
        # Note: `-A` defaults to `jar,bundle` and any value set replaces (and does not supplement) those defaults,
        # so the defaults must be included here for them to remain usable.
        extra_args.extend(["-A", ",".join(sorted(["jar", "bundle", *extra_types]))])

    to_resolve = chain(no_jars, resolvable_jar_requirements)

    # The input digest carries the local jar files plus the exclude file (if any).
    digest = await merge_digests(
        MergeDigests(
            [
                *(jar_file_source.snapshot.digest for jar_file_source in jar_file_sources),
                excludes_digest,
            ]
        )
    )

    # Sort by coordinate so the resulting argument lists are deterministic.
    coord_arg_strings: OrderedSet[str] = OrderedSet()
    force_version_coord_arg_strings: OrderedSet[str] = OrderedSet()
    for req in sorted(to_resolve, key=lambda ar: ar.coordinate):
        coord_arg_str = req.to_coord_arg_str()
        coord_arg_strings.add(coord_arg_str)
        if req.force_version:
            force_version_coord_arg_strings.add(coord_arg_str)

    return CoursierResolveInfo(
        coord_arg_strings=FrozenOrderedSet(coord_arg_strings),
        force_version_coord_arg_strings=FrozenOrderedSet(force_version_coord_arg_strings),
        digest=digest,
        extra_args=tuple(extra_args),
    )
426

427

428
@rule(level=LogLevel.DEBUG)
async def coursier_resolve_lockfile(
    artifact_requirements: ArtifactRequirements,
) -> CoursierResolvedLockfile:
    """Run `coursier fetch ...` against a list of Maven coordinates and capture the result.

    This rule does two things in a single Process invocation:

        * Runs `coursier fetch` to let Coursier do the heavy lifting of resolving
          dependencies and downloading resolved artifacts (jars, etc).
        * Copies the resolved artifacts into the Process output directory, capturing
          the artifacts as content-addressed `Digest`s.

    It's important that this happens in the same process, since the process isn't
    guaranteed to run on the same machine as the rule, nor is a subsequent process
    invocation.  This guarantees that whatever Coursier resolved, it was fully
    captured into Pants' content addressed artifact storage.

    Note however that we still get the benefit of Coursier's "global" cache if it
    had already been run on the machine where the `coursier fetch` runs, so rerunning
    `coursier fetch` tends to be fast in practice.

    Finally, this rule bundles up the result into a `CoursierResolvedLockfile`.  This
    data structure encapsulates everything necessary to either materialize the
    resolved dependencies to a classpath for Java invocations, or to write the
    lockfile out to the workspace to hermetically freeze the result of the resolve.
    """

    # Nothing to resolve: return an empty lockfile rather than invoking Coursier.
    if len(artifact_requirements) == 0:
        return CoursierResolvedLockfile(entries=())

    coursier_resolve_info = await prepare_coursier_resolve_info(artifact_requirements)

    coursier_report_file_name = "coursier_report.json"

    process_result = await fallible_to_exec_result_or_raise(
        **implicitly(
            CoursierFetchProcess(
                args=(
                    coursier_report_file_name,
                    *coursier_resolve_info.argv,
                ),
                input_digest=coursier_resolve_info.digest,
                output_directories=("classpath",),
                output_files=(coursier_report_file_name,),
                description=(
                    "Running `coursier fetch` against "
                    f"{pluralize(len(artifact_requirements), 'requirement')}: "
                    f"{', '.join(req.to_coord_arg_str() for req in artifact_requirements)}"
                ),
            )
        )
    )

    # Parse the JSON report Coursier wrote alongside the fetched classpath.
    report_digest = await digest_subset_to_digest(
        DigestSubset(process_result.output_digest, PathGlobs([coursier_report_file_name]))
    )
    report_contents = await get_digest_contents(report_digest)
    report = json.loads(report_contents[0].content)

    # Compute each artifact's deterministic classpath file name, then extract and digest each
    # fetched file individually.
    artifact_file_names = tuple(
        classpath_dest_filename(dep["coord"], dep["file"]) for dep in report["dependencies"]
    )
    artifact_output_paths = tuple(f"classpath/{file_name}" for file_name in artifact_file_names)
    artifact_digests = await concurrently(
        digest_subset_to_digest(
            DigestSubset(process_result.output_digest, PathGlobs([output_path]))
        )
        for output_path in artifact_output_paths
    )
    stripped_artifact_digests = await concurrently(
        remove_prefix(RemovePrefix(artifact_digest, "classpath"))
        for artifact_digest in artifact_digests
    )
    artifact_file_digests = await concurrently(
        digest_to_file_digest(ExtractFileDigest(stripped_artifact_digest, file_name))
        for stripped_artifact_digest, file_name in zip(
            stripped_artifact_digests, artifact_file_names
        )
    )

    # First pass: record coordinates, dependency edges, file names, and content digests.
    first_pass_lockfile = CoursierResolvedLockfile(
        entries=tuple(
            CoursierLockfileEntry(
                coord=Coordinate.from_coord_str(dep["coord"]),
                direct_dependencies=Coordinates(
                    Coordinate.from_coord_str(dd) for dd in dep["directDependencies"]
                ),
                dependencies=Coordinates(Coordinate.from_coord_str(d) for d in dep["dependencies"]),
                file_name=file_name,
                file_digest=artifact_file_digest,
            )
            for dep, file_name, artifact_file_digest in zip(
                report["dependencies"], artifact_file_names, artifact_file_digests
            )
        )
    )

    # Second pass: carry over `url`/`jar` provenance from the original requirements onto the
    # matching entries.
    inverted_artifacts = {req.coordinate: req for req in artifact_requirements}
    new_entries = []
    for entry in first_pass_lockfile.entries:
        req = inverted_artifacts.get(entry.coord)
        if req:
            address = req.jar.address if req.jar else None
            address_spec = address.spec if address else None
            entry = dataclasses.replace(entry, remote_url=req.url, pants_address=address_spec)
        new_entries.append(entry)

    return CoursierResolvedLockfile(entries=tuple(new_entries))
×
537

538

539
@rule
async def get_coursier_lockfile_for_resolve(
    coursier_resolve: CoursierResolveKey,
) -> CoursierResolvedLockfile:
    """Parse the resolve key's lockfile digest into its in-memory representation."""
    digest_contents = await get_digest_contents(coursier_resolve.digest)
    return CoursierResolvedLockfile.from_serialized(digest_contents[0].content)
×
546

547

548
class ResolvedClasspathEntries(Collection[ClasspathEntry]):
    """A collection of resolved classpath entries, one per fetched lockfile entry."""
550

551

552
@rule
async def coursier_fetch_one_coord(
    request: CoursierLockfileEntry,
) -> ClasspathEntry:
    """Run `coursier fetch --intransitive` to fetch a single artifact.

    This rule exists to permit efficient subsetting of a "global" classpath
    in the form of a lockfile.  Callers can determine what subset of dependencies
    from the lockfile are needed for a given target, then request those
    lockfile entries individually.

    By fetching only one entry at a time, we maximize our cache efficiency.  If instead
    we fetched the entire subset that the caller wanted, there would be a different cache
    key for every possible subset.

    This rule also guarantees exact reproducibility.  If all caches have been
    removed, `coursier fetch` will re-download the artifact, and this rule will
    confirm that what was downloaded matches exactly (by content digest) what
    was specified in the lockfile (what Coursier originally downloaded).

    Raises:
        CoursierError: if the fetch report is empty or has multiple entries, if the resolved
            coordinate differs from the requested one, or if the fetched artifact's digest does
            not match the lockfile's recorded digest.
    """

    # Prepare any URL- or JAR-specifying entries for use with Coursier
    req: ArtifactRequirement
    if request.pants_address:
        # The entry came from a `jvm_artifact` with a `jar` field: resolve that address so the
        # local jar source can be handed to Coursier.
        targets = await resolve_targets(
            **implicitly(
                UnparsedAddressInputs(
                    [request.pants_address],
                    owning_address=None,
                    description_of_origin="<infallible - coursier fetch>",
                )
            )
        )
        req = ArtifactRequirement(request.coord, jar=targets[0][JvmArtifactJarSourceField])
    else:
        # Otherwise fetch by coordinate, optionally overriding the source URL.
        req = ArtifactRequirement(request.coord, url=request.remote_url)

    coursier_resolve_info = await prepare_coursier_resolve_info(ArtifactRequirements([req]))

    coursier_report_file_name = "coursier_report.json"

    # `--intransitive` asks Coursier for exactly this coordinate, without its dependencies.
    process_result = await fallible_to_exec_result_or_raise(
        **implicitly(
            CoursierFetchProcess(
                args=(
                    coursier_report_file_name,
                    "--intransitive",
                    *coursier_resolve_info.argv,
                ),
                input_digest=coursier_resolve_info.digest,
                output_directories=("classpath",),
                output_files=(coursier_report_file_name,),
                description=f"Fetching with coursier: {request.coord.to_coord_str()}",
            )
        )
    )
    report_digest = await digest_subset_to_digest(
        DigestSubset(process_result.output_digest, PathGlobs([coursier_report_file_name]))
    )
    report_contents = await get_digest_contents(report_digest)
    report = json.loads(report_contents[0].content)

    # An intransitive fetch must produce exactly one dependency in the report.
    report_deps = report["dependencies"]
    if len(report_deps) == 0:
        raise CoursierError("Coursier fetch report has no dependencies (i.e. nothing was fetched).")
    elif len(report_deps) > 1:
        raise CoursierError(
            "Coursier fetch report has multiple dependencies, but exactly 1 was expected."
        )

    dep = report_deps[0]
    resolved_coord = Coordinate.from_coord_str(dep["coord"])
    if resolved_coord != request.coord:
        raise CoursierError(
            f'Coursier resolved coord "{resolved_coord.to_coord_str()}" does not match requested coord "{request.coord.to_coord_str()}".'
        )

    classpath_dest_name = classpath_dest_filename(dep["coord"], dep["file"])
    classpath_dest = f"classpath/{classpath_dest_name}"

    # Extract the single fetched file and strip the `classpath/` prefix.
    resolved_file_digest = await digest_subset_to_digest(
        DigestSubset(process_result.output_digest, PathGlobs([classpath_dest]))
    )
    stripped_digest = await remove_prefix(RemovePrefix(resolved_file_digest, "classpath"))
    file_digest = await digest_to_file_digest(
        ExtractFileDigest(stripped_digest, classpath_dest_name)
    )
    # Reproducibility check: the fetched bytes must match the lockfile's recorded digest.
    if file_digest != request.file_digest:
        raise CoursierError(
            f"Coursier fetch for '{resolved_coord}' succeeded, but fetched artifact {file_digest} did not match the expected artifact: {request.file_digest}."
        )
    return ClasspathEntry(digest=stripped_digest, filenames=(classpath_dest_name,))
×
644

645

646
@rule(desc="Fetch with coursier")
async def fetch_with_coursier(request: CoursierFetchRequest) -> FallibleClasspathEntry:
    """Fetch a `jvm_artifact` component and its transitive lockfile entries as one classpath
    entry.

    Raises:
        ValueError: if the requirement no longer matches the lockfile's validation metadata
            (i.e. the lockfile is stale).
    """
    # TODO: Loading this per JvmArtifact.
    lockfile = await get_coursier_lockfile_for_resolve(request.resolve)

    requirement = ArtifactRequirement.from_jvm_artifact_target(request.component.representative)

    # Guard against stale lockfiles: the requirement must still be covered by the lockfile's
    # embedded metadata (when metadata is present).
    if lockfile.metadata and not lockfile.metadata.is_valid_for(
        [requirement], LockfileContext.USER
    ):
        raise ValueError(
            f"Requirement `{requirement.to_coord_arg_str()}` has changed since the lockfile "
            f"for {request.resolve.path} was generated. Run `{bin_name()} generate-lockfiles` to update your "
            "lockfile based on the new requirements."
        )

    # All of the transitive dependencies are exported.
    # TODO: Expose an option to control whether this exports only the root, direct dependencies,
    # transitive dependencies, etc.
    assert len(request.component.members) == 1, "JvmArtifact does not have dependencies."
    root_entry, transitive_entries = lockfile.dependencies(
        request.resolve,
        requirement.coordinate,
    )

    # Fetch the root and every transitive entry concurrently, then merge their digests.
    classpath_entries = await concurrently(
        coursier_fetch_one_coord(entry) for entry in (root_entry, *transitive_entries)
    )
    exported_digest = await merge_digests(MergeDigests(cpe.digest for cpe in classpath_entries))

    return FallibleClasspathEntry(
        description=str(request.component),
        result=CompileResult.SUCCEEDED,
        output=ClasspathEntry.merge(exported_digest, classpath_entries),
        exit_code=0,
    )
682

683

684
@rule(level=LogLevel.DEBUG)
async def coursier_fetch_lockfile(lockfile: CoursierResolvedLockfile) -> ResolvedClasspathEntries:
    """Fetch every artifact in a lockfile, concurrently."""
    entries = await concurrently(coursier_fetch_one_coord(e) for e in lockfile.entries)
    return ResolvedClasspathEntries(entries)
×
691

692

693
@rule
async def select_coursier_resolve_for_targets(
    coarsened_targets: CoarsenedTargets, jvm: JvmSubsystem
) -> CoursierResolveKey:
    """Selects and validates (transitively) a single resolve for a set of roots in a compile graph.

    In most cases, a `CoursierResolveKey` should be requested for a single `CoarsenedTarget` root,
    which avoids coupling un-related roots unnecessarily. But in other cases, a single compatible
    resolve is required for multiple roots (such as when running a `repl` over unrelated code), and
    in that case there might be multiple CoarsenedTargets.
    """
    targets = list(coarsened_targets.closure())

    # Every target in the closure that declares a resolve must agree on it.
    declared_resolves = {
        tgt[JvmResolveField].normalized_value(jvm)
        for tgt in targets
        if tgt.has_field(JvmResolveField)
    }
    if len(declared_resolves) > 1:
        raise NoCompatibleResolve(
            jvm, "The selected targets did not have a resolve in common", targets
        )
    resolve = next(iter(declared_resolves), None) or jvm.default_resolve

    # Load the lockfile for the selected resolve.
    resolve_path = jvm.resolves[resolve]
    lockfile_source = PathGlobs(
        [resolve_path],
        glob_match_error_behavior=GlobMatchErrorBehavior.error,
        description_of_origin=f"The resolve `{resolve}` from `[jvm].resolves`",
    )
    resolve_digest = await path_globs_to_digest(lockfile_source)
    return CoursierResolveKey(resolve, resolve_path, resolve_digest)
×
733

734

735
@dataclass(frozen=True)
class ToolClasspathRequest:
    """A request to set up the classpath for a JVM tool by fetching artifacts and merging the
    classpath.

    :param prefix: if set, should be a relative directory that will
        be prepended to every classpath element.  This is useful for
        keeping all classpath elements isolated under a single directory
        in a process invocation, where other inputs on the process's
        root directory might interfere with un-prefixed classpath
        entries (or vice versa).
    """

    prefix: str | None = None
    lockfile: GenerateJvmLockfileFromTool | None = None
    artifact_requirements: ArtifactRequirements = ArtifactRequirements()

    def __post_init__(self) -> None:
        # Exactly one artifact source is required: supplying both, or neither, is a
        # construction error.
        if bool(self.lockfile) == bool(self.artifact_requirements):
            raise AssertionError(
                f"Exactly one of `lockfile` or `artifact_requirements` must be provided: {self}"
            )
757

758

759
@dataclass(frozen=True)
class ToolClasspath:
    """A fully fetched and merged classpath for running a JVM tool."""

    content: Snapshot

    @property
    def digest(self) -> Digest:
        """The digest backing the merged classpath snapshot."""
        return self.content.digest

    def classpath_entries(self, root: str | None = None) -> Iterator[str]:
        """Returns optionally prefixed classpath entry filenames.

        :param root: if set, will be prepended to all entries.  This is useful
            if the process working directory is not the same as the root
            directory for the process input `Digest`.
        """
        if root is None:
            yield from self.content.files
        else:
            yield from (os.path.join(root, name) for name in self.content.files)
782

783

784
@rule(level=LogLevel.DEBUG)
async def materialize_classpath_for_tool(request: ToolClasspathRequest) -> ToolClasspath:
    """Resolve, fetch, and merge the classpath for a JVM tool.

    The artifacts come either from an ad-hoc resolve of `request.artifact_requirements`,
    or from the lockfile named by `request.lockfile` (which is validated against its
    configured artifact inputs before use). The fetched entries are merged into a single
    snapshot, optionally placed under `request.prefix`.
    """
    if request.artifact_requirements:
        # Ad-hoc resolve directly from the requirements: no lockfile involved.
        resolved = await coursier_resolve_lockfile(request.artifact_requirements)
    else:
        tool_lockfile = request.lockfile
        assert tool_lockfile is not None
        regenerate_command = (
            f"`{GenerateLockfilesSubsystem.name} --resolve={tool_lockfile.resolve_name}`"
        )
        if tool_lockfile.lockfile == DEFAULT_TOOL_LOCKFILE:
            # The built-in default lockfile ships as package data.
            raw_lockfile = (
                importlib.resources.files(tool_lockfile.default_lockfile_resource[0])
                .joinpath(tool_lockfile.default_lockfile_resource[1])
                .read_bytes()
            )
            resolved = CoursierResolvedLockfile.from_serialized(raw_lockfile)
        else:
            snapshot = await digest_to_snapshot(
                **implicitly(PathGlobs([tool_lockfile.lockfile]))
            )
            if not snapshot.files:
                raise ValueError(
                    f"No lockfile found at {tool_lockfile.lockfile}, which is configured "
                    f"by the option {tool_lockfile.lockfile_option_name}."
                    f"Run {regenerate_command} to generate it."
                )

            resolved = await get_coursier_lockfile_for_resolve(
                CoursierResolveKey(
                    name=tool_lockfile.resolve_name,
                    path=tool_lockfile.lockfile,
                    digest=snapshot.digest,
                )
            )

        # Validate that the lockfile still matches the configured requirements.
        input_coordinates = await gather_coordinates_for_jvm_lockfile(
            GatherJvmCoordinatesRequest(
                tool_lockfile.artifact_inputs, tool_lockfile.artifact_option_name
            )
        )
        if resolved.metadata and not resolved.metadata.is_valid_for(
            input_coordinates, LockfileContext.TOOL
        ):
            raise ValueError(
                f"The lockfile {tool_lockfile.lockfile} (configured by the option "
                f"{tool_lockfile.lockfile_option_name}) was generated with different requirements "
                f"than are currently set via {tool_lockfile.artifact_option_name}. Run "
                f"{regenerate_command} to regenerate the lockfile."
            )

    # Fetch everything in the resolution and merge into one snapshot.
    entries = await coursier_fetch_lockfile(resolved)
    merged = await digest_to_snapshot(
        **implicitly(MergeDigests(entry.digest for entry in entries))
    )
    if request.prefix is not None:
        merged = await digest_to_snapshot(
            **implicitly(AddPrefix(merged.digest, request.prefix))
        )
    return ToolClasspath(merged)
843

844

845
def rules():
    """Return this module's rules plus Coursier setup rules and the classpath union."""
    fetch_union = UnionRule(ClasspathEntryRequest, CoursierFetchRequest)
    return [*collect_rules(), *coursier_setup.rules(), fetch_union]
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc