• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

pantsbuild / pants / 25443604553

06 May 2026 03:05PM UTC coverage: 92.879% (-0.04%) from 92.915%
25443604553

push

github

web-flow
[pants_ng] Scaffolding for a pants_ng mode. (#23319)

In this mode the command line is parsed as an
NG invocation, and dispatched appropriately.

Of course at the moment there are no
implementations to dispatch to. That will follow.

This does expose a new option, `pants_ng` to users. 
There is a big warning not to set it, but we're not trying
to hide that we're working on a new thing, so I am
comfortable with this.

25 of 76 new or added lines in 9 files covered. (32.89%)

1294 existing lines in 76 files now uncovered.

92234 of 99306 relevant lines covered (92.88%)

4.05 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

94.26
/src/python/pants/backend/python/util_rules/pex_requirements.py
1
# Copyright 2022 Pants project contributors (see CONTRIBUTORS.md).
2
# Licensed under the Apache License, Version 2.0 (see LICENSE).
3

4
from __future__ import annotations
12✔
5

6
import importlib.resources
12✔
7
import json
12✔
8
import logging
12✔
9
import tomllib
12✔
10
from collections.abc import Iterable, Iterator
12✔
11
from dataclasses import dataclass, field
12✔
12
from typing import TYPE_CHECKING
12✔
13
from urllib.parse import urlparse
12✔
14

15
from pants.backend.python.subsystems.repos import PythonRepos
12✔
16
from pants.backend.python.subsystems.setup import InvalidLockfileBehavior, PythonSetup
12✔
17
from pants.backend.python.target_types import PythonRequirementsField
12✔
18
from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints
12✔
19
from pants.backend.python.util_rules.lockfile_metadata import (
12✔
20
    InvalidPythonLockfileReason,
21
    LockfileFormat,
22
    PythonLockfileMetadata,
23
    PythonLockfileMetadataV2,
24
    PythonLockfileMetadataV8,
25
)
26
from pants.build_graph.address import Address
12✔
27
from pants.core.util_rules.lockfile_metadata import (
12✔
28
    InvalidLockfileError,
29
    LockfileMetadataValidation,
30
    NoLockfileMetadataBlock,
31
)
32
from pants.engine.engine_aware import EngineAwareParameter
12✔
33
from pants.engine.fs import CreateDigest, Digest, FileContent, GlobMatchErrorBehavior, PathGlobs
12✔
34
from pants.engine.internals.native_engine import IntrinsicError
12✔
35
from pants.engine.intrinsics import (
12✔
36
    create_digest,
37
    get_digest_contents,
38
    get_digest_entries,
39
    path_globs_to_digest,
40
)
41
from pants.engine.rules import collect_rules, concurrently, implicitly, rule
12✔
42
from pants.engine.unions import UnionMembership
12✔
43
from pants.util.docutil import bin_name, doc_url
12✔
44
from pants.util.ordered_set import FrozenOrderedSet
12✔
45
from pants.util.pip_requirement import PipRequirement
12✔
46
from pants.util.requirements import parse_requirements_file
12✔
47
from pants.util.strutil import comma_separated_list, pluralize, softwrap
12✔
48

49
if TYPE_CHECKING:
50
    from pants.backend.python.util_rules.pex import Pex
51

52

53
# Module-level logger; all diagnostics in this file go through it.
logger = logging.getLogger(__name__)
54

55

56
@dataclass(frozen=True)
class Resolve:
    """A named resolve for a "user lockfile".

    Soon to be the only kind of lockfile, as this class will help get rid of the
    "tool lockfile" concept.
    """

    # TODO: Once we get rid of old-style tool lockfiles we can possibly
    #  unify this with EntireLockfile.
    # TODO: We might want to add the requirements subset to this data structure,
    #  to further detangle this from PexRequirements.

    # The resolve's name, as configured in `[python].resolves` (see get_lockfile_for_resolve).
    name: str

    # Presumably: if true, consume the entire lockfile rather than a subset
    # (cf. EntireLockfile) — confirm against callers outside this file.
    use_entire_lockfile: bool
68

69

70
@dataclass(frozen=True)
class Lockfile:
    """A pointer to a lockfile: a location URL plus the resolve it backs."""

    # A plain path, or a file:// / resource:// URL (see read_file_or_resource).
    url: str
    # Human-readable description of where `url` came from, for error messages.
    url_description_of_origin: str
    # The name of the resolve this lockfile backs.
    resolve_name: str
    # If known, the expected invalidation digest of the lockfile content,
    # compared during metadata validation (see validate_metadata).
    lockfile_hex_digest: str | None = None
76

77

78
@rule
async def get_lockfile_for_resolve(resolve: Resolve, python_setup: PythonSetup) -> Lockfile:
    """Look up the lockfile registered for a named resolve in `[python].resolves`.

    Raises ValueError if no lockfile path is configured for the resolve's name.
    """
    if not (lockfile_path := python_setup.resolves.get(resolve.name)):
        raise ValueError(f"No such resolve: {resolve.name}")
    return Lockfile(
        url=lockfile_path,
        resolve_name=resolve.name,
        url_description_of_origin=f"the resolve `{resolve.name}`",
    )
88

89

90
@dataclass(frozen=True)
class LoadedLockfile:
    """A lockfile after loading and header stripping.

    Validation is deferred until consumption time, because each consumed subset (in the case of a
    PEX-native lockfile) can be individually validated.
    """

    # The digest of the loaded lockfile (which may not be identical to the input,
    # e.g. after `//` comment stripping for PEX-format lockfiles).
    lockfile_digest: Digest
    # The path of the loaded lockfile within the Digest.
    lockfile_path: str
    # The loaded metadata for this lockfile, if any. hash=False: excluded from the
    # dataclass-generated hash.
    metadata: PythonLockfileMetadata | None = field(hash=False)
    # An estimate of the number of requirements in this lockfile, to be used as a heuristic for
    # available parallelism.
    requirement_estimate: int
    # The format of the loaded lockfile.
    lockfile_format: LockfileFormat
    # If lockfile_format is ConstraintsDeprecated, the lockfile parsed as constraints strings,
    # for use when the lockfile needs to be subsetted (see #15031, #12222).
    as_constraints_strings: FrozenOrderedSet[str] | None
    # The original file or file content (which may not have identical content to the output
    # `lockfile_digest`).
    original_lockfile: Lockfile
115

116

117
@dataclass(frozen=True)
class LoadedLockfileRequest:
    """A request to load and validate the content of the given lockfile."""

    # The lockfile to load (see the load_lockfile rule).
    lockfile: Lockfile
122

123

124
def strip_comments_from_pex_json_lockfile(lockfile_bytes: bytes) -> bytes:
    """Drop the `//`-prefixed header lines that Pants prepends, since they are not valid JSON.

    Only lines whose first non-whitespace characters are `//` are removed — that is all Pants
    will ever add. If users add their own comments of any other form, things will fail.
    """

    def _is_comment(line: bytes) -> bool:
        return line.lstrip().startswith(b"//")

    return b"\n".join(line for line in lockfile_bytes.splitlines() if not _is_comment(line))
133

134

135
def is_probably_pex_json_lockfile(lockfile_bytes: bytes) -> bool:
    """Heuristically detect whether content looks like a Pex JSON lockfile.

    Returns True if the first meaningful line (skipping blank lines and `//` comments)
    starts with `{`. Returns False for empty input or comment-only input.
    """
    for line in lockfile_bytes.splitlines():
        # Strip whitespace before both checks so that whitespace-only lines and comments
        # with leading indentation are skipped, matching the lstrip-based behavior of
        # strip_comments_from_pex_json_lockfile. (Previously a whitespace-only line, or
        # an indented `//` comment, would short-circuit detection to a false negative.)
        stripped = line.strip()
        if stripped and not stripped.startswith(b"//"):
            # Note that pip/Pex complain if a requirements.txt style starts with `{`.
            return stripped.startswith(b"{")
    return False
141

142

143
def _pex_lockfile_requirement_count(lockfile_bytes: bytes) -> int:
12✔
144
    # TODO: this is a very naive heuristic that will overcount, and also relies on Pants
145
    #  setting `--indent` when generating lockfiles. More robust would be parsing the JSON
146
    #  and getting the len(locked_resolves.locked_requirements.project_name), but we risk
147
    #  if Pex ever changes its lockfile format.
148

149
    num_lines = len(lockfile_bytes.splitlines())
12✔
150
    # These are very naive estimates, and they bias towards overcounting. For example, requirements
151
    # often are 20+ lines.
152
    num_lines_for_options = 10
12✔
153
    lines_per_req = 10
12✔
154
    return max((num_lines - num_lines_for_options) // lines_per_req, 2)
12✔
155

156

157
def get_metadata(
    python_setup: PythonSetup,
    lock_bytes: bytes,
    lockfile_path: str | None,
    resolve_name: str,
    delimiter: str,
) -> PythonLockfileMetadata | None:
    """Parse the Pants metadata header block out of a lockfile, if present.

    Returns None when lockfile validation is disabled, or when the lockfile has no
    metadata block (i.e. it was not generated by Pants). Any other InvalidLockfileError
    raised while parsing the block propagates.
    """
    if python_setup.invalid_lockfile_behavior == InvalidLockfileBehavior.ignore:
        # Validation is disabled, so there is no point parsing the header.
        return None
    try:
        return PythonLockfileMetadata.from_lockfile(
            lockfile=lock_bytes,
            lockfile_path=lockfile_path,
            resolve_name=resolve_name,
            # NB: "delimeter" (sic) is the spelling of the upstream keyword argument.
            delimeter=delimiter,
        )
    except NoLockfileMetadataBlock:
        # We don't validate if the file isn't a pants-generated lockfile (as determined
        # by the lack of a metadata block). But we propagate any other type of
        # InvalidLockfileError incurred while parsing the metadata block.
        logger.debug(
            f"Lockfile for resolve {resolve_name} "
            f"{('at ' + lockfile_path) if lockfile_path else ''}"
            f" has no metadata block, so was not generated by Pants. "
            f"Lockfile will not be validated."
        )
        return None
184

185

186
async def read_file_or_resource(url: str, description_of_origin: str) -> Digest:
12✔
187
    """Read from a path, file:// or resource:// URL and return the digest of the content.
188

189
    If no content is found at the path/URL, raise.
190
    """
191
    parts = urlparse(url)
12✔
192
    # urlparse retains the leading / in URLs with a netloc.
193
    path = parts.path[1:] if parts.path.startswith("/") else parts.path
12✔
194
    if parts.scheme in {"", "file"}:
12✔
195
        digest = await path_globs_to_digest(
7✔
196
            PathGlobs(
197
                [path],
198
                glob_match_error_behavior=GlobMatchErrorBehavior.error,
199
                description_of_origin=description_of_origin,
200
            )
201
        )
202
    elif parts.scheme == "resource":
12✔
203
        _fc = FileContent(
12✔
204
            path,
205
            # The "netloc" in our made-up "resource://" scheme is the package.
206
            importlib.resources.files(parts.netloc).joinpath(path).read_bytes(),
207
        )
208
        digest = await create_digest(CreateDigest([_fc]))
12✔
209
    else:
210
        raise ValueError(
×
211
            f"Unsupported scheme {parts.scheme} for URL: {url} (origin: {description_of_origin})"
212
        )
213
    return digest
12✔
214

215

216
@rule
async def load_lockfile(
    request: LoadedLockfileRequest,
    python_setup: PythonSetup,
) -> LoadedLockfile:
    """Load a lockfile's content, determine its format, and extract its Pants metadata.

    Metadata is read from a sidecar metadata file when one exists, else (for Pex and
    deprecated-constraints formats) from a header block embedded in the lockfile itself.
    For PEX-format lockfiles, `//` comment lines are stripped so the content is valid JSON.
    """
    lockfile = request.lockfile
    # TODO: This is temporary. Once we regenerate all embedded lockfiles to have sidecar metadata
    #  files instead of metadata front matter, we won't need to call get_metadata() on them.
    synthetic_lock = lockfile.url.startswith("resource://")
    lockfile_digest = await read_file_or_resource(lockfile.url, lockfile.url_description_of_origin)
    lockfile_digest_entries = await get_digest_entries(lockfile_digest)
    lockfile_path = lockfile_digest_entries[0].path

    lockfile_contents = await get_digest_contents(lockfile_digest)
    lock_bytes = lockfile_contents[0].content
    lockfile_format: LockfileFormat | None = None
    constraints_strings = None

    metadata_url = PythonLockfileMetadata.metadata_location_for_lockfile(lockfile.url)
    metadata = None

    # If there's a sidecar metadata file, always load it, at least to get the lockfile_format.
    try:
        metadata_digest = await read_file_or_resource(
            metadata_url,
            description_of_origin="We squelch errors, so this is never seen by users",
        )
        digest_contents = await get_digest_contents(metadata_digest)
        metadata_bytes = digest_contents[0].content
        json_dict = json.loads(metadata_bytes)
        # NOTE(review): the error_suffix message below is missing the closing backtick after
        # the generate-lockfiles command — confirm and fix the string upstream.
        metadata = PythonLockfileMetadata.from_json_dict(
            json_dict,
            lockfile_description=f"the lockfile for `{lockfile.resolve_name}`",
            error_suffix=softwrap(
                f"""
                To resolve this error, you will need to regenerate the lockfile by running
                `{bin_name()} generate-lockfiles --resolve={lockfile.resolve_name}.
                """
            ),
        )
        # V8 metadata records the format explicitly; earlier versions do not.
        if isinstance(metadata, PythonLockfileMetadataV8):
            lockfile_format = metadata.lockfile_format
    except IntrinsicError:
        # No metadata file or resource found, so fall through to finding a metadata header block
        # prepended to the lockfile itself.
        pass

    # If this is a uv lockfile then its metadata will have told us so, otherwise fall back
    # to detection (since older lockfiles may not have the format field in the metadata).
    lockfile_format = lockfile_format or (
        LockfileFormat.PEX
        if is_probably_pex_json_lockfile(lock_bytes)
        else LockfileFormat.CONSTRAINTS_DEPRECATED
    )

    if lockfile_format == LockfileFormat.CONSTRAINTS_DEPRECATED:
        lock_string = lock_bytes.decode()
        constraints_strings = FrozenOrderedSet(
            str(req) for req in parse_requirements_file(lock_string, rel_path=lockfile_path)
        )

    if lockfile_format == LockfileFormat.PEX:
        # Strip Pants' `//` header so the digest holds valid JSON for Pex.
        stripped_lock_bytes = strip_comments_from_pex_json_lockfile(lock_bytes)
        lockfile_digest = await create_digest(
            CreateDigest([FileContent(lockfile_path, stripped_lock_bytes)])
        )

    if not metadata and python_setup.invalid_lockfile_behavior != InvalidLockfileBehavior.ignore:
        # uv lockfiles must have sidecar metadata, so this can only be Pex or ConstraintsDeprecated.
        header_delimiter = "//" if lockfile_format == LockfileFormat.PEX else "#"
        metadata = get_metadata(
            python_setup,
            lock_bytes,
            # A synthetic (resource://) lock has no meaningful on-disk path to report.
            None if synthetic_lock else lockfile_path,
            lockfile.resolve_name,
            header_delimiter,
        )

    match lockfile_format:
        case LockfileFormat.UV:
            # Use the virtual root package's direct dependencies as a rough estimate
            # of how many packages need resolving.
            # NB: The uv project recommends not relying on lockfile internals, but
            # this particular aspect seems relatively stable in practice.
            lockfile_toml = tomllib.loads(lock_bytes.decode())
            root_package = next(
                (
                    p
                    for p in lockfile_toml.get("package", [])
                    if p.get("source", {}).get("virtual") == "."
                ),
                None,
            )
            deps = root_package.get("dependencies") if root_package else None
            if deps is None:
                logger.warning(
                    f"Couldn't find the virtual root [[package]].dependencies entry in {lockfile_path}. "
                    "Has the uv lockfile format changed? This will not affect correctness but "
                    "may affect performance. Please reach out to the Pants team if you encounter "
                    "this warning."
                )
            # Fall back to an arbitrary small estimate when the root package isn't found.
            requirement_estimate = 4 if deps is None else len(deps)
        case LockfileFormat.PEX:
            requirement_estimate = _pex_lockfile_requirement_count(lock_bytes)
        case LockfileFormat.CONSTRAINTS_DEPRECATED:
            # Note: this is a very naive heuristic. It will overcount because entries often
            # have >1 line due to `--hash`.
            requirement_estimate = len(lock_bytes.splitlines())
        case _:
            raise ValueError(f"Unknown lockfile format: {lockfile_format}")

    return LoadedLockfile(
        lockfile_digest,
        lockfile_path,
        metadata,
        requirement_estimate,
        lockfile_format,
        constraints_strings,
        original_lockfile=lockfile,
    )
336

337

338
@dataclass(frozen=True)
class EntireLockfile:
    """A request to resolve the entire contents of a lockfile.

    This resolution mode is used in a few cases:
    1. for poetry or handwritten lockfiles (which do not support being natively subsetted the
       way that a PEX lockfile can be), in order to build a repository-PEX to subset separately.
    2. for tool lockfiles, which (regardless of format), need to resolve the entire lockfile
       content anyway.
    """

    # The lockfile to resolve in its entirety.
    lockfile: Lockfile
    # If available, the current complete set of requirement strings that influence this lockfile.
    # Used for metadata validation.
    complete_req_strings: tuple[str, ...] | None = None
353

354

355
@dataclass(frozen=True)
class PexRequirements:
    """A request to resolve a series of requirements (optionally from a "superset" resolve)."""

    # Requirement strings, or addresses of targets that carry them (normalized to sorted order
    # in __init__).
    req_strings_or_addrs: FrozenOrderedSet[str | Address]
    # Constraints strings to apply during the resolve (normalized to sorted order in __init__).
    constraints_strings: FrozenOrderedSet[str]
    # If these requirements should be resolved as a subset of either a repository PEX, or a
    # PEX-native lockfile, the superset to use.
    # NB: Use of a lockfile here asserts that the lockfile is PEX-native, because legacy
    # lockfiles do not support subset resolves.
    from_superset: Pex | Resolve | None
    # A human-readable description of what these requirements represent, for error messages.
    description_of_origin: str

    def __init__(
        self,
        req_strings_or_addrs: Iterable[str | Address] = (),
        *,
        constraints_strings: Iterable[str] = (),
        from_superset: Pex | Resolve | None = None,
        description_of_origin: str = "",
    ) -> None:
        """
        :param req_strings_or_addrs: The requirement strings to resolve, or addresses
          of targets that refer to them, or string specs of such addresses.
        :param constraints_strings: Constraints strings to apply during the resolve.
        :param from_superset: An optional superset PEX or lockfile to resolve the req strings from.
        :param description_of_origin: A human-readable description of what these requirements
          represent, for use in error messages.
        """
        # The dataclass is frozen, so field assignment in this custom __init__ must go
        # through object.__setattr__.
        # NOTE(review): sorting a mix of str and Address relies on cross-type comparison
        # between the two — confirm Address supports ordering against str.
        object.__setattr__(
            self, "req_strings_or_addrs", FrozenOrderedSet(sorted(req_strings_or_addrs))
        )
        object.__setattr__(
            self, "constraints_strings", FrozenOrderedSet(sorted(constraints_strings))
        )
        object.__setattr__(self, "from_superset", from_superset)
        object.__setattr__(self, "description_of_origin", description_of_origin)

    @classmethod
    def req_strings_from_requirement_fields(
        cls, fields: Iterable[PythonRequirementsField]
    ) -> FrozenOrderedSet[str]:
        """A convenience when you only need the raw requirement strings from fields and don't need
        to consider things like constraints or resolves."""
        return FrozenOrderedSet(
            sorted(str(python_req) for fld in fields for python_req in fld.value)
        )

    def __bool__(self) -> bool:
        # Truthy iff there is at least one requirement or address to resolve.
        return bool(self.req_strings_or_addrs)
404

405

406
@dataclass(frozen=True)
class ResolvePexConstraintsFile:
    """A loaded constraints file for a resolve (see `[python].resolves_to_constraints_file`)."""

    # Digest containing the constraints file.
    digest: Digest
    # Path of the constraints file within `digest`.
    path: str
    # The parsed constraint requirements.
    constraints: FrozenOrderedSet[PipRequirement]
411

412

413
@dataclass(frozen=True)
class ResolveConfig:
    """Configuration from `[python]` that impacts how the resolve is created."""

    # From `[python-repos].indexes`.
    indexes: tuple[str, ...]
    # From `[python-repos].find_links` (see determine_resolve_config).
    find_links: tuple[str, ...]
    # From `[python].manylinux`; None disables manylinux wheels.
    manylinux: str | None
    # Loaded constraints file, if `[python].resolves_to_constraints_file` is set for the resolve.
    constraints_file: ResolvePexConstraintsFile | None
    # `:all:`/`:none:`/package names, Pip-style (see pex_args for the translation).
    only_binary: FrozenOrderedSet[str]
    no_binary: FrozenOrderedSet[str]
    excludes: FrozenOrderedSet[str]
    overrides: FrozenOrderedSet[str]
    sources: FrozenOrderedSet[str]
    # From `[python-repos].path_mappings`.
    path_mappings: tuple[str, ...]
    # Defaults to "universal"; any other value is pex-specific (see validate_for_uv).
    lock_style: str
    complete_platforms: tuple[str, ...]
    # Timestamp cutoff for artifacts; maps to uv's `exclude-newer`.
    uploaded_prior_to: str | None

    def pex_args(self) -> Iterator[str]:
        """Arguments for Pex for indexes/--find-links, manylinux, and path mappings.

        Does not include arguments for constraints files, which must be set up independently.
        """
        # NB: In setting `--no-pypi`, we rely on the default value of `[python-repos].indexes`
        # including PyPI, which will override `--no-pypi` and result in using PyPI in the default
        # case. Why set `--no-pypi`, then? We need to do this so that
        # `[python-repos].indexes = ['custom_url']` will only point to that index and not include
        # PyPI.
        yield "--no-pypi"
        yield from (f"--index={index}" for index in self.indexes)
        yield from (f"--find-links={repo}" for repo in self.find_links)

        if self.manylinux:
            yield "--manylinux"
            yield self.manylinux
        else:
            yield "--no-manylinux"

        # Pex logically plumbs through equivalent settings, but uses a
        # separate flag instead of the Pip magic :all:/:none: syntax.  To
        # support the existing Pants config settings we need to go from
        # :all:/:none: --> Pex options, which Pex will translate back into Pip
        # options.  Note that Pex's --wheel (for example) means "allow
        # wheels", not "require wheels".
        if self.only_binary and ":all:" in self.only_binary:
            yield "--wheel"
            yield "--no-build"
        elif self.only_binary and ":none:" in self.only_binary:
            yield "--no-wheel"
            yield "--build"
        elif self.only_binary:
            yield from (f"--only-binary={pkg}" for pkg in self.only_binary)

        if self.no_binary and ":all:" in self.no_binary:
            yield "--no-wheel"
            yield "--build"
        elif self.no_binary and ":none:" in self.no_binary:
            yield "--wheel"
            yield "--no-build"
        elif self.no_binary:
            yield from (f"--only-build={pkg}" for pkg in self.no_binary)

        yield from (f"--path-mapping={v}" for v in self.path_mappings)

        yield from (f"--exclude={exclude}" for exclude in self.excludes)
        yield from (f"--source={source}" for source in self.sources)

        if self.uploaded_prior_to:
            yield f"--uploaded-prior-to={self.uploaded_prior_to}"

    def uv_config(self, extra_find_links: Iterable[str] = ()) -> str:
        """Content for uv.toml based on this resolve's configuration.

        Only uv-supported fields are used. Call validate_for_uv() first to ensure no
        pex-specific fields are set.

        :param extra_find_links: Additional find-links URLs to merge with this resolve's own.
        :return: The uv.toml content, or "" when nothing needs configuring.
        """
        config_lines: list[str] = []

        # No indexes configured means "don't consult any index at all".
        if not self.indexes:
            config_lines.append("no-index = true")

        all_find_links = (*self.find_links, *extra_find_links)
        if all_find_links:
            entries = "\n".join(f'    "{fl}",' for fl in all_find_links)
            config_lines.append(f"find-links = [\n{entries}\n]")

        # Translate Pip-style :all:/:none: magic values into uv's boolean/per-package options.
        if self.no_binary:
            if ":all:" in self.no_binary:
                config_lines.append("no-binary = true")
            elif ":none:" not in self.no_binary:
                entries = "\n".join(f'    "{pkg}",' for pkg in self.no_binary)
                config_lines.append(f"no-binary-package = [\n{entries}\n]")

        if self.only_binary:
            if ":all:" in self.only_binary:
                config_lines.append("no-build = true")
            elif ":none:" not in self.only_binary:
                entries = "\n".join(f'    "{pkg}",' for pkg in self.only_binary)
                config_lines.append(f"no-build-package = [\n{entries}\n]")

        if self.uploaded_prior_to:
            config_lines.append(f'exclude-newer = "{self.uploaded_prior_to}"')

        for i, index_url in enumerate(self.indexes):
            # The first index gets `default = true`, replacing uv's built-in PyPI default.
            # Subsequent indexes are additional sources.
            block = f'[[index]]\nurl = "{index_url}"\n'
            block += "default = true\n" if i == 0 else f'name = "extra-{i}"\n'
            config_lines.append(block)

        return "\n".join(config_lines) + "\n" if config_lines else ""

    def validate_for_uv(self, resolve_name: str) -> None:
        """Raise if any pex-specific resolve options are set that have no uv equivalent.

        :param resolve_name: Used only in the error message.
        :raises ValueError: if any pex-only option is set for this resolve.
        """
        pex_specific: list[str] = []
        if self.constraints_file:
            pex_specific.append("`[python].resolves_to_constraints_file`")
        if self.complete_platforms:
            pex_specific.append("`[python].resolves_to_complete_platforms`")
        if self.excludes:
            pex_specific.append("`[python].resolves_to_excludes`")
        if self.overrides:
            pex_specific.append("`[python].resolves_to_overrides`")
        if self.sources:
            pex_specific.append("`[python].resolves_to_sources`")
        if self.lock_style != "universal":
            pex_specific.append("`[python]._resolves_to_lock_style`")
        if self.path_mappings:
            pex_specific.append("`[python-repos].path_mappings`")
        if pex_specific:
            raise ValueError(
                f"The following options are set for the resolve `{resolve_name}` but are not "
                f"supported when using the uv resolver:\n"
                + "\n".join(f"  - {opt}" for opt in pex_specific)
            )
548

549

550
@dataclass(frozen=True)
class ResolveConfigRequest(EngineAwareParameter):
    """Find all configuration from `[python]` that impacts how the resolve is created.

    If `resolve_name` is None, then most per-resolve options will be ignored because there is no way
    for users to configure them. However, some options like `[python-repos].indexes` will still be
    loaded.
    """

    resolve_name: str | None

    def debug_hint(self) -> str:
        # Identifies this parameter in engine debug output.
        return self.resolve_name or "<no resolve>"
563

564

565
@rule
async def determine_resolve_config(
    request: ResolveConfigRequest,
    python_setup: PythonSetup,
    python_repos: PythonRepos,
    union_membership: UnionMembership,
) -> ResolveConfig:
    """Assemble the ResolveConfig for a named resolve (or defaults when no name is given).

    Repo-wide options (`[python-repos]` indexes/find_links/path_mappings, `[python].manylinux`)
    always apply; per-resolve options are looked up by `request.resolve_name` and, if a
    constraints file is configured, its content is loaded and parsed.
    """
    if request.resolve_name is None:
        # Without a resolve name there is no way for users to set per-resolve options,
        # so use empty/default values for them.
        return ResolveConfig(
            indexes=python_repos.indexes,
            find_links=python_repos.find_links,
            manylinux=python_setup.manylinux,
            constraints_file=None,
            no_binary=FrozenOrderedSet(),
            only_binary=FrozenOrderedSet(),
            excludes=FrozenOrderedSet(),
            overrides=FrozenOrderedSet(),
            sources=FrozenOrderedSet(),
            path_mappings=python_repos.path_mappings,
            lock_style="universal",  # Default to universal when no resolve name
            complete_platforms=(),  # No complete platforms by default
            uploaded_prior_to=None,
        )

    no_binary = python_setup.resolves_to_no_binary().get(request.resolve_name) or []
    only_binary = python_setup.resolves_to_only_binary().get(request.resolve_name) or []
    excludes = python_setup.resolves_to_excludes().get(request.resolve_name) or []
    overrides = python_setup.resolves_to_overrides().get(request.resolve_name) or []
    sources = python_setup.resolves_to_sources().get(request.resolve_name) or []
    lock_style = python_setup.resolves_to_lock_style().get(request.resolve_name) or "universal"
    complete_platforms = tuple(
        python_setup.resolves_to_complete_platforms().get(request.resolve_name) or []
    )
    uploaded_prior_to = python_setup.resolves_to_uploaded_prior_to().get(request.resolve_name)

    constraints_file: ResolvePexConstraintsFile | None = None
    _constraints_file_path = python_setup.resolves_to_constraints_file().get(request.resolve_name)
    if _constraints_file_path:
        _constraints_origin = softwrap(
            f"""
            the option `[python].resolves_to_constraints_file` for the resolve
            '{request.resolve_name}'
            """
        )
        # NB: `_constraints_file_path` is known truthy here (guarded above), so no
        # conditional is needed when building the globs.
        _constraints_path_globs = PathGlobs(
            [_constraints_file_path],
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin=_constraints_origin,
        )
        # TODO: Probably re-doing work here - instead of just calling one, then the next
        _constraints_digest, _constraints_digest_contents = await concurrently(
            path_globs_to_digest(_constraints_path_globs),
            get_digest_contents(**implicitly({_constraints_path_globs: PathGlobs})),
        )

        # The option must name exactly one file; globs matching several are rejected.
        if len(_constraints_digest_contents) != 1:
            raise ValueError(
                softwrap(
                    f"""
                    Expected only one file from {_constraints_origin}, but matched:
                    {sorted(fc.path for fc in _constraints_digest_contents)}

                    Did you use a glob like `*`?
                    """
                )
            )
        _constraints_file_content = next(iter(_constraints_digest_contents))
        constraints = parse_requirements_file(
            _constraints_file_content.content.decode("utf-8"), rel_path=_constraints_file_path
        )
        constraints_file = ResolvePexConstraintsFile(
            _constraints_digest, _constraints_file_path, FrozenOrderedSet(constraints)
        )

    return ResolveConfig(
        indexes=python_repos.indexes,
        find_links=python_repos.find_links,
        manylinux=python_setup.manylinux,
        constraints_file=constraints_file,
        no_binary=FrozenOrderedSet(no_binary),
        only_binary=FrozenOrderedSet(only_binary),
        excludes=FrozenOrderedSet(excludes),
        overrides=FrozenOrderedSet(overrides),
        sources=FrozenOrderedSet(sources),
        path_mappings=python_repos.path_mappings,
        lock_style=lock_style,
        complete_platforms=complete_platforms,
        uploaded_prior_to=uploaded_prior_to,
    )
654

655

656
def validate_metadata(
    metadata: PythonLockfileMetadata,
    interpreter_constraints: InterpreterConstraints,
    lockfile: Lockfile,
    consumed_req_strings: Iterable[str],
    validate_consumed_req_strings: bool,
    python_setup: PythonSetup,
    resolve_config: ResolveConfig,
) -> None:
    """Given interpreter constraints and requirements to be consumed, validate lockfile metadata.

    Compares the inputs recorded in the lockfile's metadata header against the current
    configuration. On mismatch, either raises `InvalidLockfileError` or logs a warning,
    depending on `[python].invalid_lockfile_behavior`.
    """

    # TODO(#12314): Improve the exception if invalid strings
    user_requirements = [PipRequirement.parse(i) for i in consumed_req_strings]
    validation = metadata.is_valid_for(
        expected_invalidation_digest=lockfile.lockfile_hex_digest,
        user_interpreter_constraints=interpreter_constraints,
        interpreter_universe=python_setup.interpreter_versions_universe,
        # An empty iterable skips per-requirement validation entirely.
        user_requirements=user_requirements if validate_consumed_req_strings else [],
        manylinux=resolve_config.manylinux,
        requirement_constraints=(
            resolve_config.constraints_file.constraints
            if resolve_config.constraints_file
            else set()
        ),
        only_binary=resolve_config.only_binary,
        no_binary=resolve_config.no_binary,
        excludes=resolve_config.excludes,
        overrides=resolve_config.overrides,
        sources=resolve_config.sources,
        lock_style=resolve_config.lock_style,
        complete_platforms=resolve_config.complete_platforms,
        uploaded_prior_to=resolve_config.uploaded_prior_to,
    )
    if validation:
        return

    # Call with explicit keyword arguments rather than building an intermediate dict and
    # splatting it, which required a `# type: ignore[arg-type]` to silence mypy on the
    # heterogeneous dict values.
    msg_iter = _invalid_lockfile_error(
        metadata=metadata,
        validation=validation,
        lockfile=lockfile,
        is_default_user_lockfile=lockfile.resolve_name == python_setup.default_resolve,
        user_interpreter_constraints=interpreter_constraints,
        user_requirements=user_requirements,
        maybe_constraints_file_path=(
            resolve_config.constraints_file.path if resolve_config.constraints_file else None
        ),
    )
    msg = "".join(msg_iter).strip()
    if python_setup.invalid_lockfile_behavior == InvalidLockfileBehavior.error:
        raise InvalidLockfileError(msg)
    logger.warning(msg)
708

709

710
def _common_failure_reasons(
    failure_reasons: set[InvalidPythonLockfileReason], maybe_constraints_file_path: str | None
) -> Iterator[str]:
    """Yield a human-readable bullet point for each common lockfile-invalidation reason.

    Covers the failure reasons shared by all lockfile formats (constraints file, only_binary,
    no_binary, manylinux, uploaded_prior_to); reasons absent from `failure_reasons` produce no
    output. Format-specific reasons are handled by the caller, `_invalid_lockfile_error`.
    """
    if InvalidPythonLockfileReason.CONSTRAINTS_FILE_MISMATCH in failure_reasons:
        # Distinguish "metadata expects a constraints file but none is configured" from
        # "the configured constraints file's contents have changed".
        if maybe_constraints_file_path is None:
            yield softwrap(
                """
                - Constraint file expected from lockfile metadata but no
                constraints file configured.  See the option
                `[python].resolves_to_constraints_file`.
                """
            )
        else:
            yield softwrap(
                f"""
                - The constraints file at {maybe_constraints_file_path} has changed from when the
                lockfile was generated. (Constraints files are set via the option
                `[python].resolves_to_constraints_file`)
                """
            )
    if InvalidPythonLockfileReason.ONLY_BINARY_MISMATCH in failure_reasons:
        yield softwrap(
            """
            - The `only_binary` arguments have changed from when the lockfile was generated.
            (`only_binary` is set via the options `[python].resolves_to_only_binary` and deprecated
            `[python].only_binary`)
            """
        )
    if InvalidPythonLockfileReason.NO_BINARY_MISMATCH in failure_reasons:
        yield softwrap(
            """
            - The `no_binary` arguments have changed from when the lockfile was generated.
            (`no_binary` is set via the options `[python].resolves_to_no_binary` and deprecated
            `[python].no_binary`)
            """
        )
    if InvalidPythonLockfileReason.MANYLINUX_MISMATCH in failure_reasons:
        yield softwrap(
            """
            - The `manylinux` argument has changed from when the lockfile was generated.
            (manylinux is set via the option `[python].resolver_manylinux`)
            """
        )
    if InvalidPythonLockfileReason.UPLOADED_PRIOR_TO_MISMATCH in failure_reasons:
        yield softwrap(
            """
            - The `uploaded_prior_to` argument has changed from when the lockfile was generated.
            (uploaded_prior_to is set via the option `[python].resolves_to_uploaded_prior_to`)
            """
        )
760

761

762
def _invalid_lockfile_error(
    metadata: PythonLockfileMetadata,
    validation: LockfileMetadataValidation,
    lockfile: Lockfile,
    *,
    is_default_user_lockfile: bool,
    user_requirements: list[PipRequirement],
    user_interpreter_constraints: InterpreterConstraints,
    maybe_constraints_file_path: str | None,
) -> Iterator[str]:
    """Yield the pieces of the error/warning message for a stale or mismatched lockfile.

    The fragments are joined (and stripped) by the caller, `validate_metadata`. Each
    recognized failure reason in `validation.failure_reasons` contributes its own section,
    followed by common reasons and the regeneration instructions.
    """
    resolve = lockfile.resolve_name
    # Show at most the first two requirements verbatim; summarize the rest as a count.
    consumed_msg_parts = [f"`{str(r)}`" for r in user_requirements[0:2]]
    if len(user_requirements) > 2:
        consumed_msg_parts.append(
            f"{len(user_requirements) - 2} other "
            f"{pluralize(len(user_requirements) - 2, 'requirement', include_count=False)}"
        )

    yield f"\n\nYou are consuming {comma_separated_list(consumed_msg_parts)} from "
    # A `resource://` URL indicates a lockfile shipped inside the Pants distribution,
    # as opposed to a user-generated lockfile on disk.
    if lockfile.url.startswith("resource://"):
        yield f"the built-in `{resolve}` lockfile provided by Pants "
    else:
        yield f"the `{resolve}` lockfile at {lockfile.url} "
    yield "with incompatible inputs.\n\n"

    # Requirement-level mismatches: either the digest of the input requirements changed, or
    # specific consumed requirements are not provided by the lockfile.
    if any(
        i
        in (
            InvalidPythonLockfileReason.INVALIDATION_DIGEST_MISMATCH,
            InvalidPythonLockfileReason.REQUIREMENTS_MISMATCH,
        )
        for i in validation.failure_reasons
    ):
        yield (
            softwrap(
                """
            - The lockfile does not provide all the necessary requirements. You must
            modify the input requirements and/or regenerate the lockfile (see below).
            """
            )
            + "\n\n"
        )
        # The default resolve also picks up targets with no explicit `resolve=` field, so
        # its guidance differs slightly from a non-default resolve's.
        if is_default_user_lockfile:
            yield softwrap(
                f"""
                - The necessary requirements are specified by requirements targets marked with
                `resolve="{resolve}"`, or those with no explicit resolve (since `{resolve}` is the
                default for this repo).

                - The lockfile destination is specified by the `{resolve}` key in `[python].resolves`.
                """
            )
        else:
            yield softwrap(
                f"""
                - The necessary requirements are specified by requirements targets marked with
                `resolve="{resolve}"`.

                - The lockfile destination is specified by the `{resolve}` key in
                `[python].resolves`.
                """
            )

        # Only V2+ metadata records the full requirement set, so only then can we list
        # exactly which requirements are missing.
        if isinstance(metadata, PythonLockfileMetadataV2):
            # Note that by the time we have gotten to this error message, we should have already
            # validated that the transitive closure is using the same resolve, via
            # pex_from_targets.py. This implies that we don't need to worry about users depending
            # on python_requirement targets that aren't in that code's resolve.
            not_in_lock = sorted(str(r) for r in set(user_requirements) - metadata.requirements)
            yield f"\n\n- The requirements not provided by the `{resolve}` resolve are:\n  "
            yield str(not_in_lock)

    if InvalidPythonLockfileReason.INTERPRETER_CONSTRAINTS_MISMATCH in validation.failure_reasons:
        yield "\n\n"
        yield softwrap(
            f"""
            - The inputs use interpreter constraints (`{user_interpreter_constraints}`) that
            are not a subset of those used to generate the lockfile
            (`{metadata.valid_for_interpreter_constraints}`).

            - The input interpreter constraints are specified by your code, using
            the `[python].interpreter_constraints` option and the `interpreter_constraints`
            target field.

            - To create a lockfile with new interpreter constraints, update the option
            `[python].resolves_to_interpreter_constraints`, and then generate the lockfile
            (see below).
            """
        )
        yield f"\n\nSee {doc_url('docs/python/overview/interpreter-compatibility')} for details."

    yield "\n\n"
    # Append format-agnostic reasons (constraints file, only_binary, no_binary, manylinux,
    # uploaded_prior_to), one line per reason.
    yield from (
        f"{fail}\n"
        for fail in _common_failure_reasons(validation.failure_reasons, maybe_constraints_file_path)
    )
    yield "To regenerate your lockfile, "
    yield f"run `{bin_name()} generate-lockfiles --resolve={resolve}`."
    yield f"\n\nSee {doc_url('docs/python/overview/third-party-dependencies')} for details.\n\n"
861

862

863
def rules():
    # Register this module's `@rule`-decorated functions with the Pants engine.
    return collect_rules()
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc