• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

pantsbuild / pants / 18143316655

30 Sep 2025 09:00PM UTC coverage: 80.263% (-0.01%) from 80.275%
18143316655

push

github

web-flow
Write Python lockfile metadata to separate files (#22713)

Currently we tack it on as a header to the lockfile, which
makes the lockfile unusable when working directly with Pex
without first manually editing it to remove the header.

Instead, we now (optionally) write to a separate metadata
sibling file. 

We always try to read the metadata file,
falling back to the header if it doesn't exist. This will
allow us to regenerate the embedded lockfiles without
worrying about whether the user has the new metadata
files enabled.

42 of 87 new or added lines in 7 files covered. (48.28%)

1 existing line in 1 file now uncovered.

77226 of 96216 relevant lines covered (80.26%)

3.37 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

72.2
/src/python/pants/backend/python/util_rules/pex_requirements.py
1
# Copyright 2022 Pants project contributors (see CONTRIBUTORS.md).
2
# Licensed under the Apache License, Version 2.0 (see LICENSE).
3

4
from __future__ import annotations
12✔
5

6
import importlib.resources
12✔
7
import json
12✔
8
import logging
12✔
9
from collections.abc import Iterable, Iterator
12✔
10
from dataclasses import dataclass, field
12✔
11
from typing import TYPE_CHECKING
12✔
12
from urllib.parse import urlparse
12✔
13

14
from pants.backend.python.subsystems.repos import PythonRepos
12✔
15
from pants.backend.python.subsystems.setup import InvalidLockfileBehavior, PythonSetup
12✔
16
from pants.backend.python.target_types import PythonRequirementsField
12✔
17
from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints
12✔
18
from pants.backend.python.util_rules.lockfile_metadata import (
12✔
19
    InvalidPythonLockfileReason,
20
    PythonLockfileMetadata,
21
    PythonLockfileMetadataV2,
22
)
23
from pants.build_graph.address import Address
12✔
24
from pants.core.util_rules.lockfile_metadata import (
12✔
25
    InvalidLockfileError,
26
    LockfileMetadataValidation,
27
    NoLockfileMetadataBlock,
28
)
29
from pants.engine.engine_aware import EngineAwareParameter
12✔
30
from pants.engine.fs import CreateDigest, Digest, FileContent, GlobMatchErrorBehavior, PathGlobs
12✔
31
from pants.engine.internals.native_engine import IntrinsicError
12✔
32
from pants.engine.intrinsics import (
12✔
33
    create_digest,
34
    get_digest_contents,
35
    get_digest_entries,
36
    path_globs_to_digest,
37
)
38
from pants.engine.rules import collect_rules, concurrently, implicitly, rule
12✔
39
from pants.engine.unions import UnionMembership
12✔
40
from pants.util.docutil import bin_name, doc_url
12✔
41
from pants.util.ordered_set import FrozenOrderedSet
12✔
42
from pants.util.pip_requirement import PipRequirement
12✔
43
from pants.util.requirements import parse_requirements_file
12✔
44
from pants.util.strutil import comma_separated_list, pluralize, softwrap
12✔
45

46
if TYPE_CHECKING:
47
    from pants.backend.python.util_rules.pex import Pex
48

49

50
logger = logging.getLogger(__name__)
12✔
51

52

53
@dataclass(frozen=True)
class Resolve:
    """A reference to a named resolve configured in `[python].resolves`."""

    # A named resolve for a "user lockfile".
    # Soon to be the only kind of lockfile, as this class will help
    # get rid of the "tool lockfile" concept.
    # TODO: Once we get rid of old-style tool lockfiles we can possibly
    #  unify this with EntireLockfile.
    # TODO: We might want to add the requirements subset to this data structure,
    #  to further detangle this from PexRequirements.

    # The resolve name, i.e. a key of the `[python].resolves` option
    # (see get_lockfile_for_resolve, which maps it to a lockfile path).
    name: str

    # If true, resolve against the entire lockfile content rather than a subset.
    # NOTE(review): the consumption semantics live outside this block — confirm at call sites.
    use_entire_lockfile: bool
65

66

67
@dataclass(frozen=True)
class Lockfile:
    """A pointer to a lockfile's location, plus inputs for validating it."""

    # A plain path, `file://` URL, or `resource://` URL (see read_file_or_resource).
    url: str
    # Human-readable description of where `url` came from, used in error messages.
    url_description_of_origin: str
    # The name of the resolve this lockfile backs.
    resolve_name: str
    # If set, the expected invalidation digest, compared against the lockfile
    # metadata during validation (see validate_metadata).
    lockfile_hex_digest: str | None = None
73

74

75
@rule
async def get_lockfile_for_resolve(resolve: Resolve, python_setup: PythonSetup) -> Lockfile:
    """Map a named resolve to the Lockfile configured for it in `[python].resolves`."""
    name = resolve.name
    path = python_setup.resolves.get(name)
    if path:
        return Lockfile(
            url=path,
            url_description_of_origin=f"the resolve `{name}`",
            resolve_name=name,
        )
    # Unknown (or empty) entry: there is no lockfile to point at.
    raise ValueError(f"No such resolve: {name}")
85

86

87
@dataclass(frozen=True)
class LoadedLockfile:
    """A lockfile after loading and header stripping.

    Validation is deferred until consumption time, because each consumed subset (in the case of a
    PEX-native lockfile) can be individually validated.
    """

    # The digest of the loaded lockfile (which may not be identical to the input,
    # e.g. after comment-header stripping for PEX-native lockfiles).
    lockfile_digest: Digest
    # The path of the loaded lockfile within the Digest.
    lockfile_path: str
    # The loaded metadata for this lockfile, if any.
    # Excluded from hashing: metadata is derived from the digest/path above.
    metadata: PythonLockfileMetadata | None = field(hash=False)
    # An estimate of the number of requirements in this lockfile, to be used as a heuristic for
    # available parallelism.
    requirement_estimate: int
    # True if the loaded lockfile is in PEX's native format.
    is_pex_native: bool
    # If !is_pex_native, the lockfile parsed as constraints strings, for use when the lockfile
    # needs to be subsetted (see #15031, ##12222).
    as_constraints_strings: FrozenOrderedSet[str] | None
    # The original file or file content (which may not have identical content to the output
    # `lockfile_digest`).
    original_lockfile: Lockfile
112

113

114
@dataclass(frozen=True)
class LoadedLockfileRequest:
    """A request to load and validate the content of the given lockfile."""

    # The lockfile (path/URL plus validation inputs) to load.
    lockfile: Lockfile
119

120

121
def strip_comments_from_pex_json_lockfile(lockfile_bytes: bytes) -> bytes:
    """Remove `//`-prefixed comment lines so the lockfile is valid JSON for Pex.

    Pants only ever prepends `//` comment lines, so that is all we strip. If users
    add their own comments in some other form, things will fail.
    """
    kept_lines = []
    for raw_line in lockfile_bytes.splitlines():
        if raw_line.lstrip().startswith(b"//"):
            continue
        kept_lines.append(raw_line)
    return b"\n".join(kept_lines)
130

131

132
def is_probably_pex_json_lockfile(lockfile_bytes: bytes) -> bool:
    """Heuristic: does the first significant line open a JSON object?

    Blank lines and `//` comment lines are skipped. Note that pip/Pex complain if a
    requirements.txt style starts with `{`, so this discriminates reliably.
    """
    significant = (
        candidate
        for candidate in lockfile_bytes.splitlines()
        if candidate and not candidate.startswith(b"//")
    )
    first = next(significant, None)
    return first is not None and first.lstrip().startswith(b"{")
138

139

140
def _pex_lockfile_requirement_count(lockfile_bytes: bytes) -> int:
12✔
141
    # TODO: this is a very naive heuristic that will overcount, and also relies on Pants
142
    #  setting `--indent` when generating lockfiles. More robust would be parsing the JSON
143
    #  and getting the len(locked_resolves.locked_requirements.project_name), but we risk
144
    #  if Pex ever changes its lockfile format.
145

146
    num_lines = len(lockfile_bytes.splitlines())
1✔
147
    # These are very naive estimates, and they bias towards overcounting. For example, requirements
148
    # often are 20+ lines.
149
    num_lines_for_options = 10
1✔
150
    lines_per_req = 10
1✔
151
    return max((num_lines - num_lines_for_options) // lines_per_req, 2)
1✔
152

153

154
def get_metadata(
    python_setup: PythonSetup,
    lock_bytes: bytes,
    lockfile_path: str | None,
    resolve_name: str,
    delimiter: str,
) -> PythonLockfileMetadata | None:
    """Parse the metadata header block from a lockfile's bytes.

    Returns None when lockfile validation is disabled, or when the lockfile carries
    no metadata block (i.e. it was not generated by Pants).
    """
    if python_setup.invalid_lockfile_behavior == InvalidLockfileBehavior.ignore:
        return None
    try:
        # NB: `delimeter` (sic) is the callee's parameter spelling.
        return PythonLockfileMetadata.from_lockfile(
            lockfile=lock_bytes,
            lockfile_path=lockfile_path,
            resolve_name=resolve_name,
            delimeter=delimiter,
        )
    except NoLockfileMetadataBlock:
        # We don't validate if the file isn't a pants-generated lockfile (as determined
        # by the lack of a metadata block). But we propagate any other type of
        # InvalidLockfileError incurred while parsing the metadata block.
        logger.debug(
            f"Lockfile for resolve {resolve_name} "
            f"{('at ' + lockfile_path) if lockfile_path else ''}"
            f" has no metadata block, so was not generated by Pants. "
            f"Lockfile will not be validated."
        )
        return None
181

182

183
async def read_file_or_resource(url: str, description_of_origin: str) -> Digest:
    """Read from a path, file:// or resource:// URL and return the digest of the content.

    If no content is found at the path/URL, raise.

    :param url: A relative path, a `file://` URL, or a made-up `resource://pkg/path` URL
      whose netloc names a Python package and whose path is a resource within it.
    :param description_of_origin: Human-readable origin, used in glob-match error messages.
    :raises ValueError: If the URL scheme is none of the supported ones.
    """
    parts = urlparse(url)
    # urlparse retains the leading / in URLs with a netloc.
    path = parts.path[1:] if parts.path.startswith("/") else parts.path
    if parts.scheme in {"", "file"}:
        # Plain paths and file:// URLs are read via the engine; a failed match raises
        # (GlobMatchErrorBehavior.error) with `description_of_origin` in the message.
        digest = await path_globs_to_digest(
            PathGlobs(
                [path],
                glob_match_error_behavior=GlobMatchErrorBehavior.error,
                description_of_origin=description_of_origin,
            )
        )
    elif parts.scheme == "resource":
        _fc = FileContent(
            path,
            # The "netloc" in our made-up "resource://" scheme is the package.
            # NOTE: read_bytes() raises if the resource is missing; callers catch it.
            importlib.resources.files(parts.netloc).joinpath(path).read_bytes(),
        )
        digest = await create_digest(CreateDigest([_fc]))
    else:
        raise ValueError(
            f"Unsupported scheme {parts.scheme} for URL: {url} (origin: {description_of_origin})"
        )
    return digest
211

212

213
@rule
async def load_lockfile(
    request: LoadedLockfileRequest,
    python_setup: PythonSetup,
) -> LoadedLockfile:
    """Load a lockfile plus its metadata, preferring a sidecar metadata file.

    Metadata is looked up first in a sibling metadata file (located via
    `PythonLockfileMetadata.metadata_location_for_lockfile`); if that file does not
    exist, we fall back to parsing a comment header block prepended to the lockfile
    itself. For PEX-native lockfiles the header is stripped so the remaining content
    is valid JSON for Pex.
    """
    lockfile = request.lockfile
    # TODO: This is temporary. Once we regenerate all embedded lockfiles to have sidecar metadata
    #  files instead of metadata front matter, we won't need to call get_metadata() on them.
    synthetic_lock = lockfile.url.startswith("resource://")
    lockfile_digest = await read_file_or_resource(lockfile.url, lockfile.url_description_of_origin)
    lockfile_digest_entries = await get_digest_entries(lockfile_digest)
    lockfile_path = lockfile_digest_entries[0].path

    lockfile_contents = await get_digest_contents(lockfile_digest)
    lock_bytes = lockfile_contents[0].content
    is_pex_native = is_probably_pex_json_lockfile(lock_bytes)
    constraints_strings = None

    metadata_url = PythonLockfileMetadata.metadata_location_for_lockfile(lockfile.url)
    metadata = None
    try:
        metadata_digest = await read_file_or_resource(
            metadata_url,
            description_of_origin="We squelch errors, so this is never seen by users",
        )
        digest_contents = await get_digest_contents(metadata_digest)
        metadata_bytes = digest_contents[0].content
        json_dict = json.loads(metadata_bytes)
        metadata = PythonLockfileMetadata.from_json_dict(
            json_dict,
            lockfile_description=f"the lockfile for `{lockfile.resolve_name}`",
            # Fix: the regenerate-command was missing its closing backtick, producing a
            # malformed error message (cf. the same message in _invalid_lockfile_error).
            error_suffix=softwrap(
                f"""
                To resolve this error, you will need to regenerate the lockfile by running
                `{bin_name()} generate-lockfiles --resolve={lockfile.resolve_name}`.
                """
            ),
        )
        requirement_estimate = _pex_lockfile_requirement_count(lock_bytes)
    except (IntrinsicError, FileNotFoundError):
        # No metadata file or resource found, so fall through to finding a metadata
        # header block prepended to the lockfile itself.
        pass

    if not metadata:
        if is_pex_native:
            header_delimiter = "//"
            # Strip the comment header so Pex sees valid JSON.
            stripped_lock_bytes = strip_comments_from_pex_json_lockfile(lock_bytes)
            lockfile_digest = await create_digest(
                CreateDigest([FileContent(lockfile_path, stripped_lock_bytes)])
            )
            requirement_estimate = _pex_lockfile_requirement_count(lock_bytes)
        else:
            header_delimiter = "#"
            lock_string = lock_bytes.decode()
            # Note: this is a very naive heuristic. It will overcount because entries often
            # have >1 line due to `--hash`.
            requirement_estimate = len(lock_string.splitlines())
            constraints_strings = FrozenOrderedSet(
                str(req) for req in parse_requirements_file(lock_string, rel_path=lockfile_path)
            )

        metadata = get_metadata(
            python_setup,
            lock_bytes,
            # Don't report a concrete path for built-in (resource://) lockfiles.
            None if synthetic_lock else lockfile_path,
            lockfile.resolve_name,
            header_delimiter,
        )

    return LoadedLockfile(
        lockfile_digest,
        lockfile_path,
        metadata,
        requirement_estimate,
        is_pex_native,
        constraints_strings,
        original_lockfile=lockfile,
    )
292

293

294
@dataclass(frozen=True)
class EntireLockfile:
    """A request to resolve the entire contents of a lockfile.

    This resolution mode is used in a few cases:
    1. for poetry or handwritten lockfiles (which do not support being natively subsetted the
       way that a PEX lockfile can be), in order to build a repository-PEX to subset separately.
    2. for tool lockfiles, which (regardless of format), need to resolve the entire lockfile
       content anyway.
    """

    # The lockfile whose entire content should be resolved.
    lockfile: Lockfile
    # If available, the current complete set of requirement strings that influence this lockfile.
    # Used for metadata validation.
    complete_req_strings: tuple[str, ...] | None = None
309

310

311
@dataclass(frozen=True)
class PexRequirements:
    """A request to resolve a series of requirements (optionally from a "superset" resolve)."""

    # Requirement strings, or addresses (or string specs of addresses) of targets
    # referring to them. Stored sorted for stable equality/hashing.
    req_strings_or_addrs: FrozenOrderedSet[str | Address]
    # Constraints strings to apply during the resolve. Stored sorted.
    constraints_strings: FrozenOrderedSet[str]
    # If these requirements should be resolved as a subset of either a repository PEX, or a
    # PEX-native lockfile, the superset to use. # NB: Use of a lockfile here asserts that the
    # lockfile is PEX-native, because legacy lockfiles do not support subset resolves.
    from_superset: Pex | Resolve | None
    description_of_origin: str

    def __init__(
        self,
        req_strings_or_addrs: Iterable[str | Address] = (),
        *,
        constraints_strings: Iterable[str] = (),
        from_superset: Pex | Resolve | None = None,
        description_of_origin: str = "",
    ) -> None:
        """
        :param req_strings_or_addrs: The requirement strings to resolve, or addresses
          of targets that refer to them, or string specs of such addresses.
        :param constraints_strings: Constraints strings to apply during the resolve.
        :param from_superset: An optional superset PEX or lockfile to resolve the req strings from.
        :param description_of_origin: A human-readable description of what these requirements
          represent, for use in error messages.
        """
        # Frozen dataclass: fields must be assigned via object.__setattr__.
        # Inputs are sorted so that logically-equal instances compare/hash equal.
        object.__setattr__(
            self, "req_strings_or_addrs", FrozenOrderedSet(sorted(req_strings_or_addrs))
        )
        object.__setattr__(
            self, "constraints_strings", FrozenOrderedSet(sorted(constraints_strings))
        )
        object.__setattr__(self, "from_superset", from_superset)
        object.__setattr__(self, "description_of_origin", description_of_origin)

    @classmethod
    def req_strings_from_requirement_fields(
        cls, fields: Iterable[PythonRequirementsField]
    ) -> FrozenOrderedSet[str]:
        """A convenience when you only need the raw requirement strings from fields and don't need
        to consider things like constraints or resolves."""
        # NOTE(review): assumes every `fld.value` is iterable (non-None) — confirm callers
        # only pass populated fields.
        return FrozenOrderedSet(
            sorted(str(python_req) for fld in fields for python_req in fld.value)
        )

    def __bool__(self) -> bool:
        # Truthy iff there is at least one requirement string/address to resolve.
        return bool(self.req_strings_or_addrs)
360

361

362
@dataclass(frozen=True)
class ResolvePexConstraintsFile:
    """A constraints file loaded for a resolve (see `[python].resolves_to_constraints_file`)."""

    # Digest containing the constraints file.
    digest: Digest
    # Path of the constraints file within the digest.
    path: str
    # The parsed constraints, as requirement objects.
    constraints: FrozenOrderedSet[PipRequirement]
367

368

369
@dataclass(frozen=True)
class ResolvePexConfig:
    """Configuration from `[python]` that impacts how the resolve is created."""

    # Package indexes to consult (from `[python-repos].indexes`).
    indexes: tuple[str, ...]
    # Additional --find-links repos (from `[python-repos].find_links`).
    find_links: tuple[str, ...]
    # The manylinux platform setting, or None to disable manylinux wheels.
    manylinux: str | None
    # The loaded constraints file for this resolve, if configured.
    constraints_file: ResolvePexConstraintsFile | None
    # Packages restricted to binary (wheel) distribution; may contain :all:/:none:.
    only_binary: FrozenOrderedSet[str]
    # Packages restricted to source (sdist) builds; may contain :all:/:none:.
    no_binary: FrozenOrderedSet[str]
    # Requirements to exclude from the resolve.
    excludes: FrozenOrderedSet[str]
    # Requirement overrides to apply.
    overrides: FrozenOrderedSet[str]
    # Path mappings (from `[python-repos].path_mappings`) for machine-independent lockfiles.
    path_mappings: tuple[str, ...]

    def pex_args(self) -> Iterator[str]:
        """Arguments for Pex for indexes/--find-links, manylinux, and path mappings.

        Does not include arguments for constraints files, which must be set up independently.
        """
        # NB: In setting `--no-pypi`, we rely on the default value of `[python-repos].indexes`
        # including PyPI, which will override `--no-pypi` and result in using PyPI in the default
        # case. Why set `--no-pypi`, then? We need to do this so that
        # `[python-repos].indexes = ['custom_url']` will only point to that index and not include
        # PyPI.
        yield "--no-pypi"
        yield from (f"--index={index}" for index in self.indexes)
        yield from (f"--find-links={repo}" for repo in self.find_links)

        if self.manylinux:
            yield "--manylinux"
            yield self.manylinux
        else:
            yield "--no-manylinux"

        # Pex logically plumbs through equivalent settings, but uses a
        # separate flag instead of the Pip magic :all:/:none: syntax.  To
        # support the existing Pants config settings we need to go from
        # :all:/:none: --> Pex options, which Pex will translate back into Pip
        # options.  Note that Pex's --wheel (for example) means "allow
        # wheels", not "require wheels".
        if self.only_binary and ":all:" in self.only_binary:
            yield "--wheel"
            yield "--no-build"
        elif self.only_binary and ":none:" in self.only_binary:
            yield "--no-wheel"
            yield "--build"
        elif self.only_binary:
            yield from (f"--only-binary={pkg}" for pkg in self.only_binary)

        if self.no_binary and ":all:" in self.no_binary:
            yield "--no-wheel"
            yield "--build"
        elif self.no_binary and ":none:" in self.no_binary:
            yield "--wheel"
            yield "--no-build"
        elif self.no_binary:
            yield from (f"--only-build={pkg}" for pkg in self.no_binary)

        yield from (f"--path-mapping={v}" for v in self.path_mappings)

        yield from (f"--exclude={exclude}" for exclude in self.excludes)
        yield from (f"--override={override}" for override in self.overrides)
431

432

433
@dataclass(frozen=True)
class ResolvePexConfigRequest(EngineAwareParameter):
    """Find all configuration from `[python]` that impacts how the resolve is created.

    If `resolve_name` is None, then most per-resolve options will be ignored because there is no way
    for users to configure them. However, some options like `[python-repos].indexes` will still be
    loaded.
    """

    # The resolve to look up configuration for, or None for resolve-agnostic config.
    resolve_name: str | None

    def debug_hint(self) -> str:
        # Shown in engine logs/traces to identify this parameter.
        return self.resolve_name or "<no resolve>"
446

447

448
@rule
async def determine_resolve_pex_config(
    request: ResolvePexConfigRequest,
    python_setup: PythonSetup,
    python_repos: PythonRepos,
    union_membership: UnionMembership,
) -> ResolvePexConfig:
    """Assemble the `[python]`/`[python-repos]` configuration affecting a resolve.

    With no resolve name, per-resolve options (constraints file, only/no-binary,
    excludes, overrides) are left empty and only repo-wide settings are returned.
    """
    if request.resolve_name is None:
        return ResolvePexConfig(
            indexes=python_repos.indexes,
            find_links=python_repos.find_links,
            manylinux=python_setup.manylinux,
            constraints_file=None,
            no_binary=FrozenOrderedSet(),
            only_binary=FrozenOrderedSet(),
            excludes=FrozenOrderedSet(),
            overrides=FrozenOrderedSet(),
            path_mappings=python_repos.path_mappings,
        )

    # Per-resolve option lookups; fall back to empty when the resolve has no entry.
    no_binary = python_setup.resolves_to_no_binary().get(request.resolve_name) or []
    only_binary = python_setup.resolves_to_only_binary().get(request.resolve_name) or []
    excludes = python_setup.resolves_to_excludes().get(request.resolve_name) or []
    overrides = python_setup.resolves_to_overrides().get(request.resolve_name) or []

    constraints_file: ResolvePexConstraintsFile | None = None
    _constraints_file_path = python_setup.resolves_to_constraints_file().get(request.resolve_name)
    if _constraints_file_path:
        _constraints_origin = softwrap(
            f"""
            the option `[python].resolves_to_constraints_file` for the resolve
            '{request.resolve_name}'
            """
        )
        _constraints_path_globs = PathGlobs(
            [_constraints_file_path] if _constraints_file_path else [],
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin=_constraints_origin,
        )
        # TODO: Probably re-doing work here - instead of just calling one, then the next
        _constraints_digest, _constraints_digest_contents = await concurrently(
            path_globs_to_digest(_constraints_path_globs),
            get_digest_contents(**implicitly({_constraints_path_globs: PathGlobs})),
        )

        # The option must point at exactly one file; globs matching several are an error.
        if len(_constraints_digest_contents) != 1:
            raise ValueError(
                softwrap(
                    f"""
                    Expected only one file from {_constraints_origin}, but matched:
                    {sorted(fc.path for fc in _constraints_digest_contents)}

                    Did you use a glob like `*`?
                    """
                )
            )
        _constraints_file_content = next(iter(_constraints_digest_contents))
        constraints = parse_requirements_file(
            _constraints_file_content.content.decode("utf-8"), rel_path=_constraints_file_path
        )
        constraints_file = ResolvePexConstraintsFile(
            _constraints_digest, _constraints_file_path, FrozenOrderedSet(constraints)
        )

    return ResolvePexConfig(
        indexes=python_repos.indexes,
        find_links=python_repos.find_links,
        manylinux=python_setup.manylinux,
        constraints_file=constraints_file,
        no_binary=FrozenOrderedSet(no_binary),
        only_binary=FrozenOrderedSet(only_binary),
        excludes=FrozenOrderedSet(excludes),
        overrides=FrozenOrderedSet(overrides),
        path_mappings=python_repos.path_mappings,
    )
523

524

525
def validate_metadata(
    metadata: PythonLockfileMetadata,
    interpreter_constraints: InterpreterConstraints,
    lockfile: Lockfile,
    consumed_req_strings: Iterable[str],
    validate_consumed_req_strings: bool,
    python_setup: PythonSetup,
    resolve_config: ResolvePexConfig,
) -> None:
    """Given interpreter constraints and requirements to be consumed, validate lockfile metadata.

    On failure, either raises InvalidLockfileError or logs a warning, depending on
    `[python].invalid_lockfile_behavior`.
    """
    # TODO(#12314): Improve the exception if invalid strings
    parsed_requirements = [PipRequirement.parse(raw) for raw in consumed_req_strings]

    constraints = (
        resolve_config.constraints_file.constraints if resolve_config.constraints_file else set()
    )
    validation = metadata.is_valid_for(
        expected_invalidation_digest=lockfile.lockfile_hex_digest,
        user_interpreter_constraints=interpreter_constraints,
        interpreter_universe=python_setup.interpreter_versions_universe,
        user_requirements=parsed_requirements if validate_consumed_req_strings else {},
        manylinux=resolve_config.manylinux,
        requirement_constraints=constraints,
        only_binary=resolve_config.only_binary,
        no_binary=resolve_config.no_binary,
        excludes=resolve_config.excludes,
        overrides=resolve_config.overrides,
    )
    if not validation:
        constraints_path = (
            resolve_config.constraints_file.path if resolve_config.constraints_file else None
        )
        message = "".join(
            _invalid_lockfile_error(
                metadata=metadata,
                validation=validation,
                lockfile=lockfile,
                is_default_user_lockfile=lockfile.resolve_name == python_setup.default_resolve,
                user_interpreter_constraints=interpreter_constraints,
                user_requirements=parsed_requirements,
                maybe_constraints_file_path=constraints_path,
            )
        ).strip()
        if python_setup.invalid_lockfile_behavior == InvalidLockfileBehavior.error:
            raise InvalidLockfileError(message)
        logger.warning(message)
573

574

575
def _common_failure_reasons(
    failure_reasons: set[InvalidPythonLockfileReason], maybe_constraints_file_path: str | None
) -> Iterator[str]:
    """Yield human-readable bullet points for failure reasons shared by all lockfile kinds.

    Covers constraints-file, only_binary, no_binary, and manylinux mismatches; other
    reasons are handled by the caller (_invalid_lockfile_error).
    """
    if InvalidPythonLockfileReason.CONSTRAINTS_FILE_MISMATCH in failure_reasons:
        if maybe_constraints_file_path is None:
            yield softwrap(
                """
                - Constraint file expected from lockfile metadata but no
                constraints file configured.  See the option
                `[python].resolves_to_constraints_file`.
                """
            )
        else:
            yield softwrap(
                f"""
                - The constraints file at {maybe_constraints_file_path} has changed from when the
                lockfile was generated. (Constraints files are set via the option
                `[python].resolves_to_constraints_file`)
                """
            )
    if InvalidPythonLockfileReason.ONLY_BINARY_MISMATCH in failure_reasons:
        yield softwrap(
            """
            - The `only_binary` arguments have changed from when the lockfile was generated.
            (`only_binary` is set via the options `[python].resolves_to_only_binary` and deprecated
            `[python].only_binary`)
            """
        )
    if InvalidPythonLockfileReason.NO_BINARY_MISMATCH in failure_reasons:
        yield softwrap(
            """
            - The `no_binary` arguments have changed from when the lockfile was generated.
            (`no_binary` is set via the options `[python].resolves_to_no_binary` and deprecated
            `[python].no_binary`)
            """
        )
    if InvalidPythonLockfileReason.MANYLINUX_MISMATCH in failure_reasons:
        yield softwrap(
            """
            - The `manylinux` argument has changed from when the lockfile was generated.
            (manylinux is set via the option `[python].resolver_manylinux`)
            """
        )
618

619

620
def _invalid_lockfile_error(
    metadata: PythonLockfileMetadata,
    validation: LockfileMetadataValidation,
    lockfile: Lockfile,
    *,
    is_default_user_lockfile: bool,
    user_requirements: list[PipRequirement],
    user_interpreter_constraints: InterpreterConstraints,
    maybe_constraints_file_path: str | None,
) -> Iterator[str]:
    """Yield the pieces of the error/warning message for an invalid lockfile.

    The caller joins and strips the yielded fragments (see validate_metadata).
    """
    resolve = lockfile.resolve_name
    # Mention at most two concrete requirements, then summarize the rest.
    consumed_msg_parts = [f"`{str(r)}`" for r in user_requirements[0:2]]
    if len(user_requirements) > 2:
        consumed_msg_parts.append(
            f"{len(user_requirements) - 2} other "
            f"{pluralize(len(user_requirements) - 2, 'requirement', include_count=False)}"
        )

    yield f"\n\nYou are consuming {comma_separated_list(consumed_msg_parts)} from "
    if lockfile.url.startswith("resource://"):
        yield f"the built-in `{resolve}` lockfile provided by Pants "
    else:
        yield f"the `{resolve}` lockfile at {lockfile.url} "
    yield "with incompatible inputs.\n\n"

    if any(
        i
        in (
            InvalidPythonLockfileReason.INVALIDATION_DIGEST_MISMATCH,
            InvalidPythonLockfileReason.REQUIREMENTS_MISMATCH,
        )
        for i in validation.failure_reasons
    ):
        yield (
            softwrap(
                """
            - The lockfile does not provide all the necessary requirements. You must
            modify the input requirements and/or regenerate the lockfile (see below).
            """
            )
            + "\n\n"
        )
        if is_default_user_lockfile:
            yield softwrap(
                f"""
                - The necessary requirements are specified by requirements targets marked with
                `resolve="{resolve}"`, or those with no explicit resolve (since `{resolve}` is the
                default for this repo).

                - The lockfile destination is specified by the `{resolve}` key in `[python].resolves`.
                """
            )
        else:
            yield softwrap(
                f"""
                - The necessary requirements are specified by requirements targets marked with
                `resolve="{resolve}"`.

                - The lockfile destination is specified by the `{resolve}` key in
                `[python].resolves`.
                """
            )

        if isinstance(metadata, PythonLockfileMetadataV2):
            # Note that by the time we have gotten to this error message, we should have already
            # validated that the transitive closure is using the same resolve, via
            # pex_from_targets.py. This implies that we don't need to worry about users depending
            # on python_requirement targets that aren't in that code's resolve.
            not_in_lock = sorted(str(r) for r in set(user_requirements) - metadata.requirements)
            yield f"\n\n- The requirements not provided by the `{resolve}` resolve are:\n  "
            yield str(not_in_lock)

    if InvalidPythonLockfileReason.INTERPRETER_CONSTRAINTS_MISMATCH in validation.failure_reasons:
        yield "\n\n"
        yield softwrap(
            f"""
            - The inputs use interpreter constraints (`{user_interpreter_constraints}`) that
            are not a subset of those used to generate the lockfile
            (`{metadata.valid_for_interpreter_constraints}`).

            - The input interpreter constraints are specified by your code, using
            the `[python].interpreter_constraints` option and the `interpreter_constraints`
            target field.

            - To create a lockfile with new interpreter constraints, update the option
            `[python].resolves_to_interpreter_constraints`, and then generate the lockfile
            (see below).
            """
        )
        yield f"\n\nSee {doc_url('docs/python/overview/interpreter-compatibility')} for details."

    yield "\n\n"
    # Append the failure reasons shared by all lockfile kinds (constraints file, etc.).
    yield from (
        f"{fail}\n"
        for fail in _common_failure_reasons(validation.failure_reasons, maybe_constraints_file_path)
    )
    yield "To regenerate your lockfile, "
    yield f"run `{bin_name()} generate-lockfiles --resolve={resolve}`."
    yield f"\n\nSee {doc_url('docs/python/overview/third-party-dependencies')} for details.\n\n"
719

720

721
def rules():
    """Return this module's rules for registration with the Pants engine."""
    return collect_rules()
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc