• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

pantsbuild / pants / 24145945949

08 Apr 2026 04:14PM UTC coverage: 82.077% (-10.8%) from 92.91%
24145945949

Pull #23233

github

web-flow
Merge 089d98e3c into 9036734c9
Pull Request #23233: Introduce a LockfileFormat enum.

8 of 11 new or added lines in 4 files covered. (72.73%)

7635 existing lines in 306 files now uncovered.

63732 of 77649 relevant lines covered (82.08%)

2.96 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

73.91
/src/python/pants/backend/python/util_rules/pex_requirements.py
1
# Copyright 2022 Pants project contributors (see CONTRIBUTORS.md).
2
# Licensed under the Apache License, Version 2.0 (see LICENSE).
3

4
from __future__ import annotations
7✔
5

6
import importlib.resources
7✔
7
import json
7✔
8
import logging
7✔
9
from collections.abc import Iterable, Iterator
7✔
10
from dataclasses import dataclass, field
7✔
11
from enum import Enum
7✔
12
from typing import TYPE_CHECKING
7✔
13
from urllib.parse import urlparse
7✔
14

15
from pants.backend.python.subsystems.repos import PythonRepos
7✔
16
from pants.backend.python.subsystems.setup import InvalidLockfileBehavior, PythonSetup
7✔
17
from pants.backend.python.target_types import PythonRequirementsField
7✔
18
from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints
7✔
19
from pants.backend.python.util_rules.lockfile_metadata import (
7✔
20
    InvalidPythonLockfileReason,
21
    PythonLockfileMetadata,
22
    PythonLockfileMetadataV2,
23
)
24
from pants.build_graph.address import Address
7✔
25
from pants.core.util_rules.lockfile_metadata import (
7✔
26
    InvalidLockfileError,
27
    LockfileMetadataValidation,
28
    NoLockfileMetadataBlock,
29
)
30
from pants.engine.engine_aware import EngineAwareParameter
7✔
31
from pants.engine.fs import CreateDigest, Digest, FileContent, GlobMatchErrorBehavior, PathGlobs
7✔
32
from pants.engine.internals.native_engine import IntrinsicError
7✔
33
from pants.engine.intrinsics import (
7✔
34
    create_digest,
35
    get_digest_contents,
36
    get_digest_entries,
37
    path_globs_to_digest,
38
)
39
from pants.engine.rules import collect_rules, concurrently, implicitly, rule
7✔
40
from pants.engine.unions import UnionMembership
7✔
41
from pants.util.docutil import bin_name, doc_url
7✔
42
from pants.util.ordered_set import FrozenOrderedSet
7✔
43
from pants.util.pip_requirement import PipRequirement
7✔
44
from pants.util.requirements import parse_requirements_file
7✔
45
from pants.util.strutil import comma_separated_list, pluralize, softwrap
7✔
46

47
if TYPE_CHECKING:
48
    from pants.backend.python.util_rules.pex import Pex
49

50

51
# Module-level logger (named after this module, per stdlib logging convention)
# used for lockfile loading/validation diagnostics.
logger = logging.getLogger(__name__)
52

53

54
class LockfileFormat(Enum):
    """The known on-disk formats for a Python lockfile."""

    # A Pex-native JSON lockfile.
    Pex = "pex"
    # The very old, deprecated constraints-based "lockfile" that should
    # be removed entirely.
    ConstraintsDeprecated = "constraints_deprecated"
59

60

61
@dataclass(frozen=True)
class Resolve:
    """A named resolve for a "user lockfile".

    Soon to be the only kind of lockfile, as this class will help get rid of the
    "tool lockfile" concept.
    """

    # TODO: Once we get rid of old-style tool lockfiles we can possibly
    #  unify this with EntireLockfile.
    # TODO: We might want to add the requirements subset to this data structure,
    #  to further detangle this from PexRequirements.
    name: str
    use_entire_lockfile: bool
73

74

75
@dataclass(frozen=True)
class Lockfile:
    """A lockfile identified by URL, with context for attributing errors to their origin."""

    url: str
    url_description_of_origin: str
    resolve_name: str
    # Optional digest used to invalidate the lockfile against its inputs.
    lockfile_hex_digest: str | None = None
81

82

83
@rule
async def get_lockfile_for_resolve(resolve: Resolve, python_setup: PythonSetup) -> Lockfile:
    """Look up the lockfile configured for a named resolve in `[python].resolves`."""
    url = python_setup.resolves.get(resolve.name)
    if not url:
        raise ValueError(f"No such resolve: {resolve.name}")
    origin = f"the resolve `{resolve.name}`"
    return Lockfile(url=url, url_description_of_origin=origin, resolve_name=resolve.name)
93

94

95
@dataclass(frozen=True)
class LoadedLockfile:
    """A lockfile after loading and header stripping.

    Validation is deferred until consumption time, because each consumed subset (in the case of a
    PEX-native lockfile) can be individually validated.
    """

    # The digest of the loaded lockfile (which may not be identical to the input).
    lockfile_digest: Digest
    # The path of the loaded lockfile within the Digest.
    lockfile_path: str
    # The loaded metadata for this lockfile, if any.
    # NB: `field(hash=False)` excludes it from the generated __hash__ (it still participates
    # in __eq__).
    metadata: PythonLockfileMetadata | None = field(hash=False)
    # An estimate of the number of requirements in this lockfile, to be used as a heuristic for
    # available parallelism.
    requirement_estimate: int
    # The format of the loaded lockfile.
    lockfile_format: LockfileFormat
    # If lockfile_format is ConstraintsDeprecated, the lockfile parsed as constraints strings,
    # for use when the lockfile needs to be subsetted (see #15031, #12222).
    as_constraints_strings: FrozenOrderedSet[str] | None
    # The original file or file content (which may not have identical content to the output
    # `lockfile_digest`).
    original_lockfile: Lockfile
120

121

122
@dataclass(frozen=True)
class LoadedLockfileRequest:
    """A request to load and validate the content of the given lockfile."""

    # The lockfile to load and strip.
    lockfile: Lockfile
127

128

129
def strip_comments_from_pex_json_lockfile(lockfile_bytes: bytes) -> bytes:
    """Pex does not like the header Pants adds to lockfiles, as it violates JSON.

    Note that we only strip lines starting with `//`, which is all that Pants will ever add. If
    users add their own comments, things will fail.
    """
    kept_lines = [
        line
        for line in lockfile_bytes.splitlines()
        if not line.lstrip().startswith(b"//")
    ]
    return b"\n".join(kept_lines)
138

139

140
def is_probably_pex_json_lockfile(lockfile_bytes: bytes) -> bool:
    """Guess whether these bytes are a Pex JSON lockfile (vs. a requirements-style file).

    The first line that is non-empty and not a `//` comment decides: a Pex JSON lockfile
    opens with `{`. (Note that pip/Pex complain if a requirements.txt style starts with `{`.)
    """
    for candidate in lockfile_bytes.splitlines():
        if not candidate or candidate.startswith(b"//"):
            continue
        return candidate.lstrip().startswith(b"{")
    return False
146

147

148
def _pex_lockfile_requirement_count(lockfile_bytes: bytes) -> int:
7✔
149
    # TODO: this is a very naive heuristic that will overcount, and also relies on Pants
150
    #  setting `--indent` when generating lockfiles. More robust would be parsing the JSON
151
    #  and getting the len(locked_resolves.locked_requirements.project_name), but we risk
152
    #  if Pex ever changes its lockfile format.
153

154
    num_lines = len(lockfile_bytes.splitlines())
7✔
155
    # These are very naive estimates, and they bias towards overcounting. For example, requirements
156
    # often are 20+ lines.
157
    num_lines_for_options = 10
7✔
158
    lines_per_req = 10
7✔
159
    return max((num_lines - num_lines_for_options) // lines_per_req, 2)
7✔
160

161

162
def get_metadata(
    python_setup: PythonSetup,
    lock_bytes: bytes,
    lockfile_path: str | None,
    resolve_name: str,
    delimiter: str,
) -> PythonLockfileMetadata | None:
    """Parse the Pants metadata front-matter block (if any) out of a lockfile's bytes.

    Returns None when validation is disabled (`[python].invalid_lockfile_behavior = ignore`)
    or when the lockfile has no metadata block (i.e. was not generated by Pants).

    `delimiter` is the comment delimiter of the metadata block ("//" for Pex JSON
    lockfiles, "#" for the deprecated requirements/constraints style).
    """
    metadata: PythonLockfileMetadata | None = None
    if python_setup.invalid_lockfile_behavior != InvalidLockfileBehavior.ignore:
        try:
            metadata = PythonLockfileMetadata.from_lockfile(
                lockfile=lock_bytes,
                lockfile_path=lockfile_path,
                resolve_name=resolve_name,
                # NB: "delimeter" (sic) is the keyword's spelling in from_lockfile().
                delimeter=delimiter,
            )
        except NoLockfileMetadataBlock:
            # We don't validate if the file isn't a pants-generated lockfile (as determined
            # by the lack of a metadata block). But we propagate any other type of
            # InvalidLockfileError incurred while parsing the metadata block.
            logger.debug(
                f"Lockfile for resolve {resolve_name} "
                f"{('at ' + lockfile_path) if lockfile_path else ''}"
                f" has no metadata block, so was not generated by Pants. "
                f"Lockfile will not be validated."
            )
    return metadata
189

190

191
async def read_file_or_resource(url: str, description_of_origin: str) -> Digest:
    """Read from a path, file:// or resource:// URL and return the digest of the content.

    If no content is found at the path/URL, raise.

    Raises ValueError for any scheme other than "", "file", or "resource". For plain
    paths and file:// URLs a missing file surfaces as a glob-match error attributed to
    `description_of_origin`; for resource:// URLs, importlib raises (e.g. FileNotFoundError).
    """
    parts = urlparse(url)
    # urlparse retains the leading / in URLs with a netloc.
    path = parts.path[1:] if parts.path.startswith("/") else parts.path
    if parts.scheme in {"", "file"}:
        # Plain path or file:// URL: match it on disk, erroring if absent.
        digest = await path_globs_to_digest(
            PathGlobs(
                [path],
                glob_match_error_behavior=GlobMatchErrorBehavior.error,
                description_of_origin=description_of_origin,
            )
        )
    elif parts.scheme == "resource":
        _fc = FileContent(
            path,
            # The "netloc" in our made-up "resource://" scheme is the package.
            importlib.resources.files(parts.netloc).joinpath(path).read_bytes(),
        )
        digest = await create_digest(CreateDigest([_fc]))
    else:
        raise ValueError(
            f"Unsupported scheme {parts.scheme} for URL: {url} (origin: {description_of_origin})"
        )
    return digest
219

220

221
@rule
async def load_lockfile(
    request: LoadedLockfileRequest,
    python_setup: PythonSetup,
) -> LoadedLockfile:
    """Load a lockfile's bytes, determine its format, strip any Pants header, and find metadata.

    Metadata is looked up first in a sidecar metadata file/resource next to the lockfile;
    failing that, in a comment front-matter block prepended to the lockfile itself.
    """
    lockfile = request.lockfile
    # TODO: This is temporary. Once we regenerate all embedded lockfiles to have sidecar metadata
    #  files instead of metadata front matter, we won't need to call get_metadata() on them.
    synthetic_lock = lockfile.url.startswith("resource://")
    lockfile_digest = await read_file_or_resource(lockfile.url, lockfile.url_description_of_origin)
    lockfile_digest_entries = await get_digest_entries(lockfile_digest)
    lockfile_path = lockfile_digest_entries[0].path

    lockfile_contents = await get_digest_contents(lockfile_digest)
    lock_bytes = lockfile_contents[0].content
    lockfile_format = (
        LockfileFormat.Pex
        if is_probably_pex_json_lockfile(lock_bytes)
        else LockfileFormat.ConstraintsDeprecated
    )
    constraints_strings = None

    metadata_url = PythonLockfileMetadata.metadata_location_for_lockfile(lockfile.url)
    metadata = None
    if python_setup.invalid_lockfile_behavior != InvalidLockfileBehavior.ignore:
        try:
            metadata_digest = await read_file_or_resource(
                metadata_url,
                description_of_origin="We squelch errors, so this is never seen by users",
            )
            digest_contents = await get_digest_contents(metadata_digest)
            metadata_bytes = digest_contents[0].content
            json_dict = json.loads(metadata_bytes)
            metadata = PythonLockfileMetadata.from_json_dict(
                json_dict,
                lockfile_description=f"the lockfile for `{lockfile.resolve_name}`",
                error_suffix=softwrap(
                    # NB: fixed a missing closing backtick after the generate-lockfiles command.
                    f"""
                    To resolve this error, you will need to regenerate the lockfile by running
                    `{bin_name()} generate-lockfiles --resolve={lockfile.resolve_name}`.
                    """
                ),
            )
            requirement_estimate = _pex_lockfile_requirement_count(lock_bytes)
        except (IntrinsicError, FileNotFoundError):
            # No metadata file or resource found, so fall through to finding a metadata
            # header block prepended to the lockfile itself.
            pass

    if not metadata:
        if lockfile_format == LockfileFormat.Pex:
            header_delimiter = "//"
            # Pex chokes on the JSON-violating `//` header Pants prepends, so strip it out
            # of the digest we hand to Pex.
            stripped_lock_bytes = strip_comments_from_pex_json_lockfile(lock_bytes)
            lockfile_digest = await create_digest(
                CreateDigest([FileContent(lockfile_path, stripped_lock_bytes)])
            )
            requirement_estimate = _pex_lockfile_requirement_count(lock_bytes)
        else:
            header_delimiter = "#"
            lock_string = lock_bytes.decode()
            # Note: this is a very naive heuristic. It will overcount because entries often
            # have >1 line due to `--hash`.
            requirement_estimate = len(lock_string.splitlines())
            constraints_strings = FrozenOrderedSet(
                str(req) for req in parse_requirements_file(lock_string, rel_path=lockfile_path)
            )

        metadata = get_metadata(
            python_setup,
            lock_bytes,
            # Synthetic (resource://) lockfiles have no meaningful on-disk path to report.
            None if synthetic_lock else lockfile_path,
            lockfile.resolve_name,
            header_delimiter,
        )

    return LoadedLockfile(
        lockfile_digest,
        lockfile_path,
        metadata,
        requirement_estimate,
        lockfile_format,
        constraints_strings,
        original_lockfile=lockfile,
    )
305

306

307
@dataclass(frozen=True)
class EntireLockfile:
    """A request to resolve the entire contents of a lockfile.

    This resolution mode is used in a few cases:
    1. for poetry or handwritten lockfiles (which do not support being natively subsetted the
       way that a PEX lockfile can be), in order to build a repository-PEX to subset separately.
    2. for tool lockfiles, which (regardless of format), need to resolve the entire lockfile
       content anyway.
    """

    # The lockfile whose full contents should be resolved.
    lockfile: Lockfile
    # If available, the current complete set of requirement strings that influence this lockfile.
    # Used for metadata validation.
    complete_req_strings: tuple[str, ...] | None = None
322

323

324
@dataclass(frozen=True)
class PexRequirements:
    """A request to resolve a series of requirements (optionally from a "superset" resolve)."""

    req_strings_or_addrs: FrozenOrderedSet[str | Address]
    constraints_strings: FrozenOrderedSet[str]
    # If these requirements should be resolved as a subset of either a repository PEX, or a
    # PEX-native lockfile, the superset to use. # NB: Use of a lockfile here asserts that the
    # lockfile is PEX-native, because legacy lockfiles do not support subset resolves.
    from_superset: Pex | Resolve | None
    description_of_origin: str

    # NB: @dataclass does not overwrite an __init__ defined in the class body, so this
    # explicit constructor (which sorts and freezes its inputs) is the one that runs.
    def __init__(
        self,
        req_strings_or_addrs: Iterable[str | Address] = (),
        *,
        constraints_strings: Iterable[str] = (),
        from_superset: Pex | Resolve | None = None,
        description_of_origin: str = "",
    ) -> None:
        """
        :param req_strings_or_addrs: The requirement strings to resolve, or addresses
          of targets that refer to them, or string specs of such addresses.
        :param constraints_strings: Constraints strings to apply during the resolve.
        :param from_superset: An optional superset PEX or lockfile to resolve the req strings from.
        :param description_of_origin: A human-readable description of what these requirements
          represent, for use in error messages.
        """
        # object.__setattr__ is required to assign fields on a frozen dataclass from
        # within a hand-written __init__.
        # NOTE(review): sorted() over a mix of str and Address assumes they are mutually
        # orderable — confirm Address supports comparison with str, or that callers never mix.
        object.__setattr__(
            self, "req_strings_or_addrs", FrozenOrderedSet(sorted(req_strings_or_addrs))
        )
        object.__setattr__(
            self, "constraints_strings", FrozenOrderedSet(sorted(constraints_strings))
        )
        object.__setattr__(self, "from_superset", from_superset)
        object.__setattr__(self, "description_of_origin", description_of_origin)

    @classmethod
    def req_strings_from_requirement_fields(
        cls, fields: Iterable[PythonRequirementsField]
    ) -> FrozenOrderedSet[str]:
        """A convenience when you only need the raw requirement strings from fields and don't need
        to consider things like constraints or resolves."""
        return FrozenOrderedSet(
            sorted(str(python_req) for fld in fields for python_req in fld.value)
        )

    def __bool__(self) -> bool:
        # Truthy iff there is at least one requirement string/address to resolve.
        return bool(self.req_strings_or_addrs)
373

374

375
@dataclass(frozen=True)
class ResolvePexConstraintsFile:
    """A constraints file: its digested content, its path, and its parsed requirements."""

    digest: Digest
    path: str
    constraints: FrozenOrderedSet[PipRequirement]
380

381

382
@dataclass(frozen=True)
class ResolvePexConfig:
    """Configuration from `[python]` that impacts how the resolve is created."""

    indexes: tuple[str, ...]
    find_links: tuple[str, ...]
    manylinux: str | None
    constraints_file: ResolvePexConstraintsFile | None
    only_binary: FrozenOrderedSet[str]
    no_binary: FrozenOrderedSet[str]
    excludes: FrozenOrderedSet[str]
    overrides: FrozenOrderedSet[str]
    sources: FrozenOrderedSet[str]
    path_mappings: tuple[str, ...]
    lock_style: str
    complete_platforms: tuple[str, ...]
    uploaded_prior_to: str | None

    def pex_args(self) -> Iterator[str]:
        """Arguments for Pex for indexes/--find-links, manylinux, and path mappings.

        Does not include arguments for constraints files, which must be set up independently.
        """
        # NB: In setting `--no-pypi`, we rely on the default value of `[python-repos].indexes`
        # including PyPI, which will override `--no-pypi` and result in using PyPI in the default
        # case. Why set `--no-pypi`, then? We need to do this so that
        # `[python-repos].indexes = ['custom_url']` will only point to that index and not include
        # PyPI.
        yield "--no-pypi"
        for index_url in self.indexes:
            yield f"--index={index_url}"
        for find_links_repo in self.find_links:
            yield f"--find-links={find_links_repo}"

        if self.manylinux:
            yield "--manylinux"
            yield self.manylinux
        else:
            yield "--no-manylinux"

        # Pex logically plumbs through equivalent settings, but uses a
        # separate flag instead of the Pip magic :all:/:none: syntax. To
        # support the existing Pants config settings we need to go from
        # :all:/:none: --> Pex options, which Pex will translate back into Pip
        # options. Note that Pex's --wheel (for example) means "allow
        # wheels", not "require wheels".
        if ":all:" in self.only_binary:
            yield "--wheel"
            yield "--no-build"
        elif ":none:" in self.only_binary:
            yield "--no-wheel"
            yield "--build"
        elif self.only_binary:
            for pkg in self.only_binary:
                yield f"--only-binary={pkg}"

        if ":all:" in self.no_binary:
            yield "--no-wheel"
            yield "--build"
        elif ":none:" in self.no_binary:
            yield "--wheel"
            yield "--no-build"
        elif self.no_binary:
            for pkg in self.no_binary:
                yield f"--only-build={pkg}"

        for mapping in self.path_mappings:
            yield f"--path-mapping={mapping}"

        for exclude in self.excludes:
            yield f"--exclude={exclude}"
        for source in self.sources:
            yield f"--source={source}"

        if self.uploaded_prior_to:
            yield f"--uploaded-prior-to={self.uploaded_prior_to}"
451

452

453
@dataclass(frozen=True)
class ResolvePexConfigRequest(EngineAwareParameter):
    """Find all configuration from `[python]` that impacts how the resolve is created.

    If `resolve_name` is None, then most per-resolve options will be ignored because there is no way
    for users to configure them. However, some options like `[python-repos].indexes` will still be
    loaded.
    """

    resolve_name: str | None

    def debug_hint(self) -> str:
        # A readable label for engine logs when no resolve name was given.
        if self.resolve_name:
            return self.resolve_name
        return "<no resolve>"
466

467

468
@rule
async def determine_resolve_pex_config(
    request: ResolvePexConfigRequest,
    python_setup: PythonSetup,
    python_repos: PythonRepos,
    union_membership: UnionMembership,
) -> ResolvePexConfig:
    """Assemble the per-resolve Pex configuration from `[python]` and `[python-repos]` options.

    When `request.resolve_name` is None, only repo-wide options are populated; all
    per-resolve settings are left empty/default.
    """
    # NOTE(review): `union_membership` appears unused in this body — presumably required for
    # rule-graph wiring; confirm before removing.
    if request.resolve_name is None:
        return ResolvePexConfig(
            indexes=python_repos.indexes,
            find_links=python_repos.find_links,
            manylinux=python_setup.manylinux,
            constraints_file=None,
            no_binary=FrozenOrderedSet(),
            only_binary=FrozenOrderedSet(),
            excludes=FrozenOrderedSet(),
            overrides=FrozenOrderedSet(),
            sources=FrozenOrderedSet(),
            path_mappings=python_repos.path_mappings,
            lock_style="universal",  # Default to universal when no resolve name
            complete_platforms=(),  # No complete platforms by default
            uploaded_prior_to=None,
        )

    # Per-resolve options, each falling back to an empty/default value when unset.
    no_binary = python_setup.resolves_to_no_binary().get(request.resolve_name) or []
    only_binary = python_setup.resolves_to_only_binary().get(request.resolve_name) or []
    excludes = python_setup.resolves_to_excludes().get(request.resolve_name) or []
    overrides = python_setup.resolves_to_overrides().get(request.resolve_name) or []
    sources = python_setup.resolves_to_sources().get(request.resolve_name) or []
    lock_style = python_setup.resolves_to_lock_style().get(request.resolve_name) or "universal"
    complete_platforms = tuple(
        python_setup.resolves_to_complete_platforms().get(request.resolve_name) or []
    )
    uploaded_prior_to = python_setup.resolves_to_uploaded_prior_to().get(request.resolve_name)

    # If a constraints file is configured for this resolve, read and parse it.
    constraints_file: ResolvePexConstraintsFile | None = None
    _constraints_file_path = python_setup.resolves_to_constraints_file().get(request.resolve_name)
    if _constraints_file_path:
        _constraints_origin = softwrap(
            f"""
            the option `[python].resolves_to_constraints_file` for the resolve
            '{request.resolve_name}'
            """
        )
        _constraints_path_globs = PathGlobs(
            [_constraints_file_path] if _constraints_file_path else [],
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin=_constraints_origin,
        )
        # TODO: Probably re-doing work here - instead of just calling one, then the next
        _constraints_digest, _constraints_digest_contents = await concurrently(
            path_globs_to_digest(_constraints_path_globs),
            get_digest_contents(**implicitly({_constraints_path_globs: PathGlobs})),
        )

        # The option must match exactly one file; a glob matching several is a user error.
        if len(_constraints_digest_contents) != 1:
            raise ValueError(
                softwrap(
                    f"""
                    Expected only one file from {_constraints_origin}, but matched:
                    {sorted(fc.path for fc in _constraints_digest_contents)}

                    Did you use a glob like `*`?
                    """
                )
            )
        _constraints_file_content = next(iter(_constraints_digest_contents))
        constraints = parse_requirements_file(
            _constraints_file_content.content.decode("utf-8"), rel_path=_constraints_file_path
        )
        constraints_file = ResolvePexConstraintsFile(
            _constraints_digest, _constraints_file_path, FrozenOrderedSet(constraints)
        )

    return ResolvePexConfig(
        indexes=python_repos.indexes,
        find_links=python_repos.find_links,
        manylinux=python_setup.manylinux,
        constraints_file=constraints_file,
        no_binary=FrozenOrderedSet(no_binary),
        only_binary=FrozenOrderedSet(only_binary),
        excludes=FrozenOrderedSet(excludes),
        overrides=FrozenOrderedSet(overrides),
        sources=FrozenOrderedSet(sources),
        path_mappings=python_repos.path_mappings,
        lock_style=lock_style,
        complete_platforms=complete_platforms,
        uploaded_prior_to=uploaded_prior_to,
    )

558

559
def validate_metadata(
    metadata: PythonLockfileMetadata,
    interpreter_constraints: InterpreterConstraints,
    lockfile: Lockfile,
    consumed_req_strings: Iterable[str],
    validate_consumed_req_strings: bool,
    python_setup: PythonSetup,
    resolve_config: ResolvePexConfig,
) -> None:
    """Given interpreter constraints and requirements to be consumed, validate lockfile metadata.

    On mismatch, raises InvalidLockfileError or logs a warning, depending on
    `[python].invalid_lockfile_behavior`.
    """

    # TODO(#12314): Improve the exception if invalid strings
    user_requirements = [PipRequirement.parse(i) for i in consumed_req_strings]
    validation = metadata.is_valid_for(
        expected_invalidation_digest=lockfile.lockfile_hex_digest,
        user_interpreter_constraints=interpreter_constraints,
        interpreter_universe=python_setup.interpreter_versions_universe,
        # NOTE(review): an empty dict (not list) is passed to skip requirement checking —
        # presumably is_valid_for treats it as an empty collection; confirm.
        user_requirements=user_requirements if validate_consumed_req_strings else {},
        manylinux=resolve_config.manylinux,
        requirement_constraints=(
            resolve_config.constraints_file.constraints
            if resolve_config.constraints_file
            else set()
        ),
        only_binary=resolve_config.only_binary,
        no_binary=resolve_config.no_binary,
        excludes=resolve_config.excludes,
        overrides=resolve_config.overrides,
        sources=resolve_config.sources,
        lock_style=resolve_config.lock_style,
        complete_platforms=resolve_config.complete_platforms,
        uploaded_prior_to=resolve_config.uploaded_prior_to,
    )
    if validation:
        # Metadata matches all inputs: nothing to report.
        return

    error_msg_kwargs = dict(
        metadata=metadata,
        validation=validation,
        lockfile=lockfile,
        is_default_user_lockfile=lockfile.resolve_name == python_setup.default_resolve,
        user_interpreter_constraints=interpreter_constraints,
        user_requirements=user_requirements,
        maybe_constraints_file_path=(
            resolve_config.constraints_file.path if resolve_config.constraints_file else None
        ),
    )
    msg_iter = _invalid_lockfile_error(**error_msg_kwargs)  # type: ignore[arg-type]
    msg = "".join(msg_iter).strip()
    if python_setup.invalid_lockfile_behavior == InvalidLockfileBehavior.error:
        raise InvalidLockfileError(msg)
    logger.warning(msg)
611

612

613
def _common_failure_reasons(
    failure_reasons: set[InvalidPythonLockfileReason], maybe_constraints_file_path: str | None
) -> Iterator[str]:
    """Yield bullet-point explanations for lockfile-invalidation reasons shared across
    error messages (constraints file, only_binary/no_binary, manylinux, uploaded_prior_to)."""
    if InvalidPythonLockfileReason.CONSTRAINTS_FILE_MISMATCH in failure_reasons:
        if maybe_constraints_file_path is None:
            yield softwrap(
                """
                - Constraint file expected from lockfile metadata but no
                constraints file configured.  See the option
                `[python].resolves_to_constraints_file`.
                """
            )
        else:
            yield softwrap(
                f"""
                - The constraints file at {maybe_constraints_file_path} has changed from when the
                lockfile was generated. (Constraints files are set via the option
                `[python].resolves_to_constraints_file`)
                """
            )
    if InvalidPythonLockfileReason.ONLY_BINARY_MISMATCH in failure_reasons:
        yield softwrap(
            """
            - The `only_binary` arguments have changed from when the lockfile was generated.
            (`only_binary` is set via the options `[python].resolves_to_only_binary` and deprecated
            `[python].only_binary`)
            """
        )
    if InvalidPythonLockfileReason.NO_BINARY_MISMATCH in failure_reasons:
        yield softwrap(
            """
            - The `no_binary` arguments have changed from when the lockfile was generated.
            (`no_binary` is set via the options `[python].resolves_to_no_binary` and deprecated
            `[python].no_binary`)
            """
        )
    if InvalidPythonLockfileReason.MANYLINUX_MISMATCH in failure_reasons:
        yield softwrap(
            """
            - The `manylinux` argument has changed from when the lockfile was generated.
            (manylinux is set via the option `[python].resolver_manylinux`)
            """
        )
    if InvalidPythonLockfileReason.UPLOADED_PRIOR_TO_MISMATCH in failure_reasons:
        yield softwrap(
            """
            - The `uploaded_prior_to` argument has changed from when the lockfile was generated.
            (uploaded_prior_to is set via the option `[python].resolves_to_uploaded_prior_to`)
            """
        )
663

664

665
def _invalid_lockfile_error(
    metadata: PythonLockfileMetadata,
    validation: LockfileMetadataValidation,
    lockfile: Lockfile,
    *,
    is_default_user_lockfile: bool,
    user_requirements: list[PipRequirement],
    user_interpreter_constraints: InterpreterConstraints,
    maybe_constraints_file_path: str | None,
) -> Iterator[str]:
    """Yield the pieces of an error message explaining why `lockfile` is stale/invalid.

    The caller is expected to concatenate the yielded fragments into one message; fragments
    carry their own leading/trailing whitespace (`\\n\\n` separators), so they can simply be
    joined with `"".join(...)`.

    Args:
        metadata: The metadata header parsed from the lockfile.
        validation: The validation result; `validation.failure_reasons` drives which message
            sections are emitted.
        lockfile: The lockfile being consumed; provides the resolve name and URL.
        is_default_user_lockfile: Whether this resolve is the repo's default resolve, which
            changes how the "necessary requirements" guidance is worded.
        user_requirements: The requirements the consuming code needs from the lockfile.
        user_interpreter_constraints: The interpreter constraints of the consuming code.
        maybe_constraints_file_path: Path of the constraints file, if one is configured
            (forwarded to `_common_failure_reasons`).
    """
    resolve = lockfile.resolve_name
    # Show at most the first two requirements verbatim; summarize the rest as a count.
    consumed_msg_parts = [f"`{str(r)}`" for r in user_requirements[0:2]]
    if len(user_requirements) > 2:
        consumed_msg_parts.append(
            f"{len(user_requirements) - 2} other "
            f"{pluralize(len(user_requirements) - 2, 'requirement', include_count=False)}"
        )

    yield f"\n\nYou are consuming {comma_separated_list(consumed_msg_parts)} from "
    # A `resource://` URL means the lockfile ships inside the Pants distribution itself.
    if lockfile.url.startswith("resource://"):
        yield f"the built-in `{resolve}` lockfile provided by Pants "
    else:
        yield f"the `{resolve}` lockfile at {lockfile.url} "
    yield "with incompatible inputs.\n\n"

    # Either a digest or a requirements mismatch means the lockfile may not cover every
    # requirement the consumer needs.
    if any(
        i
        in (
            InvalidPythonLockfileReason.INVALIDATION_DIGEST_MISMATCH,
            InvalidPythonLockfileReason.REQUIREMENTS_MISMATCH,
        )
        for i in validation.failure_reasons
    ):
        yield (
            softwrap(
                """
            - The lockfile does not provide all the necessary requirements. You must
            modify the input requirements and/or regenerate the lockfile (see below).
            """
            )
            + "\n\n"
        )
        if is_default_user_lockfile:
            yield softwrap(
                f"""
                - The necessary requirements are specified by requirements targets marked with
                `resolve="{resolve}"`, or those with no explicit resolve (since `{resolve}` is the
                default for this repo).

                - The lockfile destination is specified by the `{resolve}` key in `[python].resolves`.
                """
            )
        else:
            yield softwrap(
                f"""
                - The necessary requirements are specified by requirements targets marked with
                `resolve="{resolve}"`.

                - The lockfile destination is specified by the `{resolve}` key in
                `[python].resolves`.
                """
            )

        # Only V2 metadata records the full requirement set, so only then can we list the
        # exact requirements that the lockfile is missing.
        if isinstance(metadata, PythonLockfileMetadataV2):
            # Note that by the time we have gotten to this error message, we should have already
            # validated that the transitive closure is using the same resolve, via
            # pex_from_targets.py. This implies that we don't need to worry about users depending
            # on python_requirement targets that aren't in that code's resolve.
            not_in_lock = sorted(str(r) for r in set(user_requirements) - metadata.requirements)
            yield f"\n\n- The requirements not provided by the `{resolve}` resolve are:\n  "
            yield str(not_in_lock)

    if InvalidPythonLockfileReason.INTERPRETER_CONSTRAINTS_MISMATCH in validation.failure_reasons:
        yield "\n\n"
        yield softwrap(
            f"""
            - The inputs use interpreter constraints (`{user_interpreter_constraints}`) that
            are not a subset of those used to generate the lockfile
            (`{metadata.valid_for_interpreter_constraints}`).

            - The input interpreter constraints are specified by your code, using
            the `[python].interpreter_constraints` option and the `interpreter_constraints`
            target field.

            - To create a lockfile with new interpreter constraints, update the option
            `[python].resolves_to_interpreter_constraints`, and then generate the lockfile
            (see below).
            """
        )
        yield f"\n\nSee {doc_url('docs/python/overview/interpreter-compatibility')} for details."

    yield "\n\n"
    # Failure reasons shared with other lockfile kinds (constraints file, only_binary, etc.).
    yield from (
        f"{fail}\n"
        for fail in _common_failure_reasons(validation.failure_reasons, maybe_constraints_file_path)
    )
    yield "To regenerate your lockfile, "
    yield f"run `{bin_name()} generate-lockfiles --resolve={resolve}`."
    yield f"\n\nSee {doc_url('docs/python/overview/third-party-dependencies')} for details.\n\n"
764

765

766
def rules():
    """Return this module's rules for registration with the Pants engine."""
    module_rules = collect_rules()
    return module_rules
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc