• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

pantsbuild / pants / 24055979590

06 Apr 2026 11:17PM UTC coverage: 52.37% (-40.5%) from 92.908%
24055979590

Pull #23225

github

web-flow
Merge 67474653c into 542ca048d
Pull Request #23225: Add --test-show-all-batch-targets to expose all targets in batched pytest

6 of 17 new or added lines in 2 files covered. (35.29%)

23030 existing lines in 605 files now uncovered.

31643 of 60422 relevant lines covered (52.37%)

1.05 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

70.3
/src/python/pants/backend/python/util_rules/pex_requirements.py
1
# Copyright 2022 Pants project contributors (see CONTRIBUTORS.md).
2
# Licensed under the Apache License, Version 2.0 (see LICENSE).
3

4
from __future__ import annotations
2✔
5

6
import importlib.resources
2✔
7
import json
2✔
8
import logging
2✔
9
from collections.abc import Iterable, Iterator
2✔
10
from dataclasses import dataclass, field
2✔
11
from typing import TYPE_CHECKING
2✔
12
from urllib.parse import urlparse
2✔
13

14
from pants.backend.python.subsystems.repos import PythonRepos
2✔
15
from pants.backend.python.subsystems.setup import InvalidLockfileBehavior, PythonSetup
2✔
16
from pants.backend.python.target_types import PythonRequirementsField
2✔
17
from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints
2✔
18
from pants.backend.python.util_rules.lockfile_metadata import (
2✔
19
    InvalidPythonLockfileReason,
20
    PythonLockfileMetadata,
21
    PythonLockfileMetadataV2,
22
)
23
from pants.build_graph.address import Address
2✔
24
from pants.core.util_rules.lockfile_metadata import (
2✔
25
    InvalidLockfileError,
26
    LockfileMetadataValidation,
27
    NoLockfileMetadataBlock,
28
)
29
from pants.engine.engine_aware import EngineAwareParameter
2✔
30
from pants.engine.fs import CreateDigest, Digest, FileContent, GlobMatchErrorBehavior, PathGlobs
2✔
31
from pants.engine.internals.native_engine import IntrinsicError
2✔
32
from pants.engine.intrinsics import (
2✔
33
    create_digest,
34
    get_digest_contents,
35
    get_digest_entries,
36
    path_globs_to_digest,
37
)
38
from pants.engine.rules import collect_rules, concurrently, implicitly, rule
2✔
39
from pants.engine.unions import UnionMembership
2✔
40
from pants.util.docutil import bin_name, doc_url
2✔
41
from pants.util.ordered_set import FrozenOrderedSet
2✔
42
from pants.util.pip_requirement import PipRequirement
2✔
43
from pants.util.requirements import parse_requirements_file
2✔
44
from pants.util.strutil import comma_separated_list, pluralize, softwrap
2✔
45

46
if TYPE_CHECKING:
47
    from pants.backend.python.util_rules.pex import Pex
48

49

50
# Module-level logger, used below for lockfile-validation warnings and debug output.
logger = logging.getLogger(__name__)
51

52

53
@dataclass(frozen=True)
class Resolve:
    """A named resolve for a "user lockfile".

    Soon to be the only kind of lockfile, as this class will help get rid of the
    "tool lockfile" concept.

    TODO: Once we get rid of old-style tool lockfiles we can possibly
      unify this with EntireLockfile.
    TODO: We might want to add the requirements subset to this data structure,
      to further detangle this from PexRequirements.
    """

    # The resolve name, i.e. a key of the `[python].resolves` option
    # (see `get_lockfile_for_resolve`).
    name: str
    # Whether consumers should resolve the entire lockfile rather than a subset.
    # NOTE(review): inferred from the name — confirm against consumers of this field.
    use_entire_lockfile: bool
65

66

67
@dataclass(frozen=True)
class Lockfile:
    """A reference to a lockfile, identified by a path, `file://` or `resource://` URL."""

    # Path, file:// or resource:// URL of the lockfile (see `read_file_or_resource`).
    url: str
    # Human-readable description of where `url` came from, for use in error messages.
    url_description_of_origin: str
    # The name of the resolve this lockfile backs.
    resolve_name: str
    # Expected invalidation digest; used by `validate_metadata` when present.
    lockfile_hex_digest: str | None = None
73

74

75
@rule
async def get_lockfile_for_resolve(resolve: Resolve, python_setup: PythonSetup) -> Lockfile:
    """Map a named resolve to the `Lockfile` configured for it in `[python].resolves`."""
    resolve_name = resolve.name
    configured_path = python_setup.resolves.get(resolve_name)
    if not configured_path:
        raise ValueError(f"No such resolve: {resolve_name}")
    return Lockfile(
        url=configured_path,
        url_description_of_origin=f"the resolve `{resolve_name}`",
        resolve_name=resolve_name,
    )
85

86

87
@dataclass(frozen=True)
class LoadedLockfile:
    """A lockfile after loading and header stripping.

    Validation is deferred until consumption time, because each consumed subset (in the case of a
    PEX-native lockfile) can be individually validated.
    """

    # The digest of the loaded lockfile (which may not be identical to the input).
    lockfile_digest: Digest
    # The path of the loaded lockfile within the Digest.
    lockfile_path: str
    # The loaded metadata for this lockfile, if any.
    # Excluded from __hash__ via field(hash=False).
    metadata: PythonLockfileMetadata | None = field(hash=False)
    # An estimate of the number of requirements in this lockfile, to be used as a heuristic for
    # available parallelism.
    requirement_estimate: int
    # True if the loaded lockfile is in PEX's native format.
    is_pex_native: bool
    # If !is_pex_native, the lockfile parsed as constraints strings, for use when the lockfile
    # needs to be subsetted (see #15031, #12222).
    as_constraints_strings: FrozenOrderedSet[str] | None
    # The original file or file content (which may not have identical content to the output
    # `lockfile_digest`).
    original_lockfile: Lockfile
112

113

114
@dataclass(frozen=True)
class LoadedLockfileRequest:
    """A request to load and validate the content of the given lockfile."""

    # The lockfile to load (consumed by `load_lockfile`).
    lockfile: Lockfile
119

120

121
def strip_comments_from_pex_json_lockfile(lockfile_bytes: bytes) -> bytes:
    """Pex does not like the header Pants adds to lockfiles, as it violates JSON.

    Note that we only strip lines starting with `//`, which is all that Pants will ever add. If
    users add their own comments, things will fail.
    """
    kept_lines = []
    for raw_line in lockfile_bytes.splitlines():
        # Drop (possibly indented) `//` comment lines; keep everything else verbatim.
        if raw_line.lstrip().startswith(b"//"):
            continue
        kept_lines.append(raw_line)
    return b"\n".join(kept_lines)
130

131

132
def is_probably_pex_json_lockfile(lockfile_bytes: bytes) -> bool:
    """Heuristically detect whether `lockfile_bytes` is a Pex-native JSON lockfile.

    Decides based on the first line that is non-empty and not a `//` comment: a
    JSON lockfile starts with `{`.
    """
    for candidate in lockfile_bytes.splitlines():
        if not candidate or candidate.startswith(b"//"):
            continue
        # Note that pip/Pex complain if a requirements.txt style starts with `{`.
        return candidate.lstrip().startswith(b"{")
    return False
138

139

140
def _pex_lockfile_requirement_count(lockfile_bytes: bytes) -> int:
2✔
141
    # TODO: this is a very naive heuristic that will overcount, and also relies on Pants
142
    #  setting `--indent` when generating lockfiles. More robust would be parsing the JSON
143
    #  and getting the len(locked_resolves.locked_requirements.project_name), but we risk
144
    #  if Pex ever changes its lockfile format.
145

146
    num_lines = len(lockfile_bytes.splitlines())
2✔
147
    # These are very naive estimates, and they bias towards overcounting. For example, requirements
148
    # often are 20+ lines.
149
    num_lines_for_options = 10
2✔
150
    lines_per_req = 10
2✔
151
    return max((num_lines - num_lines_for_options) // lines_per_req, 2)
2✔
152

153

154
def get_metadata(
    python_setup: PythonSetup,
    lock_bytes: bytes,
    lockfile_path: str | None,
    resolve_name: str,
    delimiter: str,
) -> PythonLockfileMetadata | None:
    """Parse the Pants metadata header block out of a lockfile's bytes, if validation is enabled.

    Returns None when `[python].invalid_lockfile_behavior` is `ignore`, or when the lockfile has
    no metadata block (i.e. it was not generated by Pants). Any other error raised while parsing
    the metadata block propagates.
    """
    if python_setup.invalid_lockfile_behavior == InvalidLockfileBehavior.ignore:
        return None
    try:
        # NB: `delimeter` (sic) is the actual keyword name of the callee.
        return PythonLockfileMetadata.from_lockfile(
            lockfile=lock_bytes,
            lockfile_path=lockfile_path,
            resolve_name=resolve_name,
            delimeter=delimiter,
        )
    except NoLockfileMetadataBlock:
        # We don't validate if the file isn't a pants-generated lockfile (as determined
        # by the lack of a metadata block). But we propagate any other type of
        # InvalidLockfileError incurred while parsing the metadata block.
        location = ("at " + lockfile_path) if lockfile_path else ""
        logger.debug(
            f"Lockfile for resolve {resolve_name} {location} has no metadata block, "
            f"so was not generated by Pants. Lockfile will not be validated."
        )
    return None
181

182

183
async def read_file_or_resource(url: str, description_of_origin: str) -> Digest:
    """Read from a path, file:// or resource:// URL and return the digest of the content.

    If no content is found at the path/URL, raise.
    """
    parsed = urlparse(url)
    # urlparse retains the leading / in URLs with a netloc; drop at most one.
    rel_path = parsed.path.removeprefix("/")
    if parsed.scheme in ("", "file"):
        globs = PathGlobs(
            [rel_path],
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin=description_of_origin,
        )
        return await path_globs_to_digest(globs)
    if parsed.scheme == "resource":
        # The "netloc" in our made-up "resource://" scheme is the package.
        payload = importlib.resources.files(parsed.netloc).joinpath(rel_path).read_bytes()
        return await create_digest(CreateDigest([FileContent(rel_path, payload)]))
    raise ValueError(
        f"Unsupported scheme {parsed.scheme} for URL: {url} (origin: {description_of_origin})"
    )
211

212

213
@rule
async def load_lockfile(
    request: LoadedLockfileRequest,
    python_setup: PythonSetup,
) -> LoadedLockfile:
    """Load a lockfile's content, strip any Pants metadata header, and parse its metadata.

    Metadata is looked up first in a sidecar metadata file next to the lockfile; failing
    that, in a metadata header block prepended to the lockfile itself. Validation of the
    parsed metadata is deferred to consumers (see `validate_metadata`).
    """
    lockfile = request.lockfile
    # TODO: This is temporary. Once we regenerate all embedded lockfiles to have sidecar metadata
    #  files instead of metadata front matter, we won't need to call get_metadata() on them.
    synthetic_lock = lockfile.url.startswith("resource://")
    lockfile_digest = await read_file_or_resource(lockfile.url, lockfile.url_description_of_origin)
    lockfile_digest_entries = await get_digest_entries(lockfile_digest)
    lockfile_path = lockfile_digest_entries[0].path

    lockfile_contents = await get_digest_contents(lockfile_digest)
    lock_bytes = lockfile_contents[0].content
    is_pex_native = is_probably_pex_json_lockfile(lock_bytes)
    constraints_strings = None

    # Preferred path: a sidecar metadata file next to the lockfile.
    metadata_url = PythonLockfileMetadata.metadata_location_for_lockfile(lockfile.url)
    metadata = None
    if python_setup.invalid_lockfile_behavior != InvalidLockfileBehavior.ignore:
        try:
            metadata_digest = await read_file_or_resource(
                metadata_url,
                description_of_origin="We squelch errors, so this is never seen by users",
            )
            digest_contents = await get_digest_contents(metadata_digest)
            metadata_bytes = digest_contents[0].content
            json_dict = json.loads(metadata_bytes)
            metadata = PythonLockfileMetadata.from_json_dict(
                json_dict,
                lockfile_description=f"the lockfile for `{lockfile.resolve_name}`",
                # Fix: the backtick around the command was previously left unclosed.
                error_suffix=softwrap(
                    f"""
                    To resolve this error, you will need to regenerate the lockfile by running
                    `{bin_name()} generate-lockfiles --resolve={lockfile.resolve_name}`.
                    """
                ),
            )
            requirement_estimate = _pex_lockfile_requirement_count(lock_bytes)
        except (IntrinsicError, FileNotFoundError):
            # No metadata file or resource found, so fall through to finding a metadata
            # header block prepended to the lockfile itself.
            pass

    if not metadata:
        if is_pex_native:
            header_delimiter = "//"
            # Pex rejects the JSON-violating comment header, so strip it out.
            stripped_lock_bytes = strip_comments_from_pex_json_lockfile(lock_bytes)
            lockfile_digest = await create_digest(
                CreateDigest([FileContent(lockfile_path, stripped_lock_bytes)])
            )
            requirement_estimate = _pex_lockfile_requirement_count(lock_bytes)
        else:
            header_delimiter = "#"
            lock_string = lock_bytes.decode()
            # Note: this is a very naive heuristic. It will overcount because entries often
            # have >1 line due to `--hash`.
            requirement_estimate = len(lock_string.splitlines())
            constraints_strings = FrozenOrderedSet(
                str(req) for req in parse_requirements_file(lock_string, rel_path=lockfile_path)
            )

        metadata = get_metadata(
            python_setup,
            lock_bytes,
            # Don't report a path for lockfiles embedded as resources.
            None if synthetic_lock else lockfile_path,
            lockfile.resolve_name,
            header_delimiter,
        )

    return LoadedLockfile(
        lockfile_digest,
        lockfile_path,
        metadata,
        requirement_estimate,
        is_pex_native,
        constraints_strings,
        original_lockfile=lockfile,
    )
293

294

295
@dataclass(frozen=True)
class EntireLockfile:
    """A request to resolve the entire contents of a lockfile.

    This resolution mode is used in a few cases:
    1. for poetry or handwritten lockfiles (which do not support being natively subsetted the
       way that a PEX lockfile can be), in order to build a repository-PEX to subset separately.
    2. for tool lockfiles, which (regardless of format), need to resolve the entire lockfile
       content anyway.
    """

    # The lockfile to resolve in its entirety.
    lockfile: Lockfile
    # If available, the current complete set of requirement strings that influence this lockfile.
    # Used for metadata validation.
    complete_req_strings: tuple[str, ...] | None = None
310

311

312
@dataclass(frozen=True)
class PexRequirements:
    """A request to resolve a series of requirements (optionally from a "superset" resolve)."""

    # Requirement strings, or addresses (or string specs of addresses) of targets providing them.
    req_strings_or_addrs: FrozenOrderedSet[str | Address]
    # Constraints strings to apply during the resolve.
    constraints_strings: FrozenOrderedSet[str]
    # If these requirements should be resolved as a subset of either a repository PEX, or a
    # PEX-native lockfile, the superset to use. NB: Use of a lockfile here asserts that the
    # lockfile is PEX-native, because legacy lockfiles do not support subset resolves.
    from_superset: Pex | Resolve | None
    # A human-readable description of what these requirements represent, for error messages.
    description_of_origin: str

    def __init__(
        self,
        req_strings_or_addrs: Iterable[str | Address] = (),
        *,
        constraints_strings: Iterable[str] = (),
        from_superset: Pex | Resolve | None = None,
        description_of_origin: str = "",
    ) -> None:
        """
        :param req_strings_or_addrs: The requirement strings to resolve, or addresses
          of targets that refer to them, or string specs of such addresses.
        :param constraints_strings: Constraints strings to apply during the resolve.
        :param from_superset: An optional superset PEX or lockfile to resolve the req strings from.
        :param description_of_origin: A human-readable description of what these requirements
          represent, for use in error messages.
        """
        # This is a frozen dataclass with a hand-written __init__, so field values must
        # be installed via object.__setattr__.
        normalized = {
            "req_strings_or_addrs": FrozenOrderedSet(sorted(req_strings_or_addrs)),
            "constraints_strings": FrozenOrderedSet(sorted(constraints_strings)),
            "from_superset": from_superset,
            "description_of_origin": description_of_origin,
        }
        for attr_name, attr_value in normalized.items():
            object.__setattr__(self, attr_name, attr_value)

    @classmethod
    def req_strings_from_requirement_fields(
        cls, fields: Iterable[PythonRequirementsField]
    ) -> FrozenOrderedSet[str]:
        """A convenience when you only need the raw requirement strings from fields and don't need
        to consider things like constraints or resolves."""
        raw_strings = (str(python_req) for fld in fields for python_req in fld.value)
        return FrozenOrderedSet(sorted(raw_strings))

    def __bool__(self) -> bool:
        return len(self.req_strings_or_addrs) > 0
361

362

363
@dataclass(frozen=True)
class ResolvePexConstraintsFile:
    """A loaded constraints file for a resolve (see `[python].resolves_to_constraints_file`)."""

    # Digest containing the constraints file content.
    digest: Digest
    # Path of the constraints file within `digest`.
    path: str
    # The constraints parsed from the file.
    constraints: FrozenOrderedSet[PipRequirement]
368

369

370
@dataclass(frozen=True)
class ResolvePexConfig:
    """Configuration from `[python]` that impacts how the resolve is created."""

    indexes: tuple[str, ...]
    find_links: tuple[str, ...]
    manylinux: str | None
    constraints_file: ResolvePexConstraintsFile | None
    only_binary: FrozenOrderedSet[str]
    no_binary: FrozenOrderedSet[str]
    excludes: FrozenOrderedSet[str]
    overrides: FrozenOrderedSet[str]
    sources: FrozenOrderedSet[str]
    path_mappings: tuple[str, ...]
    lock_style: str
    complete_platforms: tuple[str, ...]

    def pex_args(self) -> Iterator[str]:
        """Arguments for Pex for indexes/--find-links, manylinux, and path mappings.

        Does not include arguments for constraints files, which must be set up independently.
        """
        # NB: In setting `--no-pypi`, we rely on the default value of `[python-repos].indexes`
        # including PyPI, which will override `--no-pypi` and result in using PyPI in the default
        # case. Why set `--no-pypi`, then? We need to do this so that
        # `[python-repos].indexes = ['custom_url']` will only point to that index and not include
        # PyPI.
        yield "--no-pypi"
        for index in self.indexes:
            yield f"--index={index}"
        for repo in self.find_links:
            yield f"--find-links={repo}"

        if self.manylinux:
            yield "--manylinux"
            yield self.manylinux
        else:
            yield "--no-manylinux"

        # Pex logically plumbs through equivalent settings, but uses a separate flag instead
        # of the Pip magic :all:/:none: syntax. To support the existing Pants config settings
        # we need to go from :all:/:none: --> Pex options, which Pex will translate back into
        # Pip options. Note that Pex's --wheel (for example) means "allow wheels", not
        # "require wheels".
        if self.only_binary:
            if ":all:" in self.only_binary:
                yield "--wheel"
                yield "--no-build"
            elif ":none:" in self.only_binary:
                yield "--no-wheel"
                yield "--build"
            else:
                for pkg in self.only_binary:
                    yield f"--only-binary={pkg}"

        if self.no_binary:
            if ":all:" in self.no_binary:
                yield "--no-wheel"
                yield "--build"
            elif ":none:" in self.no_binary:
                yield "--wheel"
                yield "--no-build"
            else:
                for pkg in self.no_binary:
                    yield f"--only-build={pkg}"

        for mapping in self.path_mappings:
            yield f"--path-mapping={mapping}"

        for exclude in self.excludes:
            yield f"--exclude={exclude}"
        for source in self.sources:
            yield f"--source={source}"
435

436

437
@dataclass(frozen=True)
class ResolvePexConfigRequest(EngineAwareParameter):
    """Find all configuration from `[python]` that impacts how the resolve is created.

    If `resolve_name` is None, then most per-resolve options will be ignored because there is no way
    for users to configure them. However, some options like `[python-repos].indexes` will still be
    loaded.
    """

    resolve_name: str | None

    def debug_hint(self) -> str:
        """Identify this request in engine debug output."""
        if self.resolve_name:
            return self.resolve_name
        return "<no resolve>"
450

451

452
@rule
async def determine_resolve_pex_config(
    request: ResolvePexConfigRequest,
    python_setup: PythonSetup,
    python_repos: PythonRepos,
    union_membership: UnionMembership,
) -> ResolvePexConfig:
    """Assemble the `[python]`/`[python-repos]` configuration that shapes a resolve.

    NOTE(review): `union_membership` is unused in this body — presumably required for
    rule-graph reasons; confirm before removing.
    """
    if request.resolve_name is None:
        # No resolve name: per-resolve options cannot apply, so return only the global
        # repo/manylinux settings with empty per-resolve collections.
        return ResolvePexConfig(
            indexes=python_repos.indexes,
            find_links=python_repos.find_links,
            manylinux=python_setup.manylinux,
            constraints_file=None,
            no_binary=FrozenOrderedSet(),
            only_binary=FrozenOrderedSet(),
            excludes=FrozenOrderedSet(),
            overrides=FrozenOrderedSet(),
            sources=FrozenOrderedSet(),
            path_mappings=python_repos.path_mappings,
            lock_style="universal",  # Default to universal when no resolve name
            complete_platforms=(),  # No complete platforms by default
        )

    # Per-resolve options, each falling back to an empty collection/default when unset.
    no_binary = python_setup.resolves_to_no_binary().get(request.resolve_name) or []
    only_binary = python_setup.resolves_to_only_binary().get(request.resolve_name) or []
    excludes = python_setup.resolves_to_excludes().get(request.resolve_name) or []
    overrides = python_setup.resolves_to_overrides().get(request.resolve_name) or []
    sources = python_setup.resolves_to_sources().get(request.resolve_name) or []
    lock_style = python_setup.resolves_to_lock_style().get(request.resolve_name) or "universal"
    complete_platforms = tuple(
        python_setup.resolves_to_complete_platforms().get(request.resolve_name) or []
    )

    constraints_file: ResolvePexConstraintsFile | None = None
    _constraints_file_path = python_setup.resolves_to_constraints_file().get(request.resolve_name)
    if _constraints_file_path:
        _constraints_origin = softwrap(
            f"""
            the option `[python].resolves_to_constraints_file` for the resolve
            '{request.resolve_name}'
            """
        )
        _constraints_path_globs = PathGlobs(
            [_constraints_file_path] if _constraints_file_path else [],
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin=_constraints_origin,
        )
        # TODO: Probably re-doing work here - instead of just calling one, then the next
        _constraints_digest, _constraints_digest_contents = await concurrently(
            path_globs_to_digest(_constraints_path_globs),
            get_digest_contents(**implicitly({_constraints_path_globs: PathGlobs})),
        )

        # The option must match exactly one file; a glob matching several is an error.
        if len(_constraints_digest_contents) != 1:
            raise ValueError(
                softwrap(
                    f"""
                    Expected only one file from {_constraints_origin}, but matched:
                    {sorted(fc.path for fc in _constraints_digest_contents)}

                    Did you use a glob like `*`?
                    """
                )
            )
        _constraints_file_content = next(iter(_constraints_digest_contents))
        constraints = parse_requirements_file(
            _constraints_file_content.content.decode("utf-8"), rel_path=_constraints_file_path
        )
        constraints_file = ResolvePexConstraintsFile(
            _constraints_digest, _constraints_file_path, FrozenOrderedSet(constraints)
        )

    return ResolvePexConfig(
        indexes=python_repos.indexes,
        find_links=python_repos.find_links,
        manylinux=python_setup.manylinux,
        constraints_file=constraints_file,
        no_binary=FrozenOrderedSet(no_binary),
        only_binary=FrozenOrderedSet(only_binary),
        excludes=FrozenOrderedSet(excludes),
        overrides=FrozenOrderedSet(overrides),
        sources=FrozenOrderedSet(sources),
        path_mappings=python_repos.path_mappings,
        lock_style=lock_style,
        complete_platforms=complete_platforms,
    )
538

539

540
def validate_metadata(
    metadata: PythonLockfileMetadata,
    interpreter_constraints: InterpreterConstraints,
    lockfile: Lockfile,
    consumed_req_strings: Iterable[str],
    validate_consumed_req_strings: bool,
    python_setup: PythonSetup,
    resolve_config: ResolvePexConfig,
) -> None:
    """Given interpreter constraints and requirements to be consumed, validate lockfile metadata.

    On mismatch, raises InvalidLockfileError when `[python].invalid_lockfile_behavior` is
    `error`, and otherwise logs a warning. Returns None on success.
    """

    # TODO(#12314): Improve the exception if invalid strings
    user_requirements = [PipRequirement.parse(i) for i in consumed_req_strings]
    validation = metadata.is_valid_for(
        expected_invalidation_digest=lockfile.lockfile_hex_digest,
        user_interpreter_constraints=interpreter_constraints,
        interpreter_universe=python_setup.interpreter_versions_universe,
        # Passing an empty collection disables requirement-subset validation.
        user_requirements=user_requirements if validate_consumed_req_strings else {},
        manylinux=resolve_config.manylinux,
        requirement_constraints=(
            resolve_config.constraints_file.constraints
            if resolve_config.constraints_file
            else set()
        ),
        only_binary=resolve_config.only_binary,
        no_binary=resolve_config.no_binary,
        excludes=resolve_config.excludes,
        overrides=resolve_config.overrides,
        sources=resolve_config.sources,
        lock_style=resolve_config.lock_style,
        complete_platforms=resolve_config.complete_platforms,
    )
    if validation:
        return

    # Build the (multi-part) error message lazily via the generator below.
    error_msg_kwargs = dict(
        metadata=metadata,
        validation=validation,
        lockfile=lockfile,
        is_default_user_lockfile=lockfile.resolve_name == python_setup.default_resolve,
        user_interpreter_constraints=interpreter_constraints,
        user_requirements=user_requirements,
        maybe_constraints_file_path=(
            resolve_config.constraints_file.path if resolve_config.constraints_file else None
        ),
    )
    msg_iter = _invalid_lockfile_error(**error_msg_kwargs)  # type: ignore[arg-type]
    msg = "".join(msg_iter).strip()
    if python_setup.invalid_lockfile_behavior == InvalidLockfileBehavior.error:
        raise InvalidLockfileError(msg)
    logger.warning(msg)
591

592

593
def _common_failure_reasons(
    failure_reasons: set[InvalidPythonLockfileReason], maybe_constraints_file_path: str | None
) -> Iterator[str]:
    """Yield error-message fragments for failure reasons shared by all lockfile kinds.

    Covers constraints-file, only_binary, no_binary, and manylinux mismatches.
    """
    if InvalidPythonLockfileReason.CONSTRAINTS_FILE_MISMATCH in failure_reasons:
        if maybe_constraints_file_path is None:
            yield softwrap(
                """
                - Constraint file expected from lockfile metadata but no
                constraints file configured.  See the option
                `[python].resolves_to_constraints_file`.
                """
            )
        else:
            yield softwrap(
                f"""
                - The constraints file at {maybe_constraints_file_path} has changed from when the
                lockfile was generated. (Constraints files are set via the option
                `[python].resolves_to_constraints_file`)
                """
            )
    if InvalidPythonLockfileReason.ONLY_BINARY_MISMATCH in failure_reasons:
        yield softwrap(
            """
            - The `only_binary` arguments have changed from when the lockfile was generated.
            (`only_binary` is set via the options `[python].resolves_to_only_binary` and deprecated
            `[python].only_binary`)
            """
        )
    if InvalidPythonLockfileReason.NO_BINARY_MISMATCH in failure_reasons:
        yield softwrap(
            """
            - The `no_binary` arguments have changed from when the lockfile was generated.
            (`no_binary` is set via the options `[python].resolves_to_no_binary` and deprecated
            `[python].no_binary`)
            """
        )
    if InvalidPythonLockfileReason.MANYLINUX_MISMATCH in failure_reasons:
        yield softwrap(
            """
            - The `manylinux` argument has changed from when the lockfile was generated.
            (manylinux is set via the option `[python].resolver_manylinux`)
            """
        )
636

637

638
def _invalid_lockfile_error(
    metadata: PythonLockfileMetadata,
    validation: LockfileMetadataValidation,
    lockfile: Lockfile,
    *,
    is_default_user_lockfile: bool,
    user_requirements: list[PipRequirement],
    user_interpreter_constraints: InterpreterConstraints,
    maybe_constraints_file_path: str | None,
) -> Iterator[str]:
    """Yield the parts of the "invalid lockfile" error/warning message.

    Consumed by `validate_metadata`, which joins and strips the fragments.
    """
    resolve = lockfile.resolve_name
    # Show at most two concrete requirements, then summarize the remainder.
    consumed_msg_parts = [f"`{str(r)}`" for r in user_requirements[0:2]]
    if len(user_requirements) > 2:
        consumed_msg_parts.append(
            f"{len(user_requirements) - 2} other "
            f"{pluralize(len(user_requirements) - 2, 'requirement', include_count=False)}"
        )

    yield f"\n\nYou are consuming {comma_separated_list(consumed_msg_parts)} from "
    if lockfile.url.startswith("resource://"):
        yield f"the built-in `{resolve}` lockfile provided by Pants "
    else:
        yield f"the `{resolve}` lockfile at {lockfile.url} "
    yield "with incompatible inputs.\n\n"

    if any(
        i
        in (
            InvalidPythonLockfileReason.INVALIDATION_DIGEST_MISMATCH,
            InvalidPythonLockfileReason.REQUIREMENTS_MISMATCH,
        )
        for i in validation.failure_reasons
    ):
        yield (
            softwrap(
                """
            - The lockfile does not provide all the necessary requirements. You must
            modify the input requirements and/or regenerate the lockfile (see below).
            """
            )
            + "\n\n"
        )
        if is_default_user_lockfile:
            yield softwrap(
                f"""
                - The necessary requirements are specified by requirements targets marked with
                `resolve="{resolve}"`, or those with no explicit resolve (since `{resolve}` is the
                default for this repo).

                - The lockfile destination is specified by the `{resolve}` key in `[python].resolves`.
                """
            )
        else:
            yield softwrap(
                f"""
                - The necessary requirements are specified by requirements targets marked with
                `resolve="{resolve}"`.

                - The lockfile destination is specified by the `{resolve}` key in
                `[python].resolves`.
                """
            )

        if isinstance(metadata, PythonLockfileMetadataV2):
            # Note that by the time we have gotten to this error message, we should have already
            # validated that the transitive closure is using the same resolve, via
            # pex_from_targets.py. This implies that we don't need to worry about users depending
            # on python_requirement targets that aren't in that code's resolve.
            not_in_lock = sorted(str(r) for r in set(user_requirements) - metadata.requirements)
            yield f"\n\n- The requirements not provided by the `{resolve}` resolve are:\n  "
            yield str(not_in_lock)

    if InvalidPythonLockfileReason.INTERPRETER_CONSTRAINTS_MISMATCH in validation.failure_reasons:
        yield "\n\n"
        yield softwrap(
            f"""
            - The inputs use interpreter constraints (`{user_interpreter_constraints}`) that
            are not a subset of those used to generate the lockfile
            (`{metadata.valid_for_interpreter_constraints}`).

            - The input interpreter constraints are specified by your code, using
            the `[python].interpreter_constraints` option and the `interpreter_constraints`
            target field.

            - To create a lockfile with new interpreter constraints, update the option
            `[python].resolves_to_interpreter_constraints`, and then generate the lockfile
            (see below).
            """
        )
        yield f"\n\nSee {doc_url('docs/python/overview/interpreter-compatibility')} for details."

    yield "\n\n"
    # Append fragments common to all lockfile kinds, then the regeneration instructions.
    yield from (
        f"{fail}\n"
        for fail in _common_failure_reasons(validation.failure_reasons, maybe_constraints_file_path)
    )
    yield "To regenerate your lockfile, "
    yield f"run `{bin_name()} generate-lockfiles --resolve={resolve}`."
    yield f"\n\nSee {doc_url('docs/python/overview/third-party-dependencies')} for details.\n\n"
737

738

739
def rules():
    """Collect this module's @rule-decorated coroutines for registration with the engine."""
    return collect_rules()
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc