• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

pantsbuild / pants / 18727405282

22 Oct 2025 07:22PM UTC coverage: 80.276% (+0.01%) from 80.262%
18727405282

push

github

web-flow
Enable scoping indexes for specific projects/platforms when generating lockfiles (#22760)

### Problem
The current `generate-lockfiles` goal only allows one locked version for
each requirement. This leads to issues when using certain libraries like
Pytorch and Tensorflow which have some complexities in the way the
wheels are published and tagged. See
https://github.com/pantsbuild/pants/issues/18965

An example with pytorch: the Linux wheels published to PyPI include cuda
dependencies, while the macos ones do not. There is a `+cpu` tag in the
pytorch-hosted index, _but only for Linux_. The current setup means that
we can't lock a cpu-only version of pytorch that works on both macos and
linux.

### Proposed solution
As of [pex 2.56.0](https://github.com/pex-tool/pex/releases/tag/v2.56.0)
it is possible to create a universal lock with two locked resolves (one
for macos, one for linux) by adding an appropriately scoped index:
```bash
pex3 lock create \
    --style universal \
    --target-system linux \
    --target-system mac \
    --elide-unused-requires-dist \
    --interpreter-constraint "CPython==3.13.*" \
    --index pytorch=https://download.pytorch.org/whl/cpu \
    --source "pytorch=torch; sys_platform != 'darwin'" \
    --indent 2 \
    -o lock.json \
    torch
```
This PR adds the option of specifying sources through a new
`[python].resolves_to_sources` option. Named indexes can already be
defined in `[python-repos].indexes`.

94 of 96 new or added lines in 6 files covered. (97.92%)

3 existing lines in 1 file now uncovered.

77860 of 96990 relevant lines covered (80.28%)

3.08 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

72.14
/src/python/pants/backend/python/util_rules/pex_requirements.py
1
# Copyright 2022 Pants project contributors (see CONTRIBUTORS.md).
2
# Licensed under the Apache License, Version 2.0 (see LICENSE).
3

4
from __future__ import annotations
11✔
5

6
import importlib.resources
11✔
7
import json
11✔
8
import logging
11✔
9
from collections.abc import Iterable, Iterator
11✔
10
from dataclasses import dataclass, field
11✔
11
from typing import TYPE_CHECKING
11✔
12
from urllib.parse import urlparse
11✔
13

14
from pants.backend.python.subsystems.repos import PythonRepos
11✔
15
from pants.backend.python.subsystems.setup import InvalidLockfileBehavior, PythonSetup
11✔
16
from pants.backend.python.target_types import PythonRequirementsField
11✔
17
from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints
11✔
18
from pants.backend.python.util_rules.lockfile_metadata import (
11✔
19
    InvalidPythonLockfileReason,
20
    PythonLockfileMetadata,
21
    PythonLockfileMetadataV2,
22
)
23
from pants.build_graph.address import Address
11✔
24
from pants.core.util_rules.lockfile_metadata import (
11✔
25
    InvalidLockfileError,
26
    LockfileMetadataValidation,
27
    NoLockfileMetadataBlock,
28
)
29
from pants.engine.engine_aware import EngineAwareParameter
11✔
30
from pants.engine.fs import CreateDigest, Digest, FileContent, GlobMatchErrorBehavior, PathGlobs
11✔
31
from pants.engine.internals.native_engine import IntrinsicError
11✔
32
from pants.engine.intrinsics import (
11✔
33
    create_digest,
34
    get_digest_contents,
35
    get_digest_entries,
36
    path_globs_to_digest,
37
)
38
from pants.engine.rules import collect_rules, concurrently, implicitly, rule
11✔
39
from pants.engine.unions import UnionMembership
11✔
40
from pants.util.docutil import bin_name, doc_url
11✔
41
from pants.util.ordered_set import FrozenOrderedSet
11✔
42
from pants.util.pip_requirement import PipRequirement
11✔
43
from pants.util.requirements import parse_requirements_file
11✔
44
from pants.util.strutil import comma_separated_list, pluralize, softwrap
11✔
45

46
if TYPE_CHECKING:
47
    from pants.backend.python.util_rules.pex import Pex
48

49

50
logger = logging.getLogger(__name__)
11✔
51

52

53
@dataclass(frozen=True)
class Resolve:
    # A named resolve for a "user lockfile".
    # Soon to be the only kind of lockfile, as this class will help
    # get rid of the "tool lockfile" concept.
    # TODO: Once we get rid of old-style tool lockfiles we can possibly
    #  unify this with EntireLockfile.
    # TODO: We might want to add the requirements subset to this data structure,
    #  to further detangle this from PexRequirements.

    # The resolve's name, as configured in `[python].resolves`.
    name: str

    # If True, the entire lockfile is resolved rather than a subset of it.
    use_entire_lockfile: bool

67
@dataclass(frozen=True)
class Lockfile:
    """A reference to a lockfile by URL, before its content has been loaded."""

    # Where to load the lockfile from: a plain path, or a file:// / resource:// URL.
    url: str
    # A human-readable description of where `url` came from, for error messages.
    url_description_of_origin: str
    # The resolve this lockfile belongs to.
    resolve_name: str
    # If set, the expected invalidation digest used during metadata validation.
    lockfile_hex_digest: str | None = None
74

75
@rule
async def get_lockfile_for_resolve(resolve: Resolve, python_setup: PythonSetup) -> Lockfile:
    """Map a named resolve to the Lockfile configured for it in `[python].resolves`."""
    configured_path = python_setup.resolves.get(resolve.name)
    if not configured_path:
        raise ValueError(f"No such resolve: {resolve.name}")
    return Lockfile(
        url=configured_path,
        url_description_of_origin=f"the resolve `{resolve.name}`",
        resolve_name=resolve.name,
    )

86

87
@dataclass(frozen=True)
class LoadedLockfile:
    """A lockfile after loading and header stripping.

    Validation is deferred until consumption time, because each consumed subset (in the case of a
    PEX-native lockfile) can be individually validated.
    """

    # The digest of the loaded lockfile (which may not be identical to the input).
    lockfile_digest: Digest
    # The path of the loaded lockfile within the Digest.
    lockfile_path: str
    # The loaded metadata for this lockfile, if any.
    metadata: PythonLockfileMetadata | None = field(hash=False)
    # An estimate of the number of requirements in this lockfile, to be used as a heuristic for
    # available parallelism.
    requirement_estimate: int
    # True if the loaded lockfile is in PEX's native format.
    is_pex_native: bool
    # If !is_pex_native, the lockfile parsed as constraints strings, for use when the lockfile
    # needs to be subsetted (see #15031, #12222).
    as_constraints_strings: FrozenOrderedSet[str] | None
    # The original file or file content (which may not have identical content to the output
    # `lockfile_digest`).
    original_lockfile: Lockfile

113

114
@dataclass(frozen=True)
class LoadedLockfileRequest:
    """A request to load and validate the content of the given lockfile."""

    # The lockfile reference to load.
    lockfile: Lockfile

120

121
def strip_comments_from_pex_json_lockfile(lockfile_bytes: bytes) -> bytes:
    """Pex does not like the header Pants adds to lockfiles, as it violates JSON.

    Note that we only strip lines starting with `//`, which is all that Pants will ever add. If
    users add their own comments, things will fail.
    """
    kept_lines = [
        raw_line
        for raw_line in lockfile_bytes.splitlines()
        if not raw_line.lstrip().startswith(b"//")
    ]
    return b"\n".join(kept_lines)
130

131

132
def is_probably_pex_json_lockfile(lockfile_bytes: bytes) -> bool:
    """Heuristic: the first non-empty, non-`//`-comment line of a PEX JSON lock starts with `{`."""
    for candidate in lockfile_bytes.splitlines():
        if not candidate or candidate.startswith(b"//"):
            continue
        # Note that pip/Pex complain if a requirements.txt style starts with `{`.
        return candidate.lstrip().startswith(b"{")
    return False
138

139

140
def _pex_lockfile_requirement_count(lockfile_bytes: bytes) -> int:
11✔
141
    # TODO: this is a very naive heuristic that will overcount, and also relies on Pants
142
    #  setting `--indent` when generating lockfiles. More robust would be parsing the JSON
143
    #  and getting the len(locked_resolves.locked_requirements.project_name), but we risk
144
    #  if Pex ever changes its lockfile format.
145

146
    num_lines = len(lockfile_bytes.splitlines())
1✔
147
    # These are very naive estimates, and they bias towards overcounting. For example, requirements
148
    # often are 20+ lines.
149
    num_lines_for_options = 10
1✔
150
    lines_per_req = 10
1✔
151
    return max((num_lines - num_lines_for_options) // lines_per_req, 2)
1✔
152

153

154
def get_metadata(
    python_setup: PythonSetup,
    lock_bytes: bytes,
    lockfile_path: str | None,
    resolve_name: str,
    delimiter: str,
) -> PythonLockfileMetadata | None:
    """Parse the Pants metadata header block from lockfile bytes, if validation is enabled.

    Returns None when lockfile validation is disabled, or when the lockfile has no metadata
    block (i.e. it was not generated by Pants).
    """
    if python_setup.invalid_lockfile_behavior == InvalidLockfileBehavior.ignore:
        return None
    try:
        # NB: `delimeter` (sic) is the keyword name declared by the metadata API.
        return PythonLockfileMetadata.from_lockfile(
            lockfile=lock_bytes,
            lockfile_path=lockfile_path,
            resolve_name=resolve_name,
            delimeter=delimiter,
        )
    except NoLockfileMetadataBlock:
        # We don't validate if the file isn't a pants-generated lockfile (as determined
        # by the lack of a metadata block). But we propagate any other type of
        # InvalidLockfileError incurred while parsing the metadata block.
        location = ("at " + lockfile_path) if lockfile_path else ""
        logger.debug(
            f"Lockfile for resolve {resolve_name} "
            f"{location}"
            f" has no metadata block, so was not generated by Pants. "
            f"Lockfile will not be validated."
        )
        return None
181

182

183
async def read_file_or_resource(url: str, description_of_origin: str) -> Digest:
    """Read from a path, file:// or resource:// URL and return the digest of the content.

    If no content is found at the path/URL, raise.
    """
    parsed = urlparse(url)
    # urlparse retains the leading / in URLs with a netloc.
    relative_path = parsed.path.removeprefix("/")
    if parsed.scheme == "resource":
        # The "netloc" in our made-up "resource://" scheme is the package.
        payload = importlib.resources.files(parsed.netloc).joinpath(relative_path).read_bytes()
        return await create_digest(CreateDigest([FileContent(relative_path, payload)]))
    if parsed.scheme in ("", "file"):
        globs = PathGlobs(
            [relative_path],
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin=description_of_origin,
        )
        return await path_globs_to_digest(globs)
    raise ValueError(
        f"Unsupported scheme {parsed.scheme} for URL: {url} (origin: {description_of_origin})"
    )
211

212

213
@rule
async def load_lockfile(
    request: LoadedLockfileRequest,
    python_setup: PythonSetup,
) -> LoadedLockfile:
    """Load a lockfile's content, strip any Pants comment header, and parse its metadata.

    Metadata is looked up first in a sidecar metadata file/resource; if none exists, we fall
    back to a metadata block prepended to the lockfile itself.
    """
    lockfile = request.lockfile
    # TODO: This is temporary. Once we regenerate all embedded lockfiles to have sidecar metadata
    #  files instead of metadata front matter, we won't need to call get_metadata() on them.
    synthetic_lock = lockfile.url.startswith("resource://")
    lockfile_digest = await read_file_or_resource(lockfile.url, lockfile.url_description_of_origin)
    lockfile_digest_entries = await get_digest_entries(lockfile_digest)
    lockfile_path = lockfile_digest_entries[0].path

    lockfile_contents = await get_digest_contents(lockfile_digest)
    lock_bytes = lockfile_contents[0].content
    is_pex_native = is_probably_pex_json_lockfile(lock_bytes)
    constraints_strings = None

    # Prefer sidecar metadata (a separate file/resource next to the lockfile).
    metadata_url = PythonLockfileMetadata.metadata_location_for_lockfile(lockfile.url)
    metadata = None
    try:
        metadata_digest = await read_file_or_resource(
            metadata_url,
            description_of_origin="We squelch errors, so this is never seen by users",
        )
        digest_contents = await get_digest_contents(metadata_digest)
        metadata_bytes = digest_contents[0].content
        json_dict = json.loads(metadata_bytes)
        metadata = PythonLockfileMetadata.from_json_dict(
            json_dict,
            lockfile_description=f"the lockfile for `{lockfile.resolve_name}`",
            # NB: fixed a typo here: the closing backtick after the command was missing.
            error_suffix=softwrap(
                f"""
                To resolve this error, you will need to regenerate the lockfile by running
                `{bin_name()} generate-lockfiles --resolve={lockfile.resolve_name}`.
                """
            ),
        )
        requirement_estimate = _pex_lockfile_requirement_count(lock_bytes)
    except (IntrinsicError, FileNotFoundError):
        # No metadata file or resource found, so fall through to finding a metadata
        # header block prepended to the lockfile itself.
        pass

    if not metadata:
        if is_pex_native:
            header_delimiter = "//"
            # Pex rejects JSON lockfiles containing Pants' `//` comment header, so re-digest
            # the stripped content.
            stripped_lock_bytes = strip_comments_from_pex_json_lockfile(lock_bytes)
            lockfile_digest = await create_digest(
                CreateDigest([FileContent(lockfile_path, stripped_lock_bytes)])
            )
            requirement_estimate = _pex_lockfile_requirement_count(lock_bytes)
        else:
            header_delimiter = "#"
            lock_string = lock_bytes.decode()
            # Note: this is a very naive heuristic. It will overcount because entries often
            # have >1 line due to `--hash`.
            requirement_estimate = len(lock_string.splitlines())
            constraints_strings = FrozenOrderedSet(
                str(req) for req in parse_requirements_file(lock_string, rel_path=lockfile_path)
            )

        metadata = get_metadata(
            python_setup,
            lock_bytes,
            # Synthetic (resource://) locks have no user-meaningful path to report.
            None if synthetic_lock else lockfile_path,
            lockfile.resolve_name,
            header_delimiter,
        )

    return LoadedLockfile(
        lockfile_digest,
        lockfile_path,
        metadata,
        requirement_estimate,
        is_pex_native,
        constraints_strings,
        original_lockfile=lockfile,
    )

293

294
@dataclass(frozen=True)
class EntireLockfile:
    """A request to resolve the entire contents of a lockfile.

    This resolution mode is used in a few cases:
    1. for poetry or handwritten lockfiles (which do not support being natively subsetted the
       way that a PEX lockfile can be), in order to build a repository-PEX to subset separately.
    2. for tool lockfiles, which (regardless of format), need to resolve the entire lockfile
       content anyway.
    """

    # The lockfile to resolve in full.
    lockfile: Lockfile
    # If available, the current complete set of requirement strings that influence this lockfile.
    # Used for metadata validation.
    complete_req_strings: tuple[str, ...] | None = None
309

310

311
@dataclass(frozen=True)
class PexRequirements:
    """A request to resolve a series of requirements (optionally from a "superset" resolve)."""

    # Requirement strings, or addresses of targets that provide them; sorted and frozen
    # by __init__.
    req_strings_or_addrs: FrozenOrderedSet[str | Address]
    # Constraints strings to apply during the resolve; sorted and frozen by __init__.
    constraints_strings: FrozenOrderedSet[str]
    # If these requirements should be resolved as a subset of either a repository PEX, or a
    # PEX-native lockfile, the superset to use. # NB: Use of a lockfile here asserts that the
    # lockfile is PEX-native, because legacy lockfiles do not support subset resolves.
    from_superset: Pex | Resolve | None
    # A human-readable description of what these requirements represent, for error messages.
    description_of_origin: str

    def __init__(
        self,
        req_strings_or_addrs: Iterable[str | Address] = (),
        *,
        constraints_strings: Iterable[str] = (),
        from_superset: Pex | Resolve | None = None,
        description_of_origin: str = "",
    ) -> None:
        """
        :param req_strings_or_addrs: The requirement strings to resolve, or addresses
          of targets that refer to them, or string specs of such addresses.
        :param constraints_strings: Constraints strings to apply during the resolve.
        :param from_superset: An optional superset PEX or lockfile to resolve the req strings from.
        :param description_of_origin: A human-readable description of what these requirements
          represent, for use in error messages.
        """
        # This is a frozen dataclass with a custom __init__, so we must assign the
        # normalized (sorted, frozen) values via object.__setattr__.
        object.__setattr__(
            self, "req_strings_or_addrs", FrozenOrderedSet(sorted(req_strings_or_addrs))
        )
        object.__setattr__(
            self, "constraints_strings", FrozenOrderedSet(sorted(constraints_strings))
        )
        object.__setattr__(self, "from_superset", from_superset)
        object.__setattr__(self, "description_of_origin", description_of_origin)

    @classmethod
    def req_strings_from_requirement_fields(
        cls, fields: Iterable[PythonRequirementsField]
    ) -> FrozenOrderedSet[str]:
        """A convenience when you only need the raw requirement strings from fields and don't need
        to consider things like constraints or resolves."""
        return FrozenOrderedSet(
            sorted(str(python_req) for fld in fields for python_req in fld.value)
        )

    def __bool__(self) -> bool:
        # Truthy iff there is at least one requirement string or address to resolve.
        return bool(self.req_strings_or_addrs)
360

361

362
@dataclass(frozen=True)
class ResolvePexConstraintsFile:
    """A constraints file loaded for a resolve, along with its parsed requirements."""

    # Digest containing the constraints file content.
    digest: Digest
    # Path of the constraints file within the digest.
    path: str
    # The requirements parsed from the constraints file.
    constraints: FrozenOrderedSet[PipRequirement]
367

368

369
@dataclass(frozen=True)
class ResolvePexConfig:
    """Configuration from `[python]` that impacts how the resolve is created."""

    # Index URLs (see `[python-repos].indexes`).
    indexes: tuple[str, ...]
    # Find-links repos (see `[python-repos].find_links`).
    find_links: tuple[str, ...]
    # The manylinux spec, or None to disable manylinux wheels.
    manylinux: str | None
    # The loaded constraints file for this resolve, if one is configured.
    constraints_file: ResolvePexConstraintsFile | None
    # Per-resolve only-binary/no-binary package sets (may contain :all:/:none: magic values).
    only_binary: FrozenOrderedSet[str]
    no_binary: FrozenOrderedSet[str]
    # Per-resolve excludes, overrides, and sources, passed through to Pex.
    excludes: FrozenOrderedSet[str]
    overrides: FrozenOrderedSet[str]
    sources: FrozenOrderedSet[str]
    # Path mappings (see `[python-repos].path_mappings`).
    path_mappings: tuple[str, ...]

    def pex_args(self) -> Iterator[str]:
        """Arguments for Pex for indexes/--find-links, manylinux, and path mappings.

        Does not include arguments for constraints files, which must be set up independently.
        """
        # NB: In setting `--no-pypi`, we rely on the default value of `[python-repos].indexes`
        # including PyPI, which will override `--no-pypi` and result in using PyPI in the default
        # case. Why set `--no-pypi`, then? We need to do this so that
        # `[python-repos].indexes = ['custom_url']` will only point to that index and not include
        # PyPI.
        yield "--no-pypi"
        yield from (f"--index={index}" for index in self.indexes)
        yield from (f"--find-links={repo}" for repo in self.find_links)

        if self.manylinux:
            yield "--manylinux"
            yield self.manylinux
        else:
            yield "--no-manylinux"

        # Pex logically plumbs through equivalent settings, but uses a
        # separate flag instead of the Pip magic :all:/:none: syntax.  To
        # support the existing Pants config settings we need to go from
        # :all:/:none: --> Pex options, which Pex will translate back into Pip
        # options.  Note that Pex's --wheel (for example) means "allow
        # wheels", not "require wheels".
        if self.only_binary and ":all:" in self.only_binary:
            yield "--wheel"
            yield "--no-build"
        elif self.only_binary and ":none:" in self.only_binary:
            yield "--no-wheel"
            yield "--build"
        elif self.only_binary:
            yield from (f"--only-binary={pkg}" for pkg in self.only_binary)

        if self.no_binary and ":all:" in self.no_binary:
            yield "--no-wheel"
            yield "--build"
        elif self.no_binary and ":none:" in self.no_binary:
            yield "--wheel"
            yield "--no-build"
        elif self.no_binary:
            yield from (f"--only-build={pkg}" for pkg in self.no_binary)

        yield from (f"--path-mapping={v}" for v in self.path_mappings)

        yield from (f"--exclude={exclude}" for exclude in self.excludes)
        yield from (f"--override={override}" for override in self.overrides)
        yield from (f"--source={source}" for source in self.sources)
433

434

435
@dataclass(frozen=True)
class ResolvePexConfigRequest(EngineAwareParameter):
    """Find all configuration from `[python]` that impacts how the resolve is created.

    If `resolve_name` is None, then most per-resolve options will be ignored because there is no way
    for users to configure them. However, some options like `[python-repos].indexes` will still be
    loaded.
    """

    # The resolve to load configuration for, or None for repo-wide settings only.
    resolve_name: str | None

    def debug_hint(self) -> str:
        # Shown in engine debug/trace output to identify this request.
        return self.resolve_name or "<no resolve>"
448

449

450
@rule
async def determine_resolve_pex_config(
    request: ResolvePexConfigRequest,
    python_setup: PythonSetup,
    python_repos: PythonRepos,
    union_membership: UnionMembership,
) -> ResolvePexConfig:
    """Collect the `[python]` and `[python-repos]` options that shape how a resolve is created."""
    if request.resolve_name is None:
        # Without a resolve name, per-resolve options cannot apply: load only the
        # repo-wide settings and leave the per-resolve collections empty.
        return ResolvePexConfig(
            indexes=python_repos.indexes,
            find_links=python_repos.find_links,
            manylinux=python_setup.manylinux,
            constraints_file=None,
            no_binary=FrozenOrderedSet(),
            only_binary=FrozenOrderedSet(),
            excludes=FrozenOrderedSet(),
            overrides=FrozenOrderedSet(),
            sources=FrozenOrderedSet(),
            path_mappings=python_repos.path_mappings,
        )

    # Per-resolve options, each defaulting to empty when unset for this resolve.
    no_binary = python_setup.resolves_to_no_binary().get(request.resolve_name) or []
    only_binary = python_setup.resolves_to_only_binary().get(request.resolve_name) or []
    excludes = python_setup.resolves_to_excludes().get(request.resolve_name) or []
    overrides = python_setup.resolves_to_overrides().get(request.resolve_name) or []
    sources = python_setup.resolves_to_sources().get(request.resolve_name) or []

    constraints_file: ResolvePexConstraintsFile | None = None
    _constraints_file_path = python_setup.resolves_to_constraints_file().get(request.resolve_name)
    if _constraints_file_path:
        _constraints_origin = softwrap(
            f"""
            the option `[python].resolves_to_constraints_file` for the resolve
            '{request.resolve_name}'
            """
        )
        _constraints_path_globs = PathGlobs(
            [_constraints_file_path] if _constraints_file_path else [],
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin=_constraints_origin,
        )
        # TODO: Probably re-doing work here - instead of just calling one, then the next
        _constraints_digest, _constraints_digest_contents = await concurrently(
            path_globs_to_digest(_constraints_path_globs),
            get_digest_contents(**implicitly({_constraints_path_globs: PathGlobs})),
        )

        # The glob must match exactly one file; reject wildcard-style configurations.
        if len(_constraints_digest_contents) != 1:
            raise ValueError(
                softwrap(
                    f"""
                    Expected only one file from {_constraints_origin}, but matched:
                    {sorted(fc.path for fc in _constraints_digest_contents)}

                    Did you use a glob like `*`?
                    """
                )
            )
        _constraints_file_content = next(iter(_constraints_digest_contents))
        constraints = parse_requirements_file(
            _constraints_file_content.content.decode("utf-8"), rel_path=_constraints_file_path
        )
        constraints_file = ResolvePexConstraintsFile(
            _constraints_digest, _constraints_file_path, FrozenOrderedSet(constraints)
        )

    return ResolvePexConfig(
        indexes=python_repos.indexes,
        find_links=python_repos.find_links,
        manylinux=python_setup.manylinux,
        constraints_file=constraints_file,
        no_binary=FrozenOrderedSet(no_binary),
        only_binary=FrozenOrderedSet(only_binary),
        excludes=FrozenOrderedSet(excludes),
        overrides=FrozenOrderedSet(overrides),
        sources=FrozenOrderedSet(sources),
        path_mappings=python_repos.path_mappings,
    )
528

529

530
def validate_metadata(
    metadata: PythonLockfileMetadata,
    interpreter_constraints: InterpreterConstraints,
    lockfile: Lockfile,
    consumed_req_strings: Iterable[str],
    validate_consumed_req_strings: bool,
    python_setup: PythonSetup,
    resolve_config: ResolvePexConfig,
) -> None:
    """Given interpreter constraints and requirements to be consumed, validate lockfile metadata.

    On failure, either raises InvalidLockfileError or logs a warning, depending on
    `[python].invalid_lockfile_behavior`.
    """

    # TODO(#12314): Improve the exception if invalid strings
    user_requirements = [PipRequirement.parse(i) for i in consumed_req_strings]
    validation = metadata.is_valid_for(
        expected_invalidation_digest=lockfile.lockfile_hex_digest,
        user_interpreter_constraints=interpreter_constraints,
        interpreter_universe=python_setup.interpreter_versions_universe,
        # Passing an empty collection skips requirement-level validation.
        user_requirements=user_requirements if validate_consumed_req_strings else {},
        manylinux=resolve_config.manylinux,
        requirement_constraints=(
            resolve_config.constraints_file.constraints
            if resolve_config.constraints_file
            else set()
        ),
        only_binary=resolve_config.only_binary,
        no_binary=resolve_config.no_binary,
        excludes=resolve_config.excludes,
        overrides=resolve_config.overrides,
        sources=resolve_config.sources,
    )
    if validation:
        return

    # Build the full human-readable message from the error-generator pieces.
    error_msg_kwargs = dict(
        metadata=metadata,
        validation=validation,
        lockfile=lockfile,
        is_default_user_lockfile=lockfile.resolve_name == python_setup.default_resolve,
        user_interpreter_constraints=interpreter_constraints,
        user_requirements=user_requirements,
        maybe_constraints_file_path=(
            resolve_config.constraints_file.path if resolve_config.constraints_file else None
        ),
    )
    msg_iter = _invalid_lockfile_error(**error_msg_kwargs)  # type: ignore[arg-type]
    msg = "".join(msg_iter).strip()
    if python_setup.invalid_lockfile_behavior == InvalidLockfileBehavior.error:
        raise InvalidLockfileError(msg)
    logger.warning(msg)
579

580

581
def _common_failure_reasons(
    failure_reasons: set[InvalidPythonLockfileReason], maybe_constraints_file_path: str | None
) -> Iterator[str]:
    """Yield error bullet points for validation failures common to all lockfile uses."""
    if InvalidPythonLockfileReason.CONSTRAINTS_FILE_MISMATCH in failure_reasons:
        if maybe_constraints_file_path is None:
            constraints_text = """
                - Constraint file expected from lockfile metadata but no
                constraints file configured.  See the option
                `[python].resolves_to_constraints_file`.
                """
        else:
            constraints_text = f"""
                - The constraints file at {maybe_constraints_file_path} has changed from when the
                lockfile was generated. (Constraints files are set via the option
                `[python].resolves_to_constraints_file`)
                """
        yield softwrap(constraints_text)

    # The remaining reasons map directly onto fixed messages, yielded in a fixed order.
    _simple_mismatches = (
        (
            InvalidPythonLockfileReason.ONLY_BINARY_MISMATCH,
            """
            - The `only_binary` arguments have changed from when the lockfile was generated.
            (`only_binary` is set via the options `[python].resolves_to_only_binary` and deprecated
            `[python].only_binary`)
            """,
        ),
        (
            InvalidPythonLockfileReason.NO_BINARY_MISMATCH,
            """
            - The `no_binary` arguments have changed from when the lockfile was generated.
            (`no_binary` is set via the options `[python].resolves_to_no_binary` and deprecated
            `[python].no_binary`)
            """,
        ),
        (
            InvalidPythonLockfileReason.MANYLINUX_MISMATCH,
            """
            - The `manylinux` argument has changed from when the lockfile was generated.
            (manylinux is set via the option `[python].resolver_manylinux`)
            """,
        ),
    )
    for reason, message in _simple_mismatches:
        if reason in failure_reasons:
            yield softwrap(message)
624

625

626
def _invalid_lockfile_error(
    metadata: PythonLockfileMetadata,
    validation: LockfileMetadataValidation,
    lockfile: Lockfile,
    *,
    is_default_user_lockfile: bool,
    user_requirements: list[PipRequirement],
    user_interpreter_constraints: InterpreterConstraints,
    maybe_constraints_file_path: str | None,
) -> Iterator[str]:
    """Yield the pieces of a human-readable error/warning message for an invalid lockfile."""
    resolve = lockfile.resolve_name
    # Show at most two requirements explicitly, then summarize the rest by count.
    consumed_msg_parts = [f"`{str(r)}`" for r in user_requirements[0:2]]
    if len(user_requirements) > 2:
        consumed_msg_parts.append(
            f"{len(user_requirements) - 2} other "
            f"{pluralize(len(user_requirements) - 2, 'requirement', include_count=False)}"
        )

    yield f"\n\nYou are consuming {comma_separated_list(consumed_msg_parts)} from "
    if lockfile.url.startswith("resource://"):
        yield f"the built-in `{resolve}` lockfile provided by Pants "
    else:
        yield f"the `{resolve}` lockfile at {lockfile.url} "
    yield "with incompatible inputs.\n\n"

    # Digest/requirements mismatches: explain where requirements and destinations come from.
    if any(
        i
        in (
            InvalidPythonLockfileReason.INVALIDATION_DIGEST_MISMATCH,
            InvalidPythonLockfileReason.REQUIREMENTS_MISMATCH,
        )
        for i in validation.failure_reasons
    ):
        yield (
            softwrap(
                """
            - The lockfile does not provide all the necessary requirements. You must
            modify the input requirements and/or regenerate the lockfile (see below).
            """
            )
            + "\n\n"
        )
        if is_default_user_lockfile:
            yield softwrap(
                f"""
                - The necessary requirements are specified by requirements targets marked with
                `resolve="{resolve}"`, or those with no explicit resolve (since `{resolve}` is the
                default for this repo).

                - The lockfile destination is specified by the `{resolve}` key in `[python].resolves`.
                """
            )
        else:
            yield softwrap(
                f"""
                - The necessary requirements are specified by requirements targets marked with
                `resolve="{resolve}"`.

                - The lockfile destination is specified by the `{resolve}` key in
                `[python].resolves`.
                """
            )

        if isinstance(metadata, PythonLockfileMetadataV2):
            # Note that by the time we have gotten to this error message, we should have already
            # validated that the transitive closure is using the same resolve, via
            # pex_from_targets.py. This implies that we don't need to worry about users depending
            # on python_requirement targets that aren't in that code's resolve.
            not_in_lock = sorted(str(r) for r in set(user_requirements) - metadata.requirements)
            yield f"\n\n- The requirements not provided by the `{resolve}` resolve are:\n  "
            yield str(not_in_lock)

    if InvalidPythonLockfileReason.INTERPRETER_CONSTRAINTS_MISMATCH in validation.failure_reasons:
        yield "\n\n"
        yield softwrap(
            f"""
            - The inputs use interpreter constraints (`{user_interpreter_constraints}`) that
            are not a subset of those used to generate the lockfile
            (`{metadata.valid_for_interpreter_constraints}`).

            - The input interpreter constraints are specified by your code, using
            the `[python].interpreter_constraints` option and the `interpreter_constraints`
            target field.

            - To create a lockfile with new interpreter constraints, update the option
            `[python].resolves_to_interpreter_constraints`, and then generate the lockfile
            (see below).
            """
        )
        yield f"\n\nSee {doc_url('docs/python/overview/interpreter-compatibility')} for details."

    yield "\n\n"
    # Append the failure reasons shared with other lockfile kinds, then the fix instructions.
    yield from (
        f"{fail}\n"
        for fail in _common_failure_reasons(validation.failure_reasons, maybe_constraints_file_path)
    )
    yield "To regenerate your lockfile, "
    yield f"run `{bin_name()} generate-lockfiles --resolve={resolve}`."
    yield f"\n\nSee {doc_url('docs/python/overview/third-party-dependencies')} for details.\n\n"
725

726

727
def rules():
    # Register this module's @rule-decorated functions with the engine.
    return collect_rules()
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2025 Coveralls, Inc