
pantsbuild / pants / 19015773527

02 Nov 2025 05:33PM UTC coverage: 17.872% (-62.4%) from 80.3%

Pull Request #22816: Update Pants internal Python to 3.14
Merge a12d75757 into 6c024e162

4 of 5 new or added lines in 3 files covered. (80.0%)

28452 existing lines in 683 files now uncovered.

9831 of 55007 relevant lines covered (17.87%)

0.18 hits per line

Source File

/src/python/pants/backend/python/util_rules/interpreter_constraints.py (40.25%)

# Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).

from __future__ import annotations

import itertools
import logging
import re
from collections import defaultdict
from collections.abc import Iterable, Iterator, Sequence
from typing import Protocol, TypeVar

from packaging.requirements import InvalidRequirement, Requirement

from pants.backend.python.subsystems.setup import PythonSetup
from pants.backend.python.target_types import InterpreterConstraintsField, PythonResolveField
from pants.build_graph.address import Address
from pants.engine.engine_aware import EngineAwareParameter
from pants.engine.target import Target
from pants.util.docutil import bin_name
from pants.util.frozendict import FrozenDict
from pants.util.memo import memoized
from pants.util.ordered_set import FrozenOrderedSet, OrderedSet
from pants.util.strutil import softwrap

logger = logging.getLogger(__name__)


# This protocol allows us to work with any arbitrary FieldSet. See
# https://mypy.readthedocs.io/en/stable/protocols.html.
class FieldSetWithInterpreterConstraints(Protocol):
    @property
    def address(self) -> Address: ...

    @property
    def interpreter_constraints(self) -> InterpreterConstraintsField: ...

    @property
    def resolve(self) -> PythonResolveField: ...


_FS = TypeVar("_FS", bound=FieldSetWithInterpreterConstraints)


RawConstraints = tuple[str, ...]


# The current maxes are 2.7.18 and 3.6.15.  We go much higher, for safety.
_PATCH_VERSION_UPPER_BOUND = 30


@memoized
def interpreter_constraints_contains(
    a: RawConstraints, b: RawConstraints, interpreter_universe: tuple[str, ...]
) -> bool:
    """A memoized version of `InterpreterConstraints.contains`.

    This is a function in order to keep the memoization cache on the module rather than on an
    instance. It can't go on `PythonSetup`, since that would cause a cycle with this module.
    """
    return InterpreterConstraints(a).contains(InterpreterConstraints(b), interpreter_universe)


@memoized
def parse_constraint(constraint: str) -> Requirement:
    """Parse an interpreter constraint, e.g., CPython>=2.7,<3.

    We allow shorthand such as `>=3.7`, which gets expanded to `CPython>=3.7`. See Pex's
    interpreter.py's `parse_requirement()`.
    """
    try:
        parsed_requirement = Requirement(constraint)
    except InvalidRequirement as err:
        try:
            parsed_requirement = Requirement(f"CPython{constraint}")
        except InvalidRequirement:
            raise InvalidRequirement(
                f"Failed to parse Python interpreter constraint `{constraint}`: {err}"
            )

    return parsed_requirement
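
# A minimal illustration of the shorthand handling documented above (hypothetical values,
# shown only as a sketch of the expected behaviour, not taken from this report):
#
#     parse_constraint(">=3.7").name          # "CPython" (bare specifiers gain the CPython prefix)
#     str(parse_constraint("CPython>=3.7"))   # "CPython>=3.7" (already-named constraints parse as-is)
#     parse_constraint("not a constraint")    # raises InvalidRequirement, wrapping the original error
#
# Because the function is @memoized, repeated calls with the same string return the same
# Requirement object.
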

# Normally we would subclass `DeduplicatedCollection`, but we want a custom constructor.
class InterpreterConstraints(FrozenOrderedSet[Requirement], EngineAwareParameter):
    @classmethod
    def for_fixed_python_version(
        cls, python_version_str: str, interpreter_type: str = "CPython"
    ) -> InterpreterConstraints:
        return cls([f"{interpreter_type}=={python_version_str}"])

    def __init__(self, constraints: Iterable[str | Requirement] = ()) -> None:
        # #12578 `parse_constraint` will sort the requirement's component constraints into a stable form.
        # We need to sort the component constraints for each requirement _before_ sorting the entire list
        # for the ordering to be correct.
        parsed_constraints = (
            i if isinstance(i, Requirement) else parse_constraint(i) for i in constraints
        )
        super().__init__(sorted(parsed_constraints, key=lambda c: str(c)))

    def __str__(self) -> str:
        return " OR ".join(str(constraint) for constraint in self)

    def debug_hint(self) -> str:
        return str(self)

    @property
    def description(self) -> str:
        return str(sorted(str(c) for c in self))

    @classmethod
    def merge(cls, ics: Iterable[InterpreterConstraints]) -> InterpreterConstraints:
        return InterpreterConstraints(
            cls.merge_constraint_sets(tuple(str(requirement) for requirement in ic) for ic in ics)
        )

    @classmethod
    def merge_constraint_sets(
        cls, constraint_sets: Iterable[Iterable[str]]
    ) -> frozenset[Requirement]:
        """Given a collection of constraints sets, merge by ORing within each individual constraint
        set and ANDing across each distinct constraint set.

        For example, given `[["CPython>=2.7", "CPython<=3"], ["CPython==3.6.*"]]`, return
        `["CPython>=2.7,==3.6.*", "CPython<=3,==3.6.*"]`.
        """
        # A sentinel to indicate a requirement that is impossible to satisfy (i.e., one that
        # requires two different interpreter types).
        impossible = parse_constraint("IMPOSSIBLE")

        # Each element (a Set[ParsedConstraint]) will get ANDed. We use sets to deduplicate
        # identical top-level parsed constraint sets.

        # First filter out any empty constraint_sets, as those represent "no constraints", i.e.,
        # any interpreters are allowed, so omitting them has the logical effect of ANDing them with
        # the others, without having to deal with the vacuous case below.
        constraint_sets = [cs for cs in constraint_sets if cs]
        if not constraint_sets:
            return frozenset()

        parsed_constraint_sets: set[frozenset[Requirement]] = set()
        for constraint_set in constraint_sets:
            # Each element (a ParsedConstraint) will get ORed.
            parsed_constraint_set = frozenset(
                parse_constraint(constraint) for constraint in constraint_set
            )
            parsed_constraint_sets.add(parsed_constraint_set)

        if len(parsed_constraint_sets) == 1:
            return next(iter(parsed_constraint_sets))

        def and_constraints(parsed_requirements: Sequence[Requirement]) -> Requirement:
            assert len(parsed_requirements) > 0, "At least one `Requirement` must be supplied."
            expected_name = parsed_requirements[0].name
            current_requirement_specifier = parsed_requirements[0].specifier
            for requirement in parsed_requirements[1:]:
                if requirement.name != expected_name:
                    return impossible
                current_requirement_specifier &= requirement.specifier
            return Requirement(f"{expected_name}{current_requirement_specifier}")

        ored_constraints = (
            and_constraints(constraints_product)
            for constraints_product in itertools.product(*parsed_constraint_sets)
        )
        ret = frozenset(cs for cs in ored_constraints if cs != impossible)
        if not ret:
            # There are no possible combinations.
            attempted_str = " AND ".join(f"({' OR '.join(cs)})" for cs in constraint_sets)
            raise ValueError(
                softwrap(
                    f"""
                    These interpreter constraints cannot be merged, as they require
                    conflicting interpreter types: {attempted_str}
                    """
                )
            )
        return ret
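
    # A sketch of the OR-within / AND-across merge described in the docstring above
    # (hypothetical call; the exact ordering of specifier components in the output may differ):
    #
    #     InterpreterConstraints.merge_constraint_sets(
    #         [["CPython>=2.7", "CPython<=3"], ["CPython==3.6.*"]]
    #     )
    #     # -> frozenset of roughly {CPython>=2.7,==3.6.*  and  CPython<=3,==3.6.*}
    #
    # Mixing interpreter types, e.g. [["CPython==3.9.*"], ["PyPy==3.9.*"]], leaves no
    # satisfiable combination and raises the ValueError shown above.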

    @classmethod
    def create_from_targets(
        cls, targets: Iterable[Target], python_setup: PythonSetup
    ) -> InterpreterConstraints | None:
        """Returns merged InterpreterConstraints for the given Targets.

        If none of the given Targets have InterpreterConstraintsField, returns None.

        NB: Because Python targets validate that they have ICs which are a subset of their
        dependencies, merging constraints like this is only necessary when you are _mixing_ code
        which might not have any interdependencies, such as when you're merging unrelated roots.
        """
        fields = [
            (
                tgt[InterpreterConstraintsField],
                tgt[PythonResolveField] if tgt.has_field(PythonResolveField) else None,
            )
            for tgt in targets
            if tgt.has_field(InterpreterConstraintsField)
        ]
        if not fields:
            return None
        return cls.create_from_compatibility_fields(fields, python_setup)

    @classmethod
    def create_from_field_sets(
        cls, fs: Iterable[_FS], python_setup: PythonSetup
    ) -> InterpreterConstraints:
        return cls.create_from_compatibility_fields(
            [(field_set.interpreter_constraints, field_set.resolve) for field_set in fs],
            python_setup,
        )

    @classmethod
    def create_from_compatibility_fields(
        cls,
        fields: Iterable[tuple[InterpreterConstraintsField, PythonResolveField | None]],
        python_setup: PythonSetup,
    ) -> InterpreterConstraints:
        """Returns merged InterpreterConstraints for the given `InterpreterConstraintsField`s.

        NB: Because Python targets validate that they have ICs which are a subset of their
        dependencies, merging constraints like this is only necessary when you are _mixing_ code
        which might not have any inter-dependencies, such as when you're merging un-related roots.
        """
        constraint_sets = {
            ics.value_or_configured_default(python_setup, resolve) for ics, resolve in fields
        }
        # This will OR within each field and AND across fields.
        merged_constraints = cls.merge_constraint_sets(constraint_sets)
        return InterpreterConstraints(merged_constraints)

    @classmethod
    def group_field_sets_by_constraints(
        cls, field_sets: Iterable[_FS], python_setup: PythonSetup
    ) -> FrozenDict[InterpreterConstraints, tuple[_FS, ...]]:
        results = defaultdict(set)
        for fs in field_sets:
            constraints = cls.create_from_compatibility_fields(
                [(fs.interpreter_constraints, fs.resolve)], python_setup
            )
            results[constraints].add(fs)
        return FrozenDict(
            {
                constraints: tuple(sorted(field_sets, key=lambda fs: fs.address))
                for constraints, field_sets in sorted(results.items())
            }
        )

    def generate_pex_arg_list(self) -> list[str]:
        args = []
        for constraint in self:
            args.extend(["--interpreter-constraint", str(constraint)])
        return args
255
    def _valid_patch_versions(self, major: int, minor: int) -> Iterator[int]:
1✔
256
        for p in range(0, _PATCH_VERSION_UPPER_BOUND + 1):
1✔
257
            for req in self:
1✔
258
                if req.specifier.contains(f"{major}.{minor}.{p}"):
1✔
259
                    yield p
1✔
260

261
    def _includes_version(self, major: int, minor: int) -> bool:
1✔
UNCOV
262
        return any(True for _ in self._valid_patch_versions(major, minor))
×
263

264
    def includes_python2(self) -> bool:
1✔
265
        """Checks if any of the constraints include Python 2.
266

267
        This will return True even if the code works with Python 3 too, so long as at least one of
268
        the constraints works with Python 2.
269
        """
UNCOV
270
        return self._includes_version(2, 7)
×
271

272
    def minimum_python_version(self, interpreter_universe: Iterable[str]) -> str | None:
1✔
273
        """Find the lowest major.minor Python version that will work with these constraints.
274

275
        The constraints may also be compatible with later versions; this is the lowest version that
276
        still works.
277
        """
UNCOV
278
        for major, minor in sorted(_major_minor_to_int(s) for s in interpreter_universe):
×
UNCOV
279
            if self._includes_version(major, minor):
×
UNCOV
280
                return f"{major}.{minor}"
×
UNCOV
281
        return None
×
282

283
    def snap_to_minimum(self, interpreter_universe: Iterable[str]) -> InterpreterConstraints | None:
1✔
284
        """Snap to the lowest Python major.minor version that works with these constraints.
285

286
        Will exclude patch versions that are expressly incompatible.
287
        """
UNCOV
288
        for major, minor in sorted(_major_minor_to_int(s) for s in interpreter_universe):
×
UNCOV
289
            for p in range(0, _PATCH_VERSION_UPPER_BOUND + 1):
×
UNCOV
290
                for req in self:
×
UNCOV
291
                    if req.specifier.contains(f"{major}.{minor}.{p}"):
×
292
                        # We've found the minimum major.minor that is compatible.
UNCOV
293
                        req_strs = [f"{req.name}=={major}.{minor}.*"]
×
294
                        # Now find any patches within that major.minor that we must exclude.
UNCOV
295
                        invalid_patches = sorted(
×
296
                            set(range(0, _PATCH_VERSION_UPPER_BOUND + 1))
297
                            - set(self._valid_patch_versions(major, minor))
298
                        )
UNCOV
299
                        req_strs.extend(f"!={major}.{minor}.{p}" for p in invalid_patches)
×
UNCOV
300
                        req_str = ",".join(req_strs)
×
UNCOV
301
                        snapped = parse_constraint(req_str)
×
UNCOV
302
                        return InterpreterConstraints([snapped])
×
UNCOV
303
        return None
×
304
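
    # A sketch of how these two snap toward the lowest compatible version, assuming a
    # hypothetical universe of ["2.7", "3.8", "3.9"]:
    #
    #     ics = InterpreterConstraints([">=3.8"])
    #     ics.minimum_python_version(["2.7", "3.8", "3.9"])   # "3.8"
    #     ics.snap_to_minimum(["2.7", "3.8", "3.9"])          # constraints equivalent to CPython==3.8.*
    #
    #     InterpreterConstraints(["==3.9.*,!=3.9.1"]).snap_to_minimum(["3.8", "3.9"])
    #     # -> constraints equivalent to CPython==3.9.*,!=3.9.1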

    def _requires_python3_version_or_newer(
        self, *, allowed_versions: Iterable[str], prior_version: str
    ) -> bool:
        if not self:
            return False
        patch_versions = list(reversed(range(0, _PATCH_VERSION_UPPER_BOUND)))
        # We only look at the prior Python release. For example, consider Python 3.8+
        # looking at 3.7. If using something like `>=3.5`, Py37 will be included.
        # `==3.6.*,!=3.7.*,==3.8.*` is unlikely, and even that will work correctly as
        # it's an invalid constraint so setuptools returns False always. `['==2.7.*', '==3.8.*']`
        # will fail because not every single constraint is exclusively 3.8.
        prior_versions = [f"{prior_version}.{p}" for p in patch_versions]
        allowed_versions = [
            f"{major_minor}.{p}" for major_minor in allowed_versions for p in patch_versions
        ]

        def valid_constraint(constraint: Requirement) -> bool:
            if any(constraint.specifier.contains(prior) for prior in prior_versions):
                return False
            if not any(constraint.specifier.contains(allowed) for allowed in allowed_versions):
                return False
            return True

        return all(valid_constraint(c) for c in self)

    def requires_python38_or_newer(self, interpreter_universe: Iterable[str]) -> bool:
        """Checks if the constraints are all for Python 3.8+.

        This will return False if Python 3.8 is allowed, but prior versions like 3.7 are also
        allowed.
        """
        py38_and_later = [
            interp for interp in interpreter_universe if _major_minor_to_int(interp) >= (3, 8)
        ]
        return self._requires_python3_version_or_newer(
            allowed_versions=py38_and_later, prior_version="3.7"
        )
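
    # Illustrative behaviour, assuming a hypothetical universe of ["3.7", "3.8", "3.9"]:
    #
    #     InterpreterConstraints([">=3.8"]).requires_python38_or_newer(["3.7", "3.8", "3.9"])  # True
    #     InterpreterConstraints([">=3.7"]).requires_python38_or_newer(["3.7", "3.8", "3.9"])  # False,
    #     # because 3.7 is still allowed even though 3.8 is too.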

    def to_poetry_constraint(self) -> str:
        specifiers = []
        wildcard_encountered = False
        for constraint in self:
            specifier = str(constraint.specifier)
            if specifier:
                specifiers.append(specifier)
            else:
                wildcard_encountered = True
        if not specifiers or wildcard_encountered:
            return "*"
        return " || ".join(specifiers)
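
    # Sketch of the Poetry-style output, where OR becomes `||` and an unconstrained entry
    # collapses everything to `*` (hypothetical inputs):
    #
    #     InterpreterConstraints(["==3.9.*"]).to_poetry_constraint()              # "==3.9.*"
    #     InterpreterConstraints(["==3.8.*", "==3.9.*"]).to_poetry_constraint()   # "==3.8.* || ==3.9.*"
    #     InterpreterConstraints(["CPython"]).to_poetry_constraint()              # "*"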

    def enumerate_python_versions(
        self, interpreter_universe: Iterable[str]
    ) -> FrozenOrderedSet[tuple[int, int, int]]:
        """Return a set of all plausible (major, minor, patch) tuples for all Python 2.7/3.x in the
        specified interpreter universe that matches this set of interpreter constraints.

        This also validates our assumptions around the `interpreter_universe`:

        - Python 2.7 is the only Python 2 version in the universe, if at all.
        - Python 3 is the last major release of Python, which the core devs have committed to in
          public several times.
        """
        if not self:
            return FrozenOrderedSet()

        minors = []
        for major_minor in interpreter_universe:
            major, minor = _major_minor_to_int(major_minor)
            if major == 2:
                if minor != 7:
                    raise AssertionError(
                        softwrap(
                            f"""
                            Unexpected value in `[python].interpreter_versions_universe`:
                            {major_minor}. Expected the only Python 2 value to be '2.7', given that
                            all other versions are unmaintained or do not exist.
                            """
                        )
                    )
                minors.append((2, minor))
            elif major == 3:
                minors.append((3, minor))
            else:
                raise AssertionError(
                    softwrap(
                        f"""
                        Unexpected value in `[python].interpreter_versions_universe`:
                        {major_minor}. Expected to only include '2.7' and/or Python 3 versions,
                        given that Python 3 will be the last major Python version. Please open an
                        issue at https://github.com/pantsbuild/pants/issues/new if this is no longer
                        true.
                        """
                    )
                )

        valid_patches = FrozenOrderedSet(
            (major, minor, patch)
            for (major, minor) in sorted(minors)
            for patch in self._valid_patch_versions(major, minor)
        )

        if not valid_patches:
            raise ValueError(
                softwrap(
                    f"""
                    The interpreter constraints `{self}` are not compatible with any of the
                    interpreter versions from `[python].interpreter_versions_universe`.

                    Please either change these interpreter constraints or update the
                    `interpreter_versions_universe` to include the interpreters set in these
                    constraints. Run `{bin_name()} help-advanced python` for more information on the
                    `interpreter_versions_universe` option.
                    """
                )
            )

        return valid_patches

    def contains(self, other: InterpreterConstraints, interpreter_universe: Iterable[str]) -> bool:
        """Returns True if the `InterpreterConstraints` specified in `other` is a subset of these
        `InterpreterConstraints`.

        This is restricted to the set of minor Python versions specified in `universe`.
        """
        if self == other:
            return True
        this = self.enumerate_python_versions(interpreter_universe)
        that = other.enumerate_python_versions(interpreter_universe)
        return this.issuperset(that)
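
    # A sketch of the subset check, assuming a hypothetical universe of ["3.8", "3.9", "3.10"]:
    #
    #     broad = InterpreterConstraints([">=3.8"])
    #     narrow = InterpreterConstraints(["==3.9.*"])
    #     broad.contains(narrow, ["3.8", "3.9", "3.10"])   # True: every 3.9.x is also >=3.8
    #     narrow.contains(broad, ["3.8", "3.9", "3.10"])   # False: >=3.8 also allows 3.8.x and 3.10.x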

    def partition_into_major_minor_versions(
        self, interpreter_universe: Iterable[str]
    ) -> tuple[str, ...]:
        """Return all the valid major.minor versions, e.g. `('2.7', '3.6')`."""
        result: OrderedSet[str] = OrderedSet()
        for major, minor, _ in self.enumerate_python_versions(interpreter_universe):
            result.add(f"{major}.{minor}")
        return tuple(result)

    def major_minor_version_when_single_and_entire(self) -> None | tuple[int, int]:
        """Returns the (major, minor) version that these constraints cover, if they cover all of
        exactly one major minor version, without rules about patch versions.

        This is a best effort function, e.g. for using during inference that can be overridden.

        Examples:

        All of these return (3, 9): `==3.9.*`, `CPython==3.9.*`, `>=3.9,<3.10`, `<3.10,>=3.9`

        All of these return None:

        - `==3.9.10`: restricted to a single patch version
        - `==3.9`: restricted to a single patch version (0, implicitly)
        - `==3.9.*,!=3.9.2`: excludes a patch
        - `>=3.9,<3.11`: more than one major version
        - `>=3.9,<3.11,!=3.10`: too complicated to understand it only includes 3.9
        - more than one requirement in the list: too complicated
        """

        try:
            return _major_minor_version_when_single_and_entire(self)
        except _NonSimpleMajorMinor:
            return None
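
    # Sketch of the cases listed in the docstring above (hypothetical calls):
    #
    #     InterpreterConstraints(["==3.9.*"]).major_minor_version_when_single_and_entire()       # (3, 9)
    #     InterpreterConstraints([">=3.9,<3.10"]).major_minor_version_when_single_and_entire()   # (3, 9)
    #     InterpreterConstraints(["==3.9.10"]).major_minor_version_when_single_and_entire()      # None
    #     InterpreterConstraints([">=3.9,<3.11"]).major_minor_version_when_single_and_entire()   # None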


def _major_minor_to_int(major_minor: str) -> tuple[int, int]:
    return tuple(int(x) for x in major_minor.split(".", maxsplit=1))  # type: ignore[return-value]


class _NonSimpleMajorMinor(Exception):
    pass


_ANY_PATCH_VERSION = re.compile(r"^(?P<major>\d+)\.(?P<minor>\d+)(?P<any_patch>\.\*)?$")


def _parse_simple_version(version: str, require_any_patch: bool) -> tuple[int, int]:
    match = _ANY_PATCH_VERSION.fullmatch(version)
    if match is None or (require_any_patch and match.group("any_patch") is None):
        raise _NonSimpleMajorMinor()

    return int(match.group("major")), int(match.group("minor"))


def _major_minor_version_when_single_and_entire(ics: InterpreterConstraints) -> tuple[int, int]:
    if len(ics) != 1:
        raise _NonSimpleMajorMinor()

    req = next(iter(ics))

    just_cpython = req.name == "CPython" and not req.extras and not req.marker
    if not just_cpython:
        raise _NonSimpleMajorMinor()

    # ==major.minor or ==major.minor.*
    if len(req.specifier) == 1:
        specifier = next(iter(req.specifier))
        if specifier.operator != "==":
            raise _NonSimpleMajorMinor()

        return _parse_simple_version(specifier.version, require_any_patch=True)

    # >=major.minor,<major.(minor+1)
    if len(req.specifier) == 2:
        specifiers = sorted(req.specifier, key=lambda s: s.version)
        operator_lo, version_lo = (specifiers[0].operator, specifiers[0].version)
        operator_hi, version_hi = (specifiers[1].operator, specifiers[1].version)

        if operator_lo != ">=":
            # if the lo operator isn't >=, they might be in the wrong order (or, if not, the check
            # below will catch them)
            operator_lo, operator_hi = operator_hi, operator_lo
            version_lo, version_hi = version_hi, version_lo

        if operator_lo != ">=" and operator_hi != "<":
            raise _NonSimpleMajorMinor()

        major_lo, minor_lo = _parse_simple_version(version_lo, require_any_patch=False)
        major_hi, minor_hi = _parse_simple_version(version_hi, require_any_patch=False)

        if major_lo == major_hi and minor_lo + 1 == minor_hi:
            return major_lo, minor_lo

        raise _NonSimpleMajorMinor()

    # anything else we don't understand
    raise _NonSimpleMajorMinor()


@memoized
def _warn_on_python2_usage_in_interpreter_constraints(
    interpreter_constraints: tuple[str, ...], *, description_of_origin: str
) -> None:
    ics = InterpreterConstraints(interpreter_constraints)
    if ics.includes_python2():
        logger.warning(
            f"The Python interpreter constraints from {description_of_origin} includes Python 2.x as a selected Python version. "
            "Please note that Pants will no longer be proactively tested with Python 2.x starting with Pants v2.24.x because "
            "Python 2 support ended on 1 January 2020. Please consider upgrading to Python 3.x for your code."
        )


def warn_on_python2_usage_in_interpreter_constraints(
    interpreter_constraints: Iterable[str], *, description_of_origin: str
) -> None:
    _warn_on_python2_usage_in_interpreter_constraints(
        tuple(interpreter_constraints), description_of_origin=description_of_origin
    )
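
# A sketch of the public wrapper above (hypothetical origin string): the constraints are
# converted to a tuple so the @memoized private helper emits the warning at most once per
# distinct constraint set.
#
#     warn_on_python2_usage_in_interpreter_constraints(
#         ["CPython>=2.7,<3"],
#         description_of_origin="the `[python].interpreter_constraints` option",
#     )
#     # logs a warning because the constraints include Python 2.7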