/src/python/pants/backend/python/macros/poetry_requirements.py
# Copyright 2022 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).

from __future__ import annotations

import itertools
import logging
import urllib.parse
from collections.abc import Iterator, Mapping, Sequence
from dataclasses import dataclass
from functools import partial
from pathlib import Path, PurePath
from typing import Any, cast

import toml
from packaging.version import InvalidVersion, Version
from typing_extensions import TypedDict

from pants.backend.python.macros.common_fields import (
    ModuleMappingField,
    RequirementsOverrideField,
    TypeStubsModuleMappingField,
)
from pants.backend.python.macros.common_requirements_rule import _generate_requirements
from pants.backend.python.subsystems.setup import PythonSetup
from pants.backend.python.target_types import PythonRequirementResolveField, PythonRequirementTarget
from pants.base.build_root import BuildRoot
from pants.engine.rules import collect_rules, rule
from pants.engine.target import (
    COMMON_TARGET_FIELDS,
    GeneratedTargets,
    GenerateTargetsRequest,
    SingleSourceField,
    TargetGenerator,
)
from pants.engine.unions import UnionMembership, UnionRule
from pants.util.logging import LogLevel
from pants.util.pip_requirement import PipRequirement
from pants.util.strutil import softwrap

logger = logging.getLogger(__name__)

# ---------------------------------------------------------------------------------
# pyproject.toml parsing
# ---------------------------------------------------------------------------------


class PyprojectAttr(TypedDict, total=False):
    extras: list[str]
    git: str
    rev: str
    branch: str
    python: str
    markers: str
    tag: str
    version: str
    url: str
    path: str


def get_max_caret(parsed_version: Version) -> str:
    major = 0
    minor = 0
    micro = 0

    if parsed_version.major != 0:
        major = parsed_version.major + 1
    elif parsed_version.minor != 0:
        minor = parsed_version.minor + 1
    elif parsed_version.micro != 0:
        micro = parsed_version.micro + 1
    else:
        base_len = len(parsed_version.base_version.split("."))
        if base_len >= 3:
            micro = 1
        elif base_len == 2:
            minor = 1
        elif base_len == 1:
            major = 1

    return f"{major}.{minor}.{micro}"


def get_max_tilde(parsed_version: Version) -> str:
    major = 0
    minor = 0
    base_len = len(parsed_version.base_version.split("."))
    if base_len >= 2:
        minor = int(str(parsed_version.minor)) + 1
        major = int(str(parsed_version.major))
    elif base_len == 1:
        major = int(str(parsed_version.major)) + 1

    return f"{major}.{minor}.0"


def get_max_wildcard(parsed_version: Version) -> str:
    # Note: Assumes this is not a global wildcard, so parsed_version.release has
    # at least two components.
    release = list(parsed_version.release)
    release[-2] += 1
    major = release[0]
    minor = release[1]
    return f"{major}.{minor}.0"


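# Illustrative sketch (not part of the original module): the three helpers above compute
# the exclusive upper bound implied by Poetry's caret, tilde, and wildcard operators.
# The expected values below follow directly from the code:
#
#     >>> from packaging.version import Version
#     >>> get_max_caret(Version("1.2.3"))    # ^1.2.3  -> >=1.2.3,<2.0.0
#     '2.0.0'
#     >>> get_max_caret(Version("0.2.3"))    # ^0.2.3  -> >=0.2.3,<0.3.0
#     '0.3.0'
#     >>> get_max_tilde(Version("1.2.3"))    # ~1.2.3  -> >=1.2.3,<1.3.0
#     '1.3.0'
#     >>> get_max_wildcard(Version("1.2.0"))  # 1.2.*  -> >=1.2.0,<1.3.0
#     '1.3.0'

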
def parse_str_version(attributes: str, **kwargs: str) -> str:
    valid_specifiers = "<>!~="
    pep440_reqs = []
    proj_name = kwargs["proj_name"]
    fp = kwargs["file_path"]
    extras_str = kwargs["extras_str"]
    comma_split_reqs = (i.strip() for i in attributes.split(","))
    for req in comma_split_reqs:

        def parse_version(version_str: str) -> Version:
            try:
                return Version(version_str)
            except InvalidVersion:
                raise InvalidVersion(
                    softwrap(
                        f"""
                        Failed to parse requirement {proj_name} = "{req}" in {fp} loaded by the
                        poetry_requirements macro.

                        If you believe this requirement is valid, consider opening an issue at
                        https://github.com/pantsbuild/pants/issues so that we can update Pants'
                        Poetry macro to support this.
                        """
                    )
                )

        if not req:
            continue
        if req[0] == "^":
            parsed_version = parse_version(req[1:])
            max_ver = get_max_caret(parsed_version)
            min_ver = f"{parsed_version.public}"
            pep440_reqs.append(f">={min_ver},<{max_ver}")
        elif req[0] == "~" and req[1] != "=":
            # ~= is an acceptable default operator; however, ~ is not, and IS NOT the same as ~=
            parsed_version = parse_version(req[1:])
            max_ver = get_max_tilde(parsed_version)
            min_ver = f"{parsed_version.public}"
            pep440_reqs.append(f">={min_ver},<{max_ver}")
        elif req[-1] == "*":
            if req != "*":  # This is not a global wildcard.
                # To parse we replace the * with a 0.
                parsed_version = parse_version(f"{req[:-1]}0")
                max_ver = get_max_wildcard(parsed_version)
                min_ver = f"{parsed_version.public}"
                pep440_reqs.append(f">={min_ver},<{max_ver}")
        else:
            pep440_reqs.append(req if req[0] in valid_specifiers else f"=={req}")
    return f"{proj_name}{extras_str} {','.join(pep440_reqs)}".rstrip()


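# Illustrative sketch (not part of the original module): converting a Poetry version
# string into a PEP 440 requirement string. The project name, file path, and extras are
# supplied as keyword arguments by the callers above; the names used here are hypothetical.
#
#     >>> parse_str_version("^1.2.3", proj_name="foo", file_path="pyproject.toml", extras_str="")
#     'foo >=1.2.3,<2.0.0'
#     >>> parse_str_version("~1.2", proj_name="foo", file_path="pyproject.toml", extras_str="[bar]")
#     'foo[bar] >=1.2,<1.3.0'

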
def parse_python_constraint(constr: str | None, fp: str) -> str:
    if constr is None:
        return ""
    valid_specifiers = "<>!~= "
    # If the user passes multiple Python constraints, they're separated by
    # either '||' signifying a logical 'or', or a comma signifying a logical
    # 'and'. Hence, or_and_split is a 2D list where each inner list is a set of and-ed
    # requirements; every list in the second layer is then or-ed together.
    or_and_split = [[j.strip() for j in i.split(",")] for i in constr.split("||")]

    # We only use parse_str_version to address the version parsing; we don't
    # care about having an actual Requirement object so things like the project name
    # and extras that would ordinarily exist for a project with a string version are left blank here.
    ver_parsed = [
        [parse_str_version(j, proj_name="", file_path=fp, extras_str="") for j in i]
        for i in or_and_split
    ]

    def conv_and(lst: list[str]) -> list:
        return list(itertools.chain(*[i.split(",") for i in lst]))

    def prepend(version: str) -> str:
        valid_versions = "".join(i for i in version if i in valid_specifiers)
        invalid_versions = "".join(i for i in version if i not in valid_specifiers)
        return f"python_version{valid_versions} '{invalid_versions}'"

    prepend_and_clean = [
        [prepend(".".join(j.split(".")[:2])) for j in conv_and(i)] for i in ver_parsed
    ]
    return (
        ("(" if len(or_and_split) > 1 else "")
        + (") or (".join([" and ".join(i) for i in prepend_and_clean]))
        + (")" if len(or_and_split) > 1 else "")
    )


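# Illustrative sketch (not part of the original module): a Poetry `python` constraint is
# turned into a PEP 508 environment-marker expression, with '||' groups or-ed together
# and comma-separated constraints and-ed together. Modulo exact whitespace:
#
#     parse_python_constraint("~2.7 || ^3.5", "pyproject.toml")
#     # -> "(python_version >= '2.7' and python_version< '2.8') or "
#     #    "(python_version >= '3.5' and python_version< '4.0')"

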
@dataclass(frozen=True)
class PyProjectToml:
    build_root: PurePath
    toml_relpath: PurePath
    toml_contents: str

    def parse(self) -> Mapping[str, Any]:
        return toml.loads(self.toml_contents)

    def _non_pants_project_abs_path(self, path: Path) -> Path | None:
        resolved = path.resolve()
        if resolved.is_file():
            return resolved

        try:
            resolved.relative_to(self.build_root)
        except ValueError:
            return resolved

        return None

    def non_pants_project_abs_path(self, path: str) -> Path | None:
        """Determine if the given path represents a non-Pants controlled project.

        If the path points to a file, it's assumed the file is a distribution (a wheel or sdist)
        and the absolute path of that file is returned.

        If the path points to a directory and that directory is outside of the build root, it's
        assumed the directory is the root of a buildable Python project (i.e.: it contains a
        pyproject.toml or setup.py) and the absolute path of the project is returned.

        Otherwise, `None` is returned since the directory lies inside the build root and is assumed
        to be a Pants controlled project.
        """
        # TODO(John Sirois): This leaves the case where the path is a Python project directory
        #  inside the build root that the user actually wants Pex / Pip to build. A concrete case
        #  for this would be a repo where third party is partially handled with vendored exploded
        #  source distributions. If someone in the wild needs the described case, plumb a
        #  PoetryRequirements parameter that can list paths to treat as Pants controlled or
        #  vice-versa.
        given_path = Path(path)
        if given_path.is_absolute():
            return self._non_pants_project_abs_path(given_path)
        else:
            return self._non_pants_project_abs_path(
                Path(self.build_root / self.toml_relpath).parent / given_path
            )


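# Illustrative sketch (not part of the original module): a PyProjectToml wraps the raw
# pyproject.toml text together with its location, and relative `path` dependencies are
# resolved against the directory that contains that pyproject.toml. The paths below are
# hypothetical.
#
#     ptoml = PyProjectToml(
#         build_root=PurePath("/repo"),
#         toml_relpath=PurePath("src/project/pyproject.toml"),
#         toml_contents="[tool.poetry]\n",
#     )
#     # A relative path such as "../../vendored/dist" is resolved from /repo/src/project/.
#     # Anything that resolves to a file, or to a directory outside /repo, is reported as a
#     # non-Pants project; a directory inside /repo yields None.

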
def produce_match(sep: str, feat: Any) -> str:
    return f"{sep}{feat}" if feat else ""


def add_markers(base: str, attributes: PyprojectAttr, fp) -> str:
    markers_lookup = produce_match("", attributes.get("markers"))
    python_lookup = parse_python_constraint(attributes.get("python"), fp)

    # Python constraints are passed as a `python_version` environment marker; if we have multiple
    # markers, we evaluate them as one whole, and then AND with the new marker for the Python constraint.
    # E.g. (marker1 AND marker2 OR marker3...) AND (python_version)
    # rather than (marker1 AND marker2 OR marker3 AND python_version)
    if not markers_lookup and not python_lookup:
        return base

    result = f"{base} ;("

    if markers_lookup:
        result += f"{markers_lookup})"
    if python_lookup and markers_lookup:
        result += " and ("
    if python_lookup:
        result += f"{python_lookup})"

    return result


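# Illustrative sketch (not part of the original module): combining an existing Poetry
# `markers` expression with a `python` constraint. The whole markers expression is
# parenthesized as one unit and then and-ed with the `python_version` marker.
# Modulo exact whitespace:
#
#     add_markers(
#         "foo ==1.0",
#         {"markers": "sys_platform == 'linux'", "python": "^3.8"},
#         "pyproject.toml",
#     )
#     # -> "foo ==1.0 ;(sys_platform == 'linux') and "
#     #    "(python_version >= '3.8' and python_version< '4.0')"

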
def handle_dict_attr(
    proj_name: str, attributes: PyprojectAttr, pyproject_toml: PyProjectToml
) -> str | None:
    base = ""
    fp = str(pyproject_toml.toml_relpath)

    extras_lookup = attributes.get("extras")
    if isinstance(extras_lookup, list):
        extras_str = f"[{','.join(extras_lookup)}]"
    else:
        extras_str = ""

    git_lookup = attributes.get("git")
    if git_lookup is not None:
        # If no URL scheme (e.g., `{git = "git@github.com:foo/bar.git"}`) we assume ssh,
        # i.e., we convert to git+ssh://git@github.com/foo/bar.git.
        if not urllib.parse.urlsplit(git_lookup).scheme:
            git_lookup = f"ssh://{git_lookup.replace(':', '/', 1)}"
        rev_lookup = produce_match("#", attributes.get("rev"))
        branch_lookup = produce_match("@", attributes.get("branch"))
        tag_lookup = produce_match("@", attributes.get("tag"))

        base = f"{proj_name}{extras_str} @ git+{git_lookup}{tag_lookup}{branch_lookup}{rev_lookup}"

    path_lookup = attributes.get("path")
    if path_lookup is not None:
        non_pants_project_abs_path = pyproject_toml.non_pants_project_abs_path(path_lookup)
        if non_pants_project_abs_path:
            base = f"{proj_name}{extras_str} @ file://{non_pants_project_abs_path}"
        else:
            # An internal path will be handled by normal Pants dependencies and dependency inference;
            # i.e.: it never represents a third party requirement.
            return None

    url_lookup = attributes.get("url")
    if url_lookup is not None:
        base = f"{proj_name}{extras_str} @ {url_lookup}"

    version_lookup = attributes.get("version")
    if version_lookup is not None:
        base = parse_str_version(
            version_lookup, file_path=fp, extras_str=extras_str, proj_name=proj_name
        )

    if len(base) == 0:
        raise ValueError(
            softwrap(
                f"""
                {proj_name} is not formatted correctly; at minimum provide either a version, url,
                path or git location for your dependency.
                """
            )
        )

    return add_markers(base, attributes, fp)


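# Illustrative sketch (not part of the original module): a Poetry git dependency with no
# URL scheme is rewritten as a PEP 508 direct reference over git+ssh. `ptoml` stands in
# for any PyProjectToml instance and is hypothetical here:
#
#     handle_dict_attr("bar", {"git": "git@github.com:foo/bar.git", "tag": "v1.0"}, ptoml)
#     # -> "bar @ git+ssh://git@github.com/foo/bar.git@v1.0"

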
def parse_single_dependency(
    proj_name: str,
    attributes: str | Mapping[str, str | Sequence] | Sequence[Mapping[str, str | Sequence]],
    pyproject_toml: PyProjectToml,
) -> Iterator[PipRequirement]:
    if isinstance(attributes, str):
        # E.g. `foo = "~1.1"`.
        yield PipRequirement.parse(
            parse_str_version(
                attributes,
                proj_name=proj_name,
                file_path=str(pyproject_toml.toml_relpath),
                extras_str="",
            )
        )
    elif isinstance(attributes, dict):
        # E.g. `foo = {version = "~1.1"}`.
        pyproject_attr = cast(PyprojectAttr, attributes)
        req_str = handle_dict_attr(proj_name, pyproject_attr, pyproject_toml)
        if req_str:
            yield PipRequirement.parse(req_str)
    elif isinstance(attributes, list):
        # E.g. `foo = [{version = "1.1", "python" = "2.7"}, {version = "1.1", "python" = "2.7"}]`.
        for attr in attributes:
            req_str = handle_dict_attr(proj_name, attr, pyproject_toml)
            if req_str:
                yield PipRequirement.parse(req_str)
    else:
        raise AssertionError(
            softwrap(
                f"""
                Error: invalid Poetry requirement format. Expected type of requirement attributes to
                be string, dict, or list, but was of type {type(attributes).__name__}.
                """
            )
        )


def parse_pyproject_toml(pyproject_toml: PyProjectToml) -> set[PipRequirement]:
    parsed = pyproject_toml.parse()
    try:
        poetry_vals = parsed["tool"]["poetry"]
    except KeyError:
        raise KeyError(
            softwrap(
                f"""
                No section `tool.poetry` found in {pyproject_toml.toml_relpath}, which
                is loaded by Pants from a `poetry_requirements` macro.

                Did you mean to set up Poetry?
                """
            )
        )
    dependencies = poetry_vals.get("dependencies", {})
    # N.B.: The "python" dependency is a special dependency required by Poetry that only serves to
    # constrain the python interpreter versions the project works with; so we skip that.
    # See: https://python-poetry.org/docs/pyproject/#dependencies-and-dev-dependencies
    dependencies.pop("python", None)

    groups = poetry_vals.get("group", {})
    group_deps: dict[str, PyprojectAttr] = {}

    for group in groups.values():
        group_deps.update(group.get("dependencies", {}))

    dev_dependencies = poetry_vals.get("dev-dependencies", {})
    if not dependencies and not dev_dependencies and not group_deps:
        logger.warning(
            softwrap(
                f"""
                No requirements defined in any Poetry dependency groups, tool.poetry.dependencies,
                or tool.poetry.dev-dependencies in {pyproject_toml.toml_relpath}, which is loaded
                by Pants from a poetry_requirements macro. Did you mean to populate these
                with requirements?
                """
            )
        )

    return set(
        itertools.chain.from_iterable(
            parse_single_dependency(proj, attr, pyproject_toml)
            for proj, attr in {**dependencies, **dev_dependencies, **group_deps}.items()
        )
    )


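# Illustrative sketch (not part of the original module): end-to-end parsing of a small
# pyproject.toml. The paths and project names are hypothetical.
#
#     ptoml = PyProjectToml(
#         build_root=PurePath("/repo"),
#         toml_relpath=PurePath("pyproject.toml"),
#         toml_contents="""
#             [tool.poetry.dependencies]
#             python = "^3.8"
#             requests = {version = "^2.28", extras = ["security"]}
#
#             [tool.poetry.dev-dependencies]
#             pytest = "~7.1"
#         """,
#     )
#     parse_pyproject_toml(ptoml)
#     # -> {PipRequirement for "requests[security] >=2.28,<3.0.0",
#     #     PipRequirement for "pytest >=7.1,<7.2.0"}
#     # The special "python" entry is dropped; it only constrains interpreter versions.

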
def parse_poetry_requirements(
    build_root: BuildRoot, file_contents: bytes, file_path: str
) -> set[PipRequirement]:
    return parse_pyproject_toml(
        PyProjectToml(
            build_root=PurePath(build_root.path),
            toml_relpath=PurePath(file_path),
            toml_contents=file_contents.decode(),
        )
    )


# ---------------------------------------------------------------------------------
# Target generator
# ---------------------------------------------------------------------------------


class PoetryRequirementsSourceField(SingleSourceField):
    default = "pyproject.toml"
    required = False


class PoetryRequirementsTargetGenerator(TargetGenerator):
    alias = "poetry_requirements"
    help = "Generate a `python_requirement` for each entry in a Poetry `pyproject.toml`."
    generated_target_cls = PythonRequirementTarget
    # Note that this does not have a `dependencies` field.
    core_fields = (
        *COMMON_TARGET_FIELDS,
        ModuleMappingField,
        TypeStubsModuleMappingField,
        PoetryRequirementsSourceField,
        RequirementsOverrideField,
    )
    copied_fields = COMMON_TARGET_FIELDS
    moved_fields = (PythonRequirementResolveField,)


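# Illustrative sketch (not part of the original module): a BUILD file placed next to a
# Poetry pyproject.toml can invoke this target generator; the directory layout and module
# mapping shown are hypothetical.
#
#     # 3rdparty/python/BUILD
#     poetry_requirements(
#         name="poetry",
#         module_mapping={"beautifulsoup4": ["bs4"]},
#     )
#
# Each entry in the pyproject.toml then becomes a generated `python_requirement` target.

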
class GenerateFromPoetryRequirementsRequest(GenerateTargetsRequest):
    generate_from = PoetryRequirementsTargetGenerator


@rule(desc="Generate `python_requirement` targets from Poetry pyproject.toml", level=LogLevel.DEBUG)
async def generate_from_python_requirement(
    request: GenerateFromPoetryRequirementsRequest,
    build_root: BuildRoot,
    union_membership: UnionMembership,
    python_setup: PythonSetup,
) -> GeneratedTargets:
    result = await _generate_requirements(
        request,
        union_membership,
        python_setup,
        parse_requirements_callback=partial(parse_poetry_requirements, build_root),
    )
    return GeneratedTargets(request.generator, result)


def rules():
    return (
        *collect_rules(),
        UnionRule(GenerateTargetsRequest, GenerateFromPoetryRequirementsRequest),
    )