• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

pantsbuild / pants / 25441711719

06 May 2026 02:31PM UTC coverage: 92.915%. Remained the same
25441711719

push

github

web-flow
use sha pin (with comment) format for generated actions (#23312)

Per the GitHub Action best practices we recently enabled at #23249, we
should pin each action to a SHA so that the reference is actually
immutable.

This will -- I hope -- knock out a large chunk of the 421 alerts we
currently get from zizmor. The next followup would then be upgrades and
harmonizing the generated and non-generated pins.

Notice: This idea was suggested by Claude while going over pinact output
and I was surprised to see that post processing the yaml wasn't too
gross.

92206 of 99237 relevant lines covered (92.91%)

4.04 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

97.86
/src/python/pants/core/target_types.py
1
# Copyright 2020 Pants project contributors (see CONTRIBUTORS.md).
2
# Licensed under the Apache License, Version 2.0 (see LICENSE).
3
from __future__ import annotations
12✔
4

5
import builtins
12✔
6
import dataclasses
12✔
7
import os
12✔
8
import urllib.parse
12✔
9
from collections import defaultdict
12✔
10
from collections.abc import Sequence
12✔
11
from dataclasses import dataclass
12✔
12
from pathlib import PurePath
12✔
13
from typing import Generic, TypeVar, cast
12✔
14

15
from pants.core.goals import package
12✔
16
from pants.core.goals.package import (
12✔
17
    BuiltPackage,
18
    BuiltPackageArtifact,
19
    EnvironmentAwarePackageRequest,
20
    OutputPathField,
21
    PackageFieldSet,
22
    environment_aware_package,
23
)
24
from pants.core.util_rules.archive import ArchiveFormat, CreateArchive, create_archive
12✔
25
from pants.core.util_rules.archive import rules as archive_rules
12✔
26
from pants.engine.addresses import Address, UnparsedAddressInputs
12✔
27
from pants.engine.download_file import download_file
12✔
28
from pants.engine.environment import EnvironmentName
12✔
29
from pants.engine.fs import (
12✔
30
    AddPrefix,
31
    CreateDigest,
32
    DownloadFile,
33
    FileDigest,
34
    FileEntry,
35
    MergeDigests,
36
    PathGlobs,
37
    RemovePrefix,
38
)
39
from pants.engine.internals.graph import find_valid_field_sets, hydrate_sources, resolve_targets
12✔
40
from pants.engine.internals.mapper import DELETED_TARGET_TYPE
12✔
41
from pants.engine.internals.native_engine import StringSequenceField
12✔
42
from pants.engine.intrinsics import digest_to_snapshot
12✔
43
from pants.engine.platform import Platform
12✔
44
from pants.engine.rules import collect_rules, concurrently, implicitly, rule
12✔
45
from pants.engine.target import (
12✔
46
    COMMON_TARGET_FIELDS,
47
    AllTargets,
48
    Dependencies,
49
    FieldSet,
50
    FieldSetsPerTargetRequest,
51
    GeneratedSources,
52
    GenerateSourcesRequest,
53
    HydrateSourcesRequest,
54
    InvalidFieldTypeException,
55
    MultipleSourcesField,
56
    OptionalSingleSourceField,
57
    OverridesField,
58
    SingleSourceField,
59
    SourcesField,
60
    SpecialCasedDependencies,
61
    StringField,
62
    Target,
63
    TargetFilesGenerator,
64
    generate_file_based_overrides_field_help_message,
65
    generate_multiple_sources_field_help_message,
66
)
67
from pants.engine.unions import UnionRule, union
12✔
68
from pants.option.bootstrap_options import UnmatchedBuildFileGlobs
12✔
69
from pants.util.docutil import bin_name
12✔
70
from pants.util.frozendict import FrozenDict
12✔
71
from pants.util.logging import LogLevel
12✔
72
from pants.util.strutil import help_text, softwrap
12✔
73

74
# -----------------------------------------------------------------------------------------------
# `per_platform` object
# -----------------------------------------------------------------------------------------------
_T = TypeVar("_T")


@dataclass(frozen=True)
class per_platform(Generic[_T]):
    """A container holding one optional value per supported platform.

    All set values are expected to be homogeneous (enforce with `check_types`), and the
    value to use should be chosen by the *execution* environment, not the host — i.e.
    resolved inside a `rule` that requests `Platform`, via `get_value_for_platform`.

    NOTE: Support for this object should be heavily weighed, as it would be inappropriate
    to use in certain contexts (such as the `source` field in a `foo_source` target, where
    the intent is to support differing source files based on platform. The result would be
    that dependency inference (and therefore the dependencies field) wouldn't be knowable
    on the host, which is not something the engine can support yet).
    """

    linux_arm64: _T | None = None
    linux_x86_64: _T | None = None
    macos_arm64: _T | None = None
    macos_x86_64: _T | None = None

    def check_types(self, type_: type) -> None:
        """Raise unless at least one value is set and every set value is a `type_` instance."""
        present: dict[str, object] = {}
        for fld in dataclasses.fields(self):
            val = getattr(self, fld.name)
            if val is not None:
                present[fld.name] = val

        if not present:
            raise ValueError("`per_platform` must be given at least one platform value.")

        mismatched = [
            (name, type(val).__name__)
            for name, val in present.items()
            if not isinstance(val, type_)
        ]
        if mismatched:
            details = ", ".join(f"{name} of type '{typename}'" for name, typename in mismatched)
            raise TypeError(
                f"The following fields of a `per_platform` object were expected to be of type `{type_.__name__}`:"
                + ' "'
                + details
                + '".'
            )

    def get_value_for_platform(self, platform: Platform) -> _T:
        """Return the value configured for `platform`, raising if it was left unset."""
        chosen = getattr(self, platform.value)
        if chosen is None:
            raise ValueError(
                f"A request was made to resolve a `per_platform` on `{platform.value}`"
                + " but the value was `None`. Please specify a value."
            )
        return cast("_T", chosen)
158

159

160
# -----------------------------------------------------------------------------------------------
161
# Asset target helpers
162
# -----------------------------------------------------------------------------------------------
163

164

165
@dataclass(frozen=True)
class http_source:
    url: str
    len: int
    sha256: str
    # Defaults to last part of the URL path (E.g. `index.html`)
    filename: str

    def __init__(self, url: str, *, len: int, sha256: str, filename: str = ""):
        """Validate argument types, then freeze the attribute values.

        `filename` falls back to the final component of the URL's path.
        """
        provided = {"url": url, "len": len, "sha256": sha256, "filename": filename}
        for fld in dataclasses.fields(self):
            candidate = provided[fld.name]
            # Field annotations are strings (PEP 563) naming builtin types, so resolve
            # them through the `builtins` module before the isinstance check.
            if not isinstance(candidate, getattr(builtins, cast(str, fld.type))):
                raise TypeError(f"`{fld.name}` must be a `{fld.type}`, got `{type(candidate)!r}`.")

        resolved_filename = filename or urllib.parse.urlparse(url).path.rsplit("/", 1)[-1]
        object.__setattr__(self, "url", url)
        object.__setattr__(self, "len", len)
        object.__setattr__(self, "sha256", sha256)
        object.__setattr__(self, "filename", resolved_filename)

        self.__post_init__()

    def __post_init__(self):
        # A URL whose path ends in "/" (or is empty) yields an empty deduced filename.
        if not self.filename:
            raise ValueError(
                softwrap(
                    f"""
                    Couldn't deduce filename from `url`: '{self.url}'.

                    Please specify the `filename` argument.
                    """
                )
            )
        if "/" in self.filename or "\\" in self.filename:
            raise ValueError(
                f"`filename` cannot contain a path separator, but was set to '{self.filename}'"
            )
203

204

205
class AssetSourceField(SingleSourceField):
    # The value may be a repo-relative path, a downloadable `http_source`, or a
    # platform-keyed `per_platform[http_source]`.
    value: str | http_source | per_platform[http_source]  # type: ignore[assignment]
    # @TODO: Don't document http_source, link to it once https://github.com/pantsbuild/pants/issues/14832
    # is implemented.
    help = help_text(
        """
        The source of this target.

        If a string is provided, represents a path that is relative to the BUILD file's directory,
        e.g. `source='example.ext'`.

        If an `http_source` is provided, represents the network location to download the source from.
        The downloaded file will exist in the sandbox in the same directory as the target.

        `http_source` has the following signature:

            http_source(url: str, *, len: int, sha256: str, filename: str = "")

        The filename defaults to the last part of the URL path (e.g. `example.ext`), but can also be
        specified if you wish to have control over the file name. You cannot, however, specify a
        path separator to download the file into a subdirectory (you must declare a target in desired
        subdirectory).

        You can easily get the len and checksum with the following command:

            curl -L $URL | tee >(wc -c) >(shasum -a 256) >/dev/null

        If a `per_platform` is provided, represents a mapping from platform to `http_source`, where
        the platform is one of (`linux_arm64`, `linux_x86_64`, `macos_arm64`, `macos_x86_64`) and is
        resolved in the execution target. Each `http_source` value MUST have the same filename provided.
        """
    )

    @classmethod
    def compute_value(  # type: ignore[override]
        cls,
        raw_value: str | http_source | per_platform[http_source] | None,
        address: Address,
    ) -> str | http_source | per_platform[http_source] | None:
        # Strings (and None) get the normal single-source handling from the base class.
        if raw_value is None or isinstance(raw_value, str):
            return super().compute_value(raw_value, address)
        elif isinstance(raw_value, per_platform):
            # Every platform's value must be an `http_source`, and all of them must agree
            # on the filename so the hydrated path is the same on every platform.
            raw_value.check_types(http_source)
            value_as_dict = dataclasses.asdict(raw_value)
            filenames = {
                source["filename"] for source in value_as_dict.values() if source is not None
            }
            if len(filenames) > 1:
                raise ValueError(
                    "Every `http_source` in the `per_platform` must have the same `filename`,"
                    + f" but found: {', '.join(sorted(filenames))}"
                )

        elif not isinstance(raw_value, http_source):
            raise InvalidFieldTypeException(
                address,
                cls.alias,
                raw_value,
                expected_type="a string, an `http_source` object, or a `per_platform[http_source]` object.",
            )
        return raw_value

    def validate_resolved_files(self, files: Sequence[str]) -> None:
        # Only path-based sources resolve against files on disk; downloaded sources
        # have nothing to validate here.
        if isinstance(self.value, str):
            super().validate_resolved_files(files)

    @property
    def globs(self) -> tuple[str, ...]:
        # `http_source`/`per_platform` values are not files in the repo, so no globs.
        if isinstance(self.value, str):
            return (self.value,)
        return ()

    @property
    def file_path(self) -> str:
        # The hydrated file's path: the target's directory joined with either the
        # literal source path or the (shared) `http_source` filename.
        assert self.value
        filename = (
            self.value
            if isinstance(self.value, str)
            else (
                self.value.filename
                if isinstance(self.value, http_source)
                else next(
                    # All filenames match (enforced in compute_value), so take the first set one.
                    source["filename"]
                    for source in dataclasses.asdict(self.value).values()
                    if source is not None
                )
            )
        )
        return os.path.join(self.address.spec_path, filename)
294

295

296
async def _hydrate_asset_source(
    request: GenerateSourcesRequest, platform: Platform
) -> GeneratedSources:
    """Hydrate an asset target's `source` field.

    Plain string sources are already hydrated and pass straight through. `http_source`
    values (possibly wrapped in a `per_platform`, resolved against the execution
    `platform`) are downloaded and materialized in the sandbox at the target's
    `file_path`.
    """
    target = request.protocol_target
    source_field = target[AssetSourceField]
    if isinstance(source_field.value, str):
        return GeneratedSources(request.protocol_sources)

    source = source_field.value
    if isinstance(source, per_platform):
        source = source.get_value_for_platform(platform)

    # The declared sha256/len pin the expected content of the download.
    file_digest = FileDigest(source.sha256, source.len)
    # NB: This just has to run, we don't actually need the result because we know the Digest's
    # FileEntry metadata.
    await download_file(DownloadFile(source.url, file_digest), **implicitly())
    snapshot = await digest_to_snapshot(
        **implicitly(
            CreateDigest(
                [
                    FileEntry(
                        path=source_field.file_path,
                        file_digest=file_digest,
                    )
                ]
            )
        )
    )

    return GeneratedSources(snapshot)
326

327

328
# -----------------------------------------------------------------------------------------------
329
# `file` and `files` targets
330
# -----------------------------------------------------------------------------------------------
331
class FileSourceField(AssetSourceField):
    # Loose files are addressed by repo-relative paths at runtime, so source roots
    # are not stripped.
    uses_source_roots = False
333

334

335
class FileDependenciesField(Dependencies):
    # Distinct subclass so `file`/`files` dependencies can be targeted separately
    # from other targets' `dependencies` fields.
    pass
337

338

339
class FileTarget(Target):
    # Also produced per-file by `FilesGeneratorTarget` (`files`).
    alias = "file"
    core_fields = (*COMMON_TARGET_FIELDS, FileDependenciesField, FileSourceField)
    help = help_text(
        """
        A single loose file that lives outside of code packages.

        Files are placed directly in archives, outside of code artifacts such as Python wheels
        or JVM JARs. The sources of a `file` target are accessed via filesystem APIs, such as
        Python's `open()`, via paths relative to the repository root.
        """
    )
351

352

353
class GenerateFileSourceRequest(GenerateSourcesRequest):
    # Identity codegen request (input == output) so that `http_source`-backed `file`
    # targets can be hydrated by downloading.
    input = FileSourceField
    output = FileSourceField
356

357

358
@rule
async def hydrate_file_source(
    request: GenerateFileSourceRequest, platform: Platform
) -> GeneratedSources:
    # Delegate to the shared asset hydration, which downloads `http_source` values
    # (resolved per-platform when needed) and passes plain path sources through.
    return await _hydrate_asset_source(request, platform)
363

364

365
class FilesGeneratingSourcesField(MultipleSourcesField):
    required = True
    # Matches FileSourceField: loose files keep their repo-relative paths.
    uses_source_roots = False
    help = generate_multiple_sources_field_help_message(
        "Example: `sources=['example.txt', 'new_*.md', '!old_ignore.csv']`"
    )
371

372

373
class FilesOverridesField(OverridesField):
    # Per-file overrides for generated `file` targets. The example below is rendered
    # verbatim in `help`, so it must be valid BUILD-file syntax.
    help = generate_file_based_overrides_field_help_message(
        FileTarget.alias,
        """
        overrides={
            "foo.json": {"description": "our customer model"},
            "bar.json": {"description": "our product model"},
            ("foo.json", "bar.json"): {"tags": ["overridden"]},
        }
        """,
    )
384

385

386
class FilesGeneratorTarget(TargetFilesGenerator):
    alias = "files"
    core_fields = (
        *COMMON_TARGET_FIELDS,
        FilesGeneratingSourcesField,
        FilesOverridesField,
    )
    # Expands into one `file` target per matched source file.
    generated_target_cls = FileTarget
    # Tags/description etc. are copied to each generated target; dependencies move
    # onto the generated targets instead of staying on the generator.
    copied_fields = COMMON_TARGET_FIELDS
    moved_fields = (FileDependenciesField,)
    help = "Generate a `file` target for each file in the `sources` field."
397

398

399
# -----------------------------------------------------------------------------------------------
400
# `relocated_files` target
401
# -----------------------------------------------------------------------------------------------
402

403

404
class RelocatedFilesSourcesField(MultipleSourcesField):
    # We solely register this field for codegen to work.
    alias = "_sources"
    # No user-provided sources: the real files come from the `files_targets` field.
    expected_num_files = 0
408

409

410
class RelocatedFilesOriginalTargetsField(SpecialCasedDependencies):
    # Special-cased (not ordinary `dependencies`) so the original files are consumed
    # only through the relocation codegen.
    alias = "files_targets"
    required = True
    help = help_text(
        """
        Addresses to the original `file` and `files` targets that you want to relocate, such as
        `['//:json_files']`.

        Every target will be relocated using the same mapping. This means
        that every target must include the value from the `src` field in their original path.
        """
    )
422

423

424
class RelocatedFilesSrcField(StringField):
    # Prefix stripped from each original path (may be empty to keep the path).
    alias = "src"
    required = True
    help = help_text(
        """
        The original prefix that you want to replace, such as `src/resources`.

        You can set this field to the empty string to preserve the original path; the value in the `dest`
        field will then be added to the beginning of this original path.
        """
    )
435

436

437
class RelocatedFilesDestField(StringField):
    # Prefix prepended after `src` is stripped (may be empty to strip only).
    alias = "dest"
    required = True
    help = help_text(
        """
        The new prefix that you want to add to the beginning of the path, such as `data`.

        You can set this field to the empty string to avoid adding any new values to the path; the
        value in the `src` field will then be stripped, rather than replaced.
        """
    )
448

449

450
class RelocatedFiles(Target):
    alias = "relocated_files"
    core_fields = (
        *COMMON_TARGET_FIELDS,
        # Placeholder sources field registered solely so codegen can run; see
        # RelocatedFilesSourcesField.
        RelocatedFilesSourcesField,
        RelocatedFilesOriginalTargetsField,
        RelocatedFilesSrcField,
        RelocatedFilesDestField,
    )
    help = help_text(
        """
        Loose files with path manipulation applied.

        Allows you to relocate the files at runtime to something more convenient than their actual
        paths in your project.

        For example, you can relocate `src/resources/project1/data.json` to instead be
        `resources/data.json`. Your other target types can then add this target to their
        `dependencies` field, rather than using the original `files` target.

        To remove a prefix:

            # Results in `data.json`.
            relocated_files(
                files_targets=["src/resources/project1:target"],
                src="src/resources/project1",
                dest="",
            )

        To add a prefix:

            # Results in `images/logo.svg`.
            relocated_files(
                files_targets=["//:logo"],
                src="",
                dest="images",
            )

        To replace a prefix:

            # Results in `new_prefix/project1/data.json`.
            relocated_files(
                files_targets=["src/resources/project1:target"],
                src="src/resources",
                dest="new_prefix",
            )
        """
    )
498

499

500
class RelocateFilesViaCodegenRequest(GenerateSourcesRequest):
    # Relocation is modeled as codegen: from the placeholder `_sources` field to
    # ordinary `file` sources with rewritten paths.
    input = RelocatedFilesSourcesField
    output = FileSourceField
    exportable = False
504

505

506
@rule(desc="Relocating loose files for `relocated_files` targets", level=LogLevel.DEBUG)
async def relocate_files(request: RelocateFilesViaCodegenRequest) -> GeneratedSources:
    """Hydrate the referenced `file` targets' sources and rewrite their path prefixes.

    When non-empty, `src` is stripped from the front of every path, then `dest`
    (when non-empty) is prepended.
    """
    # Unlike normal codegen, we operate the on the sources of the `files_targets` field, not the
    # `sources` of the original `relocated_sources` target.
    # TODO(#13086): Because we're using `Targets` instead of `UnexpandedTargets`, the
    #  `files` target generator gets replaced by its generated `file` targets. That replacement is
    #  necessary because we only hydrate sources for `FileSourcesField`, which is only for the
    #  `file` target.  That's really subtle!
    original_file_targets = await resolve_targets(
        **implicitly(
            {
                request.protocol_target.get(
                    RelocatedFilesOriginalTargetsField
                ).to_unparsed_address_inputs(): UnparsedAddressInputs
            }
        )
    )
    original_files_sources = await concurrently(
        hydrate_sources(
            HydrateSourcesRequest(
                tgt.get(SourcesField),
                for_sources_types=(FileSourceField,),
                enable_codegen=True,
            ),
            **implicitly(),
        )
        for tgt in original_file_targets
    )
    snapshot = await digest_to_snapshot(
        **implicitly(MergeDigests(sources.snapshot.digest for sources in original_files_sources))
    )

    src_val = request.protocol_target.get(RelocatedFilesSrcField).value
    dest_val = request.protocol_target.get(RelocatedFilesDestField).value
    if src_val:
        snapshot = await digest_to_snapshot(**implicitly(RemovePrefix(snapshot.digest, src_val)))
    if dest_val:
        snapshot = await digest_to_snapshot(**implicitly(AddPrefix(snapshot.digest, dest_val)))
    return GeneratedSources(snapshot)
545

546

547
# -----------------------------------------------------------------------------------------------
548
# `resource` and `resources` target
549
# -----------------------------------------------------------------------------------------------
550

551

552
class ResourceDependenciesField(Dependencies):
    # Distinct subclass so `resource`/`resources` dependencies can be targeted
    # separately from other targets' `dependencies` fields.
    pass
554

555

556
class ResourceSourceField(AssetSourceField):
    # Resources are addressed relative to their source root (unlike `file` targets).
    uses_source_roots = True
558

559

560
class ResourceTarget(Target):
    # Also produced per-file by `ResourcesGeneratorTarget` (`resources`).
    alias = "resource"
    core_fields = (*COMMON_TARGET_FIELDS, ResourceDependenciesField, ResourceSourceField)
    help = help_text(
        """
        A single resource file embedded in a code package and accessed in a
        location-independent manner.

        Resources are embedded in code artifacts such as Python wheels or JVM JARs. The sources
        of a `resources` target are accessed via language-specific resource APIs, such as
        Python's `pkgutil` or JVM's ClassLoader, via paths relative to the target's source root.
        """
    )
573

574

575
class GenerateResourceSourceRequest(GenerateSourcesRequest):
    # Identity codegen request (input == output) so that `http_source`-backed
    # `resource` targets can be hydrated by downloading.
    input = ResourceSourceField
    output = ResourceSourceField
578

579

580
@rule
async def hydrate_resource_source(
    request: GenerateResourceSourceRequest, platform: Platform
) -> GeneratedSources:
    # Delegate to the shared asset hydration, which downloads `http_source` values
    # (resolved per-platform when needed) and passes plain path sources through.
    return await _hydrate_asset_source(request, platform)
585

586

587
class ResourcesGeneratingSourcesField(MultipleSourcesField):
    # Unlike FilesGeneratingSourcesField, source roots stay in effect (the default).
    required = True
    help = generate_multiple_sources_field_help_message(
        "Example: `sources=['example.txt', 'new_*.md', '!old_ignore.csv']`"
    )
592

593

594
class ResourcesOverridesField(OverridesField):
    # Per-file overrides for generated `resource` targets. The example below is rendered
    # verbatim in `help`, so it must be valid BUILD-file syntax.
    help = generate_file_based_overrides_field_help_message(
        ResourceTarget.alias,
        """
        overrides={
            "foo.json": {"description": "our customer model"},
            "bar.json": {"description": "our product model"},
            ("foo.json", "bar.json"): {"tags": ["overridden"]},
        }
        """,
    )
605

606

607
class ResourcesGeneratorTarget(TargetFilesGenerator):
    alias = "resources"
    core_fields = (
        *COMMON_TARGET_FIELDS,
        ResourcesGeneratingSourcesField,
        ResourcesOverridesField,
    )
    # Expands into one `resource` target per matched source file.
    generated_target_cls = ResourceTarget
    # Tags/description etc. are copied to each generated target; dependencies move
    # onto the generated targets instead of staying on the generator.
    copied_fields = COMMON_TARGET_FIELDS
    moved_fields = (ResourceDependenciesField,)
    help = "Generate a `resource` target for each file in the `sources` field."
618

619

620
@dataclass(frozen=True)
class ResourcesFieldSet(FieldSet):
    """Field set matching a single `resource` target."""

    required_fields = (ResourceSourceField,)

    sources: ResourceSourceField
625

626

627
@dataclass(frozen=True)
class ResourcesGeneratorFieldSet(FieldSet):
    """Field set matching a `resources` target generator."""

    required_fields = (ResourcesGeneratingSourcesField,)

    sources: ResourcesGeneratingSourcesField
632

633

634
# -----------------------------------------------------------------------------------------------
635
# `target` generic target
636
# -----------------------------------------------------------------------------------------------
637

638

639
class GenericTargetDependenciesField(Dependencies):
    # Distinct subclass for the generic `target` type's dependencies.
    pass
641

642

643
class GenericTarget(Target):
    alias = "target"
    # Only the common fields plus dependencies: this target carries no sources of its own.
    core_fields = (*COMMON_TARGET_FIELDS, GenericTargetDependenciesField)
    help = help_text(
        """
        A generic target with no specific type.

        This can be used as a generic "bag of dependencies", i.e. you can group several different
        targets into one single target so that your other targets only need to depend on one thing.
        """
    )
654

655

656
class DeletedSources(StringSequenceField):
    # Paths of files removed from the repo, as tracked by the deleted-target sentinel.
    alias = "deleted_sources"
    help = "Deleted files"
659

660

661
class DeletedTarget(Target):
    # Alias comes from the engine's mapper so the two stay in sync.
    alias = DELETED_TARGET_TYPE
    core_fields = (DeletedSources,)
    help = help_text(
        """
        A pseudo-target representing all files deleted from the repo, when using
        `--changed-since`.

        Not intended to be user-visible, and does not represent any "real" BUILD entity.
        Can be used as a sentinel when inferring dependents of deleted files.

        See DELETED_ADDRESS in src/python/pants/engine/internals/build_files.py for
        details on how this is used.
        """
    )
676

677

678
# -----------------------------------------------------------------------------------------------
679
# `Asset` targets (resources and files)
680
# -----------------------------------------------------------------------------------------------
681

682

683
@dataclass(frozen=True)
class AllAssetTargets:
    """All targets in the project that provide a resource or loose-file source."""

    # Targets with a `ResourceSourceField`.
    resources: tuple[Target, ...]
    # Targets with a `FileSourceField`.
    files: tuple[Target, ...]
12✔
687

688

689
@rule(desc="Find all assets in project")
async def find_all_assets(all_targets: AllTargets) -> AllAssetTargets:
    """Collect every target carrying a resource or loose-file source field.

    A target is included in both groups if it has both fields.
    """
    resources = tuple(tgt for tgt in all_targets if tgt.has_field(ResourceSourceField))
    files = tuple(tgt for tgt in all_targets if tgt.has_field(FileSourceField))
    return AllAssetTargets(resources, files)
699

700

701
@dataclass(frozen=True)
class AllAssetTargetsByPath:
    """Asset targets indexed by the path of the file each one provides."""

    # Path -> resource targets providing a file at that path.
    resources: FrozenDict[PurePath, frozenset[Target]]
    # Path -> file targets providing a file at that path.
    files: FrozenDict[PurePath, frozenset[Target]]
12✔
705

706

707
@rule(desc="Mapping assets by path")
async def map_assets_by_path(
    all_asset_targets: AllAssetTargets,
) -> AllAssetTargetsByPath:
    """Group resource and file targets by the path of the file each one provides."""

    def _group_by_path(targets, field_type):
        # Several targets may provide a file at the same path, hence sets of targets.
        grouped: defaultdict[PurePath, set[Target]] = defaultdict(set)
        for tgt in targets:
            grouped[PurePath(tgt[field_type].file_path)].add(tgt)
        return FrozenDict((path, frozenset(tgts)) for path, tgts in grouped.items())

    return AllAssetTargetsByPath(
        _group_by_path(all_asset_targets.resources, ResourceSourceField),
        _group_by_path(all_asset_targets.files, FileSourceField),
    )
723

724

725
# -----------------------------------------------------------------------------------------------
726
# `_target_generator_sources_helper` target
727
# -----------------------------------------------------------------------------------------------
728

729

730
class TargetGeneratorSourcesHelperSourcesField(SingleSourceField):
    # The helper tracks a raw file path, so source roots do not apply.
    uses_source_roots = False
    required = True
733

734

735
class TargetGeneratorSourcesHelperTarget(Target):
    """Target generators that work by reading in some source file(s) should also generate this
    target once per file, and add it as a dependency to every generated target so that `--changed-
    since` works properly.

    See https://github.com/pantsbuild/pants/issues/13118 for discussion of why this is necessary and
    alternatives considered.
    """

    # Leading underscore: internal-only, not meant to be written in BUILD files.
    alias = "_generator_sources_helper"
    core_fields = (*COMMON_TARGET_FIELDS, TargetGeneratorSourcesHelperSourcesField)
    help = help_text(
        """
        A private helper target type used by some target generators.

        This tracks their `source` / `sources` field so that `--changed-since --changed-dependents`
        works properly for generated targets.
        """
    )
754

755

756
# -----------------------------------------------------------------------------------------------
757
# `archive` target
758
# -----------------------------------------------------------------------------------------------
759

760

761
class ArchivePackagesField(SpecialCasedDependencies):
    # Special-cased: these addresses are packaged and embedded, not treated as
    # ordinary dependencies.
    alias = "packages"
    help = help_text(
        f"""
        Addresses to any targets that can be built with `{bin_name()} package`,
        e.g. `["project:app"]`.

        Pants will build the assets as if you had run `{bin_name()} package`.
        It will include the results in your archive using the same name they
        would normally have, but without the `--distdir` prefix (e.g. `dist/`).

        You can include anything that can be built by `{bin_name()} package`,
        e.g. a `pex_binary`, `python_awslambda`, or even another `archive`.
        """
    )
776

777

778
class ArchiveFilesField(SpecialCasedDependencies):
    """Addresses of loose-file targets to copy into the archive as-is."""

    alias = "files"
    help = help_text(
        """
        Addresses to any `file`, `files`, or `relocated_files` targets to include in the
        archive, e.g. `["resources:logo"]`.

        This is useful to include any loose files, like data files,
        image assets, or config files.

        This will ignore any targets that are not `file`, `files`, or
        `relocated_files` targets.

        If you instead want those files included in any packages specified in the `packages`
        field for this target, then use a `resource` or `resources` target and have the original
        package depend on the resources.
        """
    )
796

797

798
class ArchiveFormatField(StringField):
    """Which archive format to produce; choices come from the `ArchiveFormat` enum."""

    alias = "format"
    # BUILD-file values are validated against the enum's members at parse time.
    valid_choices = ArchiveFormat
    required = True
    # Narrowed from `str | None`: `required = True` guarantees a value is present.
    value: str
    help = "The type of archive file to be generated."
804

805

806
class ArchiveTarget(Target):
    """The user-facing `archive` target: bundles built packages and loose files."""

    alias = "archive"
    core_fields = (
        *COMMON_TARGET_FIELDS,
        OutputPathField,
        ArchivePackagesField,
        ArchiveFilesField,
        ArchiveFormatField,
    )
    help = "A ZIP or TAR file containing loose files and code packages."
816

817

818
@dataclass(frozen=True)
class ArchiveFieldSet(PackageFieldSet):
    """The fields consumed when packaging an `archive` target."""

    required_fields = (ArchiveFormatField,)

    # Packageable targets to build and include in the archive.
    packages: ArchivePackagesField
    # Loose `file`/`files`/`relocated_files` targets to include verbatim.
    files: ArchiveFilesField
    # The archive format to produce (zip/tar variants).
    format_field: ArchiveFormatField
    # Optional override for the output filename/path of the archive.
    output_path: OutputPathField
826

827

828
@rule(level=LogLevel.DEBUG)
async def package_archive_target(field_set: ArchiveFieldSet) -> BuiltPackage:
    """Build every referenced package, gather every referenced loose file, and archive them."""
    # TODO(#13086): Using `Targets` (rather than `UnexpandedTargets`) is deliberate: the `files`
    #  target generator must be replaced by its generated `file` targets, because sources are only
    #  hydrated for `FileSourceField`, which exists only on the `file` target. That's really
    #  subtle!
    pkg_targets, loose_file_targets = await concurrently(
        resolve_targets(**implicitly(field_set.packages.to_unparsed_address_inputs())),
        resolve_targets(**implicitly(field_set.files.to_unparsed_address_inputs())),
    )

    # Silently skip anything in `packages` that is not actually packageable.
    valid_pkg_field_sets = await find_valid_field_sets(
        FieldSetsPerTargetRequest(PackageFieldSet, pkg_targets), **implicitly()
    )
    built_packages = await concurrently(
        environment_aware_package(EnvironmentAwarePackageRequest(pkg_fs))
        for pkg_fs in valid_pkg_field_sets.field_sets
    )

    # `enable_codegen` lets generated files participate; non-`file` targets hydrate to nothing.
    hydrated_files = await concurrently(
        hydrate_sources(
            HydrateSourcesRequest(
                tgt.get(SourcesField),
                for_sources_types=(FileSourceField,),
                enable_codegen=True,
            ),
            **implicitly(),
        )
        for tgt in loose_file_targets
    )

    # Combine every built package with every hydrated loose file into one input tree.
    all_digests = [built.digest for built in built_packages]
    all_digests.extend(hydrated.snapshot.digest for hydrated in hydrated_files)
    input_snapshot = await digest_to_snapshot(**implicitly(MergeDigests(all_digests)))

    output_filename = field_set.output_path.value_or_default(
        file_ending=field_set.format_field.value
    )
    archive_digest = await create_archive(
        CreateArchive(
            input_snapshot,
            output_filename=output_filename,
            format=ArchiveFormat(field_set.format_field.value),
        ),
        **implicitly(),
    )
    return BuiltPackage(archive_digest, (BuiltPackageArtifact(output_filename),))
882

883

884
# -----------------------------------------------------------------------------------------------
885
# `_lockfile` and `_lockfiles` targets
886
# -----------------------------------------------------------------------------------------------
887

888

889
class LockfileSourceField(OptionalSingleSourceField):
    """Source field for synthesized `_lockfile` targets.

    It is special in that it always ignores any missing files, regardless of the global
    `--unmatched-build-file-globs` option.
    """

    uses_source_roots = False
    required = True
    value: str

    def path_globs(self, unmatched_build_file_globs: UnmatchedBuildFileGlobs) -> PathGlobs:  # type: ignore[misc]
        # Deliberately discard the caller-provided behavior: a missing lockfile must never
        # warn or error, whatever `--unmatched-build-file-globs` is set to.
        return super().path_globs(UnmatchedBuildFileGlobs.ignore())
902

903

904
class LockfileDependenciesField(Dependencies):
    # A distinct subclass (rather than `Dependencies` itself) so rules can target
    # lockfile dependencies specifically.
    pass
906

907

908
class LockfileTarget(Target):
    """Synthesized (non-user-facing) target wrapping a single lockfile."""

    alias = "_lockfile"
    core_fields = (*COMMON_TARGET_FIELDS, LockfileSourceField, LockfileDependenciesField)
    help = help_text(
        """
        A target for lockfiles in order to include them in the dependency graph of other targets.

        This tracks them so that `--changed-since --changed-dependents` works properly for targets
        relying on a particular lockfile.
        """
    )
919

920

921
class LockfilesGeneratorSourcesField(MultipleSourcesField):
    """Sources field for synthesized `_lockfiles` targets.

    It is special in that it always ignores any missing files, regardless of the global
    `--unmatched-build-file-globs` option.
    """

    help = generate_multiple_sources_field_help_message("Example: `sources=['example.lock']`")

    def path_globs(self, unmatched_build_file_globs: UnmatchedBuildFileGlobs) -> PathGlobs:  # type: ignore[misc]
        # Mirror `LockfileSourceField.path_globs`: missing lockfiles are always ignored.
        return super().path_globs(UnmatchedBuildFileGlobs.ignore())
932

933

934
class LockfilesGeneratorTarget(TargetFilesGenerator):
    """Synthesized generator that emits one `_lockfile` target per matched file."""

    alias = "_lockfiles"
    core_fields = (
        *COMMON_TARGET_FIELDS,
        LockfilesGeneratorSourcesField,
    )
    generated_target_cls = LockfileTarget
    # Copied fields are duplicated onto each generated target; moved fields live only there.
    copied_fields = COMMON_TARGET_FIELDS
    moved_fields = (LockfileDependenciesField,)
    help = "Generate a `_lockfile` target for each file in the `sources` field."
944

945

946
# -----------------------------------------------------------------------------------------------
947
#  Resolve-like fields
948
# -----------------------------------------------------------------------------------------------
949

950

951
@union(in_scope_types=[EnvironmentName])
@dataclass(frozen=True)
class ResolveLikeFieldToValueRequest:
    """Union request for mapping a target's resolve-like field to its resolve name.

    Language backends register a concrete subclass plus a rule from it to
    `ResolveLikeFieldToValueResult`.
    """

    # The target whose resolve-like field should be evaluated.
    target: Target
955

956

957
@dataclass(frozen=True)
class ResolveLikeFieldToValueResult:
    """Result of resolving a resolve-like field to the resolve name as a string.

    The value will be the actual resolve name (e.g., "python-default", "jvm-default"), or None if
    the language backend has disabled resolves (in which case all targets should be treated as
    belonging to a single implicit resolve).
    """

    value: str | None
967

968

969
@rule(polymorphic=True)
async def get_resolve_from_resolve_like_field_request(
    request: ResolveLikeFieldToValueRequest,
) -> ResolveLikeFieldToValueResult:
    # Polymorphic rule head: the engine dispatches to the union member's concrete rule,
    # so this body is never executed.
    raise NotImplementedError()
974

975

976
class ResolveLikeField:
    """Mix-in for any field which behaves like a `resolve` field."""

    def get_resolve_like_field_to_value_request(self) -> type[ResolveLikeFieldToValueRequest]:
        """Return a `ResolveLikeFieldToValueRequest` subclass which can be used to obtain a string
        field value."""
        # Subclasses must override; there is no sensible default request type.
        raise NotImplementedError()
983

984

985
def rules():
    """Register this module's rules, the codegen unions, and the `archive` field set."""
    codegen_requests = (
        GenerateResourceSourceRequest,
        GenerateFileSourceRequest,
        RelocateFilesViaCodegenRequest,
    )
    return (
        *collect_rules(),
        *archive_rules(),
        *package.rules(),
        *(UnionRule(GenerateSourcesRequest, request) for request in codegen_requests),
        UnionRule(PackageFieldSet, ArchiveFieldSet),
    )
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc