
pantsbuild / pants / build 18517631058
15 Oct 2025 04:18AM UTC coverage: 69.207% (-11.1%) from 80.267%
Pull Request #22745: [windows] Add windows support in the stdio crate.
Merge 642a76ca1 into 99919310e (github / web-flow)
53815 of 77759 relevant lines covered (69.21%), 2.42 hits per line
Source File

/src/python/pants/core/goals/tailor.py (47.8% of lines covered)
# Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).

from __future__ import annotations

import dataclasses
import itertools
import logging
import os
from abc import ABCMeta
from collections import defaultdict
from collections.abc import Iterable, Iterator, Mapping
from dataclasses import dataclass
from pathlib import Path
from typing import cast

from pants.base.specs import AncestorGlobSpec, DirLiteralSpec, RawSpecs, Specs
from pants.build_graph.address import Address
from pants.engine.collection import DeduplicatedCollection
from pants.engine.console import Console
from pants.engine.environment import EnvironmentName
from pants.engine.fs import CreateDigest, Digest, FileContent, PathGlobs, Workspace
from pants.engine.goal import Goal, GoalSubsystem
from pants.engine.internals.build_files import BuildFileOptions
from pants.engine.internals.graph import resolve_source_paths, resolve_unexpanded_targets
from pants.engine.internals.selectors import concurrently
from pants.engine.internals.specs_rules import resolve_specs_paths
from pants.engine.intrinsics import create_digest, get_digest_contents, path_globs_to_paths
from pants.engine.rules import collect_rules, goal_rule, implicitly, rule
from pants.engine.target import (
    AllUnexpandedTargets,
    MultipleSourcesField,
    OptionalSingleSourceField,
    SourcesField,
    SourcesPathsRequest,
    Target,
)
from pants.engine.unions import UnionMembership, union
from pants.option.option_types import BoolOption, DictOption, StrListOption, StrOption
from pants.source.filespec import FilespecMatcher
from pants.util.docutil import bin_name, doc_url
from pants.util.frozendict import FrozenDict
from pants.util.logging import LogLevel
from pants.util.memo import memoized
from pants.util.strutil import help_text, softwrap

logger = logging.getLogger(__name__)


@union(in_scope_types=[EnvironmentName])
@dataclass(frozen=True)
class PutativeTargetsRequest(metaclass=ABCMeta):
    dirs: tuple[str, ...]

    def path_globs(self, *filename_globs: str) -> PathGlobs:
        return PathGlobs(os.path.join(d, glob) for d in self.dirs for glob in filename_globs)


@dataclass(frozen=True)
class PutativeTargetsSearchPaths:
    dirs: tuple[str, ...]

    def path_globs(self, filename_glob: str) -> PathGlobs:
        return PathGlobs(globs=(os.path.join(d, filename_glob) for d in self.dirs))


@memoized
def default_sources_for_target_type(tgt_type: type[Target]) -> tuple[str, ...]:
    for field in tgt_type.core_fields:
        if issubclass(field, OptionalSingleSourceField):
            return (field.default,) if field.default else tuple()
        if issubclass(field, MultipleSourcesField):
            return field.default or tuple()
    return tuple()
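# Illustrative sketch (not part of the upstream module): for a hypothetical target type
# whose `MultipleSourcesField` subclass declares a default glob, the lookup above would
# resolve roughly like this:
#
#     class HypotheticalSourcesField(MultipleSourcesField):
#         default = ("*.ext",)
#
#     class HypotheticalTarget(Target):
#         alias = "hypothetical"
#         core_fields = (HypotheticalSourcesField,)
#
#     default_sources_for_target_type(HypotheticalTarget)  # -> ("*.ext",)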

def has_source_or_sources_field(tgt_type: type[Target]) -> bool:
    """Tell whether a given target type has a `source` or `sources` field.

    This may be useful when determining whether it's possible to tailor a target with the passed
    source(s) field value if the target doesn't have such a field in the first place.
    """
    for field in tgt_type.core_fields:
        if issubclass(field, (OptionalSingleSourceField, MultipleSourcesField)):
            return True
    return False


@dataclass(order=True, frozen=True)
class PutativeTarget:
    """A potential target to add, detected by various heuristics.

    This class uses the term "target" in the loose sense. It can also represent an invocation of a
    target-generating macro.
    """

    # Note that field order is such that the dataclass order will be by address (path+name).
    path: str
    name: str
    type_alias: str

    # The sources that triggered creating this putative target.
    # The putative target will own these sources, but may also glob over other sources.
    # If the putative target does not have a `sources` field, then this value must be the
    # empty tuple.
    triggering_sources: tuple[str, ...]

    # The globs of sources owned by this target.
    # If kwargs contains an explicit sources key, it should be identical to this value.
    # Otherwise, this field should contain the default globs that the target type will apply.
    # If the putative target does not have a `sources` field, then this value must be the
    # empty tuple.
    # TODO: We can derive the default source globs for that type from BuildConfiguration.
    #  However that is fiddly and not a high priority.
    owned_sources: tuple[str, ...]

    # Note that we generate the BUILD file target entry from these kwargs, the
    # `name`, and `type_alias`.
    kwargs: FrozenDict[str, str | int | bool | tuple[str, ...]]

    # Any comment lines to add above the BUILD file stanza we generate for this putative target.
    # Should include the `#` prefix, which will not be added.
    comments: tuple[str, ...]

    @classmethod
    def for_target_type(
        cls,
        target_type: type[Target],
        path: str,
        name: str | None,
        triggering_sources: Iterable[str],
        kwargs: Mapping[str, str | int | bool | tuple[str, ...]] | None = None,
        comments: Iterable[str] = tuple(),
    ) -> PutativeTarget:
        if name is None:
            name = os.path.basename(path)

        kwargs = kwargs or {}
        explicit_sources = cast(
            "tuple[str, ...] | None",
            (kwargs["source"],) if "source" in kwargs else kwargs.get("sources"),
        )
        if explicit_sources is not None and not isinstance(explicit_sources, tuple):
            raise TypeError(
                softwrap(
                    f"""
                    `source` or `sources` passed to `PutativeTarget.for_target_type(kwargs=)`, but
                    it was not the correct type. `source` must be `str` and `sources` must be
                    `tuple[str, ...]`. Was `{explicit_sources}` with type `{type(explicit_sources)}`.
                    """
                )
            )

        if (explicit_sources or triggering_sources) and not has_source_or_sources_field(
            target_type
        ):
            raise AssertionError(
                softwrap(
                    f"""
                    A target of type {target_type.__name__} was proposed at
                    address {path}:{name} with explicit sources {", ".join(explicit_sources or triggering_sources)},
                    but this target type does not have a `source` or `sources` field.
                    """
                )
            )
        default_sources = default_sources_for_target_type(target_type)
        owned_sources = explicit_sources or default_sources or tuple()
        return cls(
            path,
            name,
            target_type.alias,
            triggering_sources,
            owned_sources,
            kwargs=kwargs,
            comments=comments,
        )

    def __init__(
        self,
        path: str,
        name: str,
        type_alias: str,
        triggering_sources: Iterable[str],
        owned_sources: Iterable[str],
        *,
        kwargs: Mapping[str, str | int | bool | tuple[str, ...]] | None = None,
        comments: Iterable[str] = tuple(),
    ) -> None:
        object.__setattr__(self, "path", path)
        object.__setattr__(self, "name", name)
        object.__setattr__(self, "type_alias", type_alias)
        object.__setattr__(self, "triggering_sources", tuple(triggering_sources))
        object.__setattr__(self, "owned_sources", tuple(owned_sources))
        object.__setattr__(self, "kwargs", FrozenDict(kwargs or {}))
        object.__setattr__(self, "comments", tuple(comments))

    @property
    def address(self) -> Address:
        return Address(self.path, target_name=self.name)

    def realias(self, new_alias: str | None) -> PutativeTarget:
        """A copy of this object with the alias replaced to the given alias.

        Returns this object if the alias is None or is identical to this object's existing alias.
        """
        return (
            self
            if (new_alias is None or new_alias == self.type_alias)
            else dataclasses.replace(self, type_alias=new_alias)
        )

    def rename(self, new_name: str) -> PutativeTarget:
        """A copy of this object with the name replaced to the given name."""
        return dataclasses.replace(self, name=new_name)

    def restrict_sources(self) -> PutativeTarget:
        """A copy of this object with the sources explicitly set to just the triggering sources."""
        owned_sources = self.triggering_sources
        return dataclasses.replace(
            self,
            owned_sources=owned_sources,
            kwargs=FrozenDict({**self.kwargs, "sources": owned_sources}),
        )

    def add_comments(self, comments: Iterable[str]) -> PutativeTarget:
        return dataclasses.replace(self, comments=self.comments + tuple(comments))

    def generate_build_file_stanza(self, indent: str) -> str:
        def fmt_val(v) -> str:
            if isinstance(v, str):
                return f'"{v}"'
            if isinstance(v, tuple):
                val_parts = [f"\n{indent * 2}{fmt_val(x)}" for x in v]
                val_str = ",".join(val_parts) + ("," if v else "")
                return f"[{val_str}\n{indent}]"
            return repr(v)

        has_name = self.name != os.path.basename(self.path)
        if self.kwargs or has_name:
            _kwargs = {
                **({"name": self.name} if has_name else {}),
                **self.kwargs,
            }
            _kwargs_str_parts = [f"\n{indent}{k}={fmt_val(v)}" for k, v in _kwargs.items()]
            kwargs_str = ",".join(_kwargs_str_parts) + ",\n"
        else:
            kwargs_str = ""

        comment_str = ("\n".join(self.comments) + "\n") if self.comments else ""
        return f"{comment_str}{self.type_alias}({kwargs_str})\n"
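# Illustrative sketch (not part of the upstream module): given a hypothetical putative
# target with type_alias "hypothetical", name "bin" under path "src/proj", and
# kwargs {"sources": ("a.ext",)}, `generate_build_file_stanza("    ")` would emit roughly:
#
#     hypothetical(
#         name="bin",
#         sources=[
#             "a.ext",
#         ],
#     )
#
# A putative target whose name matches the directory basename and that has no kwargs
# renders simply as `hypothetical()`.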

class PutativeTargets(DeduplicatedCollection[PutativeTarget]):
    sort_input = True

    @classmethod
    def merge(cls, tgts_iters: Iterable[PutativeTargets]) -> PutativeTargets:
        all_tgts: list[PutativeTarget] = []
        for tgts in tgts_iters:
            all_tgts.extend(tgts)
        return cls(all_tgts)


@rule(polymorphic=True)
async def generate_putative_targets(
    req: PutativeTargetsRequest, env_name: EnvironmentName
) -> PutativeTargets:
    raise NotImplementedError()


class TailorSubsystem(GoalSubsystem):
    name = "tailor"
    help = help_text(
        """
        Auto-generate BUILD file targets for new source files.

        Each specific `tailor` implementation may be disabled through language-specific options,
        e.g. `[python].tailor_pex_binary_targets` and `[shell-setup].tailor`.
        """
    )

    @classmethod
    def activated(cls, union_membership: UnionMembership) -> bool:
        return PutativeTargetsRequest in union_membership

    check = BoolOption(
        default=False,
        help=softwrap(
            """
            Do not write changes to disk, only write back what would change. Return code
            0 means there would be no changes, and 1 means that there would be.
            """
        ),
    )
    build_file_name = StrOption(
        default="BUILD",
        help=softwrap(
            """
            The name to use for generated BUILD files.

            This must be compatible with `[GLOBAL].build_patterns`.
            """
        ),
        advanced=True,
    )
    build_file_header = StrOption(
        default=None,
        help="A header, e.g., a copyright notice, to add to the content of created BUILD files.",
        advanced=True,
    )
    build_file_indent = StrOption(
        default="    ",
        help="The indent to use when auto-editing BUILD files.",
        advanced=True,
    )
    _alias_mapping = DictOption[str](
        help=softwrap(
            f"""
            A mapping from standard target type to custom type to use instead. The custom
            type can be a custom target type or a macro that offers compatible functionality
            to the one it replaces (see {doc_url("docs/writing-plugins/macros")}).
            """
        ),
        advanced=True,
    )
    ignore_paths = StrListOption(
        help=softwrap(
            """
            Do not edit or create BUILD files at these paths.

            Can use literal file names and/or globs, e.g. `['project/BUILD', 'ignore_me/**']`.

            This augments the option `[GLOBAL].build_ignore`, which tells Pants to also not
            _read_ BUILD files at certain paths. In contrast, this option only tells Pants to
            not edit/create BUILD files at the specified paths.
            """
        ),
        advanced=True,
    )
    _ignore_adding_targets = StrListOption(
        help=softwrap(
            """
            Do not add these target definitions.

            Expects a list of target addresses that would normally be added by `tailor`,
            e.g. `['project:tgt']`. To find these names, you can run `tailor --check`, then
            combine the BUILD file path with the target's name. For example, if `tailor`
            would add the target `bin` to `project/BUILD`, then the address would be
            `project:bin`. If the BUILD file is at the root of your repository, use `//` for
            the path, e.g. `//:bin`.

            Does not work with macros.
            """
        ),
        advanced=True,
    )

    @property
    def ignore_adding_targets(self) -> set[str]:
        return set(self._ignore_adding_targets)

    def alias_for(self, standard_type: str) -> str | None:
        # The get() could return None, but casting to str | None errors.
        # This cast suffices to avoid typecheck errors.
        return cast(str, self._alias_mapping.get(standard_type))

    def validate_build_file_name(self, build_file_patterns: tuple[str, ...]) -> None:
        """Check that the specified BUILD file name works with the repository's BUILD file
        patterns."""
        filespec_matcher = FilespecMatcher(build_file_patterns, ())
        if not bool(filespec_matcher.matches([self.build_file_name])):
            raise ValueError(
                softwrap(
                    f"""
                    The option `[{self.options_scope}].build_file_name` is set to
                    `{self.build_file_name}`, which is not compatible with
                    `[GLOBAL].build_patterns`: {sorted(build_file_patterns)}. This means that
                    generated BUILD files would be ignored.

                    To fix, please update the options so that they are compatible.
                    """
                )
            )

    def filter_by_ignores(
        self, putative_targets: Iterable[PutativeTarget], build_file_ignores: tuple[str, ...]
    ) -> Iterator[PutativeTarget]:
        ignore_paths_filespec_matcher = FilespecMatcher(
            (*self.ignore_paths, *build_file_ignores), ()
        )
        for ptgt in putative_targets:
            is_ignored_file = bool(
                ignore_paths_filespec_matcher.matches(
                    [os.path.join(ptgt.path, self.build_file_name)]
                ),
            )
            if is_ignored_file:
                continue
            # Note that `tailor` can only generate explicit targets, so we don't need to
            # worry about generated address syntax (`#`) or file address syntax.
            address = f"{ptgt.path or '//'}:{ptgt.name}"
            if address in self.ignore_adding_targets:
                continue
            yield ptgt


class TailorGoal(Goal):
    subsystem_cls = TailorSubsystem
    environment_behavior = Goal.EnvironmentBehavior.LOCAL_ONLY


def group_by_build_file(
    build_file_name: str, ptgts: Iterable[PutativeTarget]
) -> dict[str, list[PutativeTarget]]:
    ret = defaultdict(list)
    for ptgt in ptgts:
        ret[os.path.join(ptgt.path, build_file_name)].append(ptgt)
    return ret
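# Illustrative sketch (not part of the upstream module): grouping two hypothetical
# putative targets under paths "src/a" and "src/b" with build_file_name "BUILD" yields
# a mapping keyed by the BUILD file each stanza would be written to, e.g.
#
#     {
#         "src/a/BUILD": [<PutativeTarget at src/a>],
#         "src/b/BUILD": [<PutativeTarget at src/b>],
#     }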

class AllOwnedSources(DeduplicatedCollection[str]):
    """All files in the project already owned by targets."""


@rule(desc="Determine all files already owned by targets", level=LogLevel.DEBUG)
async def determine_all_owned_sources(all_tgts: AllUnexpandedTargets) -> AllOwnedSources:
    all_sources_paths = await concurrently(
        resolve_source_paths(SourcesPathsRequest(tgt.get(SourcesField)), **implicitly())
        for tgt in all_tgts
    )
    return AllOwnedSources(
        itertools.chain.from_iterable(sources_paths.files for sources_paths in all_sources_paths)
    )


@dataclass(frozen=True)
class UniquelyNamedPutativeTargets:
    """Putative targets that have no name conflicts with existing targets (or each other)."""

    putative_targets: PutativeTargets


@rule
async def rename_conflicting_targets(
    ptgts: PutativeTargets, all_existing_tgts: AllUnexpandedTargets
) -> UniquelyNamedPutativeTargets:
    """Ensure that no target addresses collide."""
    existing_addrs: set[str] = {tgt.address.spec for tgt in all_existing_tgts}
    uniquely_named_putative_targets: list[PutativeTarget] = []
    for ptgt in ptgts:
        idx = 0
        possibly_renamed_ptgt = ptgt
        # Targets in root-level BUILD files must be named explicitly.
        if possibly_renamed_ptgt.path == "" and possibly_renamed_ptgt.kwargs.get("name") is None:
            possibly_renamed_ptgt = possibly_renamed_ptgt.rename("root")
        # Eliminate any address collisions.
        while possibly_renamed_ptgt.address.spec in existing_addrs:
            possibly_renamed_ptgt = ptgt.rename(f"{ptgt.name}{idx}")
            idx += 1
        uniquely_named_putative_targets.append(possibly_renamed_ptgt)
        existing_addrs.add(possibly_renamed_ptgt.address.spec)

    return UniquelyNamedPutativeTargets(PutativeTargets(uniquely_named_putative_targets))


@dataclass(frozen=True)
class DisjointSourcePutativeTarget:
    """Putative target whose sources don't overlap with those of any existing targets."""

    putative_target: PutativeTarget


@rule
async def restrict_conflicting_sources(ptgt: PutativeTarget) -> DisjointSourcePutativeTarget:
    source_paths = await path_globs_to_paths(
        PathGlobs(
            SourcesField.prefix_glob_with_dirpath(ptgt.path, glob) for glob in ptgt.owned_sources
        )
    )
    source_path_set = set(source_paths.files)
    source_dirs = {os.path.dirname(path) for path in source_path_set}
    possible_owners = await resolve_unexpanded_targets(
        **implicitly(
            RawSpecs(
                ancestor_globs=tuple(AncestorGlobSpec(d) for d in source_dirs),
                description_of_origin="the `tailor` goal",
            )
        )
    )
    possible_owners_sources = await concurrently(
        resolve_source_paths(SourcesPathsRequest(t.get(SourcesField)), **implicitly())
        for t in possible_owners
    )
    conflicting_targets = []
    for tgt, sources in zip(possible_owners, possible_owners_sources):
        if source_path_set.intersection(sources.files):
            conflicting_targets.append(tgt)

    if conflicting_targets:
        conflicting_addrs = sorted(tgt.address.spec for tgt in conflicting_targets)
        explicit_srcs_str = ", ".join(ptgt.kwargs.get("sources") or [])  # type: ignore[arg-type]
        orig_sources_str = (
            f"[{explicit_srcs_str}]" if explicit_srcs_str else f"the default for {ptgt.type_alias}"
        )
        ptgt = ptgt.restrict_sources().add_comments(
            [f"# NOTE: Sources restricted from {orig_sources_str} due to conflict with"]
            + [f"#   - {caddr}" for caddr in conflicting_addrs]
        )
    return DisjointSourcePutativeTarget(ptgt)


@dataclass(frozen=True)
class EditBuildFilesRequest:
    putative_targets: PutativeTargets


@dataclass(frozen=True)
class EditedBuildFiles:
    digest: Digest
    created_paths: tuple[str, ...]
    updated_paths: tuple[str, ...]


def make_content_str(
    existing_content: str | None, indent: str, pts: Iterable[PutativeTarget]
) -> str:
    new_content = ([] if existing_content is None else [existing_content]) + [
        pt.generate_build_file_stanza(indent) for pt in pts
    ]
    new_content = [s.rstrip() for s in new_content]
    return "\n\n".join(new_content) + "\n"
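# Illustrative sketch (not part of the upstream module): appending one stanza to an
# existing BUILD file body would behave roughly like
#
#     make_content_str("existing_target()\n", "    ", [pt])
#     # -> "existing_target()\n\nhypothetical(\n    ...\n)\n"
#
# i.e. the existing content and each generated stanza are stripped of trailing
# whitespace and joined with a blank line, with a single trailing newline at the end.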

@rule(desc="Edit BUILD files with new targets", level=LogLevel.DEBUG)
async def edit_build_files(
    req: EditBuildFilesRequest, tailor_subsystem: TailorSubsystem
) -> EditedBuildFiles:
    ptgts_by_build_file = group_by_build_file(
        tailor_subsystem.build_file_name, req.putative_targets
    )
    # There may be an existing *directory* whose name collides with that of a BUILD file
    # we want to create. This is more likely on a system with case-insensitive paths,
    # such as MacOS. We detect such cases and use an alt BUILD file name to fix.
    existing_paths = await path_globs_to_paths(PathGlobs(ptgts_by_build_file.keys()))
    existing_dirs = set(existing_paths.dirs)
    # Technically there could be a dir named "BUILD.pants" as well, but that's pretty unlikely.
    ptgts_by_build_file = {
        (f"{bf}.pants" if bf in existing_dirs else bf): pts
        for bf, pts in ptgts_by_build_file.items()
    }
    existing_build_files_contents = await get_digest_contents(
        **implicitly(PathGlobs(ptgts_by_build_file.keys()))
    )
    existing_build_files_contents_by_path = {
        ebfc.path: ebfc.content for ebfc in existing_build_files_contents
    }

    def make_content(bf_path: str, pts: Iterable[PutativeTarget]) -> FileContent:
        existing_content_bytes = existing_build_files_contents_by_path.get(bf_path)
        existing_content = (
            tailor_subsystem.build_file_header
            if existing_content_bytes is None
            else existing_content_bytes.decode()
        )
        new_content_bytes = make_content_str(
            existing_content, tailor_subsystem.build_file_indent, pts
        ).encode()
        return FileContent(bf_path, new_content_bytes)

    new_digest = await create_digest(
        CreateDigest([make_content(path, ptgts) for path, ptgts in ptgts_by_build_file.items()]),
    )

    updated = set(existing_build_files_contents_by_path.keys())
    created = set(ptgts_by_build_file.keys()) - updated
    return EditedBuildFiles(new_digest, tuple(sorted(created)), tuple(sorted(updated)))


def spec_with_build_to_dir(spec: RawSpecs, build_file_patterns: tuple[str, ...]) -> RawSpecs:
    """Convert a spec like `path/to/BUILD` into `path/to`, which is probably the intention."""

    filespec_matcher = FilespecMatcher(build_file_patterns, ())

    def is_build_file(s: str):
        return bool(filespec_matcher.matches([s]))

    new_file_literals = []
    new_dir_literals = []

    # handles existing BUILD files
    for file_literal in spec.file_literals:
        path = Path(file_literal.file)
        if is_build_file(path.name):
            # convert FileLiteralSpec into DirLiteralSpec
            new_dir_literals.append(DirLiteralSpec(path.parent.as_posix()))
        else:
            new_file_literals.append(file_literal)

    # If the BUILD file doesn't exist (possibly because it was deleted)
    # it will appear as a dir_literal
    for dir_literal in spec.dir_literals:
        path = Path(dir_literal.directory)
        if is_build_file(path.name):
            new_dir_literals.append(DirLiteralSpec(path.parent.as_posix()))
        else:
            new_dir_literals.append(dir_literal)

    return dataclasses.replace(
        spec, dir_literals=tuple(new_dir_literals), file_literals=tuple(new_file_literals)
    )


def resolve_specs_with_build(specs: Specs, build_file_patterns: tuple[str, ...]) -> Specs:
    """Convert Specs with specs like `path/to/BUILD` into `path/to`, which is probably the
    intention."""
    new_includes = spec_with_build_to_dir(specs.includes, build_file_patterns)
    new_ignores = spec_with_build_to_dir(specs.ignores, build_file_patterns)
    return dataclasses.replace(specs, includes=new_includes, ignores=new_ignores)
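# Illustrative sketch (not part of the upstream module): with the default BUILD file
# patterns, a command-line spec pointing at a BUILD file is treated as its directory, so
#
#     pants tailor src/proj/BUILD
#
# is resolved as if the user had run
#
#     pants tailor src/proj
#
# whether or not that BUILD file currently exists (a deleted BUILD file shows up as a
# directory literal and is converted by the second loop above).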

@goal_rule
async def tailor(
    tailor_subsystem: TailorSubsystem,
    console: Console,
    workspace: Workspace,
    union_membership: UnionMembership,
    env_name: EnvironmentName,
    specs: Specs,
    build_file_options: BuildFileOptions,
) -> TailorGoal:
    tailor_subsystem.validate_build_file_name(build_file_options.patterns)

    specs = resolve_specs_with_build(specs, build_file_options.patterns)

    if not specs:
        if not specs.includes.from_change_detection:
            logger.warning(
                softwrap(
                    f"""\
                    No arguments specified with `{bin_name()} tailor`, so the goal will do nothing.

                    Instead, you should provide arguments like this:

                      * `{bin_name()} tailor ::` to run on everything
                      * `{bin_name()} tailor dir::` to run on `dir` and subdirs
                      * `{bin_name()} tailor dir` to run on `dir`
                      * `{bin_name()} tailor dir/{tailor_subsystem.build_file_name}` to run on `dir`
                      * `{bin_name()} --changed-since=HEAD tailor` to only run on changed and new files
                    """
                )
            )
        return TailorGoal(exit_code=0)

    specs_paths = await resolve_specs_paths(specs)
    dir_search_paths = tuple(sorted({os.path.dirname(f) for f in specs_paths.files}))

    putative_targets_results = await concurrently(
        generate_putative_targets(
            **implicitly(
                {req_type(dir_search_paths): PutativeTargetsRequest, env_name: EnvironmentName}
            )
        )
        for req_type in union_membership[PutativeTargetsRequest]
    )
    putative_targets = PutativeTargets.merge(putative_targets_results)
    putative_targets = PutativeTargets(
        pt.realias(tailor_subsystem.alias_for(pt.type_alias)) for pt in putative_targets
    )
    fixed_names_ptgts = await rename_conflicting_targets(putative_targets, **implicitly())
    fixed_sources_ptgts = await concurrently(
        restrict_conflicting_sources(ptgt) for ptgt in fixed_names_ptgts.putative_targets
    )

    valid_putative_targets = list(
        tailor_subsystem.filter_by_ignores(
            (disjoint_source_ptgt.putative_target for disjoint_source_ptgt in fixed_sources_ptgts),
            build_file_options.ignores,
        )
    )
    if not valid_putative_targets:
        return TailorGoal(exit_code=0)

    edited_build_files = await edit_build_files(
        EditBuildFilesRequest(PutativeTargets(valid_putative_targets)), **implicitly()
    )
    if not tailor_subsystem.check:
        workspace.write_digest(edited_build_files.digest)

    updated_build_files = set(edited_build_files.updated_paths)
    ptgts_by_build_file = group_by_build_file(
        tailor_subsystem.build_file_name, valid_putative_targets
    )
    for build_file_path, ptgts in ptgts_by_build_file.items():
        formatted_changes = "\n".join(
            f"  - Add {console.green(ptgt.type_alias)} target {console.cyan(ptgt.name)}"
            for ptgt in ptgts
        )
        if build_file_path in updated_build_files:
            verb = "Would update" if tailor_subsystem.check else "Updated"
        else:
            verb = "Would create" if tailor_subsystem.check else "Created"
        console.print_stdout(f"{verb} {console.blue(build_file_path)}:\n{formatted_changes}")

    if tailor_subsystem.check:
        console.print_stdout(f"\nTo fix `tailor` failures, run `{bin_name()} tailor`.")

    return TailorGoal(exit_code=1 if tailor_subsystem.check else 0)


def rules():
    return collect_rules()
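# Illustrative sketch (not part of the upstream module): a language backend typically
# plugs into `tailor` by subclassing `PutativeTargetsRequest`, providing a rule that
# returns `PutativeTargets`, and registering the union member, along the lines of:
#
#     @dataclass(frozen=True)
#     class PutativeHypotheticalTargetsRequest(PutativeTargetsRequest):
#         pass
#
#     @rule
#     async def find_putative_hypothetical_targets(
#         req: PutativeHypotheticalTargetsRequest, all_owned_sources: AllOwnedSources
#     ) -> PutativeTargets:
#         ...
#
#     def rules():
#         return [
#             *collect_rules(),
#             UnionRule(PutativeTargetsRequest, PutativeHypotheticalTargetsRequest),
#         ]
#
# Exact registration details vary by Pants version; existing backends show the
# canonical pattern.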