• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

pantsbuild / pants / 19050398641

03 Nov 2025 09:48PM UTC coverage: 80.3% (-0.003%) from 80.303%
19050398641

push

github

web-flow
A new "pants next-gen" command-line parser. (#22808)

This supports the command/subcommand paradigm, rather than
the "list of goals" paradigm of the existing cli parser.

This is experimental, not wired up to anything yet, and intended to
support a hypothetical "next-generation Pants" (pants_ng for short).

It is not intended to replicate all current Pants CLI nuances, but is a
much simpler design based on our experience with the complexities
of the current CLI parser.

In particular, this new CLI parser does not need to know in advance
about any registered goals and options. The old parser needed this
to tell the difference between specs and goals, and this added a
lot of complexity for little gain. This was also ambiguous, since
introduction of a new goal could change the meaning of an existing
invocation.

This parser instead *requires* specs to contain a filepath separator,
anything else is a flag or a command or a subcommand, and those
are easily distinguished by syntax alone. A spec path in the root dir
can be prefixed with `./` to meet this requirement.

This parser supports multiple cmd/subcommand pairs, each with 
flags, by separating them with a standalone `+` character. This,
again, removes ambiguity.

This parser supports only long flags (prefixed by `--`). We currently
support a handful of short flags (such as `-l`), but those cause
ambiguity since we also use a single dash prefix for negating
specs.

See https://github.com/pantsbuild/pants/discussions/22692 for context.

77992 of 97126 relevant lines covered (80.3%)

3.35 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

34.97
/src/python/pants/engine/internals/graph.py
1
# Copyright 2020 Pants project contributors (see CONTRIBUTORS.md).
2
# Licensed under the Apache License, Version 2.0 (see LICENSE).
3

4
from __future__ import annotations
12✔
5

6
import dataclasses
12✔
7
import functools
12✔
8
import itertools
12✔
9
import json
12✔
10
import logging
12✔
11
import os.path
12✔
12
from collections import defaultdict
12✔
13
from collections.abc import Coroutine, Iterable, Iterator, Mapping, Sequence
12✔
14
from dataclasses import dataclass
12✔
15
from pathlib import PurePath
12✔
16
from typing import Any, DefaultDict, NamedTuple, Type, TypeVar, cast
12✔
17

18
from pants.base.deprecated import warn_or_error
12✔
19
from pants.base.specs import AncestorGlobSpec, RawSpecsWithoutFileOwners, RecursiveGlobSpec
12✔
20
from pants.build_graph.address import BuildFileAddressRequest, ResolveError
12✔
21
from pants.engine.addresses import Address, Addresses, AddressInput, UnparsedAddressInputs
12✔
22
from pants.engine.collection import Collection
12✔
23
from pants.engine.environment import ChosenLocalEnvironmentName, EnvironmentName
12✔
24
from pants.engine.fs import EMPTY_SNAPSHOT, GlobMatchErrorBehavior, PathGlobs
12✔
25
from pants.engine.internals import native_engine
12✔
26
from pants.engine.internals.build_files import (
12✔
27
    AddressFamilyDir,
28
    BuildFileOptions,
29
    ensure_address_family,
30
    find_build_file,
31
    find_target_adaptor,
32
    maybe_resolve_address,
33
    parse_address_family,
34
    resolve_address,
35
)
36
from pants.engine.internals.mapper import AddressFamilies, SpecsFilter
12✔
37
from pants.engine.internals.native_engine import AddressParseException
12✔
38
from pants.engine.internals.parametrize import Parametrize, _TargetParametrization
12✔
39
from pants.engine.internals.parametrize import (  # noqa: F401
12✔
40
    _TargetParametrizations as _TargetParametrizations,
41
)
42
from pants.engine.internals.parametrize import (  # noqa: F401
12✔
43
    _TargetParametrizationsRequest as _TargetParametrizationsRequest,
44
)
45
from pants.engine.internals.synthetic_targets import (
12✔
46
    SyntheticTargetsSpecPathsRequest,
47
    get_synthetic_targets_spec_paths,
48
)
49
from pants.engine.internals.target_adaptor import SourceBlocks, TargetAdaptor, TargetAdaptorRequest
12✔
50
from pants.engine.intrinsics import digest_to_snapshot, path_globs_to_paths
12✔
51
from pants.engine.rules import collect_rules, concurrently, implicitly, rule
12✔
52
from pants.engine.target import (
12✔
53
    AllTargets,
54
    AllUnexpandedTargets,
55
    CoarsenedTarget,
56
    CoarsenedTargets,
57
    CoarsenedTargetsRequest,
58
    Dependencies,
59
    DependenciesRequest,
60
    DepsTraversalBehavior,
61
    ExplicitlyProvidedDependencies,
62
    ExplicitlyProvidedDependenciesRequest,
63
    Field,
64
    FieldDefaultFactoryRequest,
65
    FieldDefaultFactoryResult,
66
    FieldDefaults,
67
    FieldSetsPerTarget,
68
    FieldSetsPerTargetRequest,
69
    FilteredTargets,
70
    GeneratedTargets,
71
    GenerateSourcesRequest,
72
    GenerateTargetsRequest,
73
    HydratedSources,
74
    HydrateSourcesRequest,
75
    InferDependenciesRequest,
76
    InferredDependencies,
77
    InvalidFieldException,
78
    MultipleSourcesField,
79
    OverridesField,
80
    RegisteredTargetTypes,
81
    SourcesField,
82
    SourcesPaths,
83
    SourcesPathsRequest,
84
    SpecialCasedDependencies,
85
    Target,
86
    TargetFilesGenerator,
87
    TargetFilesGeneratorSettings,
88
    TargetFilesGeneratorSettingsRequest,
89
    TargetGenerator,
90
    Targets,
91
    TargetTypesToGenerateTargetsRequests,
92
    TransitivelyExcludeDependencies,
93
    TransitivelyExcludeDependenciesRequest,
94
    TransitiveTargets,
95
    TransitiveTargetsRequest,
96
    UnexpandedTargets,
97
    UnrecognizedTargetTypeException,
98
    ValidatedDependencies,
99
    ValidateDependenciesRequest,
100
    WrappedTarget,
101
    WrappedTargetRequest,
102
    _generate_file_level_targets,
103
    generate_sources,
104
    generate_targets,
105
)
106
from pants.engine.unions import UnionMembership, UnionRule
12✔
107
from pants.option.bootstrap_options import UnmatchedBuildFileGlobs
12✔
108
from pants.option.global_options import GlobalOptions
12✔
109
from pants.util.docutil import bin_name, doc_url
12✔
110
from pants.util.frozendict import FrozenDict
12✔
111
from pants.util.logging import LogLevel
12✔
112
from pants.util.memo import memoized
12✔
113
from pants.util.ordered_set import FrozenOrderedSet, OrderedSet
12✔
114
from pants.util.strutil import bullet_list, pluralize, softwrap
12✔
115
from pants.vcs.hunk import TextBlocks
12✔
116

117
# Module-level logger, named after this module per the standard convention.
logger = logging.getLogger(__name__)
118

119

120
# -----------------------------------------------------------------------------------------------
121
# Address -> Target(s)
122
# -----------------------------------------------------------------------------------------------
123

124

125
@rule(_masked_types=[EnvironmentName])
async def filter_targets(targets: Targets, specs_filter: SpecsFilter) -> FilteredTargets:
    """Narrow `targets` down to the ones accepted by the global specs filter."""
    matching = (candidate for candidate in targets if specs_filter.matches(candidate))
    return FilteredTargets(matching)
128

129

130
@rule
async def target_types_to_generate_targets_requests(
    union_membership: UnionMembership,
) -> TargetTypesToGenerateTargetsRequests:
    """Map each generator target type to the request class that expands it."""
    mapping = {}
    for request_cls in union_membership.get(GenerateTargetsRequest):
        mapping[request_cls.generate_from] = request_cls  # type: ignore[misc]
    return TargetTypesToGenerateTargetsRequests(mapping)
140

141

142
@memoized
def warn_deprecated_target_type(tgt_type: type[Target]) -> None:
    """Warn (at most once per target type, via memoization) about a deprecated target alias."""
    removal_version = tgt_type.deprecated_alias_removal_version
    assert removal_version is not None
    hint = (
        f"Instead, use `{tgt_type.alias}`, which behaves the same. Run `{bin_name()} "
        "update-build-files` to automatically fix your BUILD files."
    )
    warn_or_error(
        removal_version=removal_version,
        entity=f"the target name {tgt_type.deprecated_alias}",
        hint=hint,
    )
153

154

155
@memoized
def warn_deprecated_field_type(field_type: type[Field]) -> None:
    """Warn (at most once per field type, via memoization) about a deprecated field alias."""
    removal_version = field_type.deprecated_alias_removal_version
    assert removal_version is not None
    hint = (
        f"Instead, use `{field_type.alias}`, which behaves the same. Run `{bin_name()} "
        "update-build-files` to automatically fix your BUILD files."
    )
    warn_or_error(
        removal_version=removal_version,
        entity=f"the field name {field_type.deprecated_alias}",
        hint=hint,
    )
166

167

168
@dataclass(frozen=True)
class _AdaptorAndType:
    """A BUILD-file target declaration paired with the target type its alias resolved to."""

    # The raw target data as parsed from the BUILD file.
    adaptor: TargetAdaptor
    # The registered `Target` subclass matching `adaptor.type_alias`.
    target_type: type[Target]
172

173

174
@dataclass(frozen=True)
class _RequestAdaptorAndType:
    """Request to resolve an address to its `TargetAdaptor` and registered target type."""

    address: Address
    # Human-readable provenance of the request, used in error messages.
    description_of_origin: str
178

179

180
@rule
async def _determine_target_adaptor_and_type(
    req: _RequestAdaptorAndType, registered_target_types: RegisteredTargetTypes
) -> _AdaptorAndType:
    """Resolve an address to its BUILD-file adaptor and the registered target type.

    Raises `UnrecognizedTargetTypeException` if the adaptor's type alias is not a
    registered target type. Warns (once per type) when a deprecated alias is used
    directly in a BUILD file (generated targets are skipped, since the warning
    already fired for their generator).
    """
    target_adaptor = await find_target_adaptor(
        TargetAdaptorRequest(req.address, description_of_origin=req.description_of_origin)
    )
    target_type = registered_target_types.aliases_to_types.get(target_adaptor.type_alias, None)
    if target_type is None:
        raise UnrecognizedTargetTypeException(
            target_adaptor.type_alias,
            registered_target_types,
            req.address,
            target_adaptor.description_of_origin,
        )
    # Only warn when the BUILD file literally used the deprecated alias, and only on
    # the generator itself rather than on every generated target.
    if (
        target_type.deprecated_alias is not None
        and target_type.deprecated_alias == target_adaptor.type_alias
        and not req.address.is_generated_target
    ):
        warn_deprecated_target_type(target_type)
    return _AdaptorAndType(target_adaptor, target_type)
202

203

204
@dataclass(frozen=True)
class _TargetGeneratorOverridesRequest:
    """Request for the `overrides` field contents of a target generator."""

    target_generator: TargetGenerator
207

208

209
@dataclass(frozen=True)
class ResolvedTargetGeneratorRequests:
    """The `GenerateTargetsRequest` instances produced for a (possibly parametrized) generator.

    Empty when the address does not refer to a target generator.
    """

    requests: tuple[GenerateTargetsRequest, ...] = tuple()
212

213

214
@dataclass(frozen=True)
class ResolveTargetGeneratorRequests:
    """Request to compute the generation requests for the target generator at `address`."""

    address: Address
    # Excluded from hash/equality — presumably so differing origin strings still hit
    # the same memoized rule result. TODO(review): confirm that rationale.
    description_of_origin: str = dataclasses.field(hash=False, compare=False)
218

219

220
@dataclass(frozen=True)
class ResolveAllTargetGeneratorRequests:
    """Request to compute generation requests for every target generator in the repo."""

    # Excluded from hash/equality, matching `ResolveTargetGeneratorRequests`.
    description_of_origin: str = dataclasses.field(hash=False, compare=False)
    # When set, only generators whose BUILD alias matches this type are considered.
    of_type: type[TargetGenerator] | None = None
224

225

226
async def _parametrized_target_generators_with_templates(
    address: Address,
    target_adaptor: TargetAdaptor,
    target_type: type[TargetGenerator],
    generator_fields: dict[str, Any],
    union_membership: UnionMembership,
) -> list[tuple[TargetGenerator, Mapping[str, Any]]]:
    """Expand a target generator declaration into (generator, template-fields) pairs.

    Builds the `template_fields` that generated targets inherit (BUILD-file defaults,
    copied/moved fields, parametrize groups), then expands any parametrization of
    those fields into one generator instance per parametrization.

    NOTE: mutates `generator_fields` in place — moved fields and parametrize groups
    are `pop()`ed out so they are not also passed to the generator constructor.

    Raises `InvalidFieldException` if a field that stays on the generator (i.e. is
    neither copied nor moved) is parametrized.
    """
    # Pre-load field values from defaults for the target type being generated.
    if hasattr(target_type, "generated_target_cls"):
        family = (
            await parse_address_family(**implicitly(AddressFamilyDir(address.spec_path)))
        ).ensure()
        template_fields = dict(family.defaults.get(target_type.generated_target_cls.alias, {}))
    else:
        template_fields = {}

    # Split out the `propagated_fields` before construction.
    copied_fields = (
        *target_type.copied_fields,
        *target_type._find_copied_plugin_fields(union_membership),
    )
    moved_fields = (
        *target_type.moved_fields,
        *target_type._find_moved_plugin_fields(union_membership),
    )
    # Copied fields stay on the generator AND propagate to generated targets.
    for field_type in copied_fields:
        for alias in (field_type.deprecated_alias, field_type.alias):
            if alias is None:
                continue
            # Any deprecated field use will be checked on the generator target.
            field_value = generator_fields.get(alias, None)
            if field_value is not None:
                template_fields[alias] = field_value
    # Moved fields are removed from the generator and live only on generated targets.
    for field_type in moved_fields:
        # We must check for deprecated field usage here before passing the value to the generator.
        if field_type.deprecated_alias is not None:
            field_value = generator_fields.pop(field_type.deprecated_alias, None)
            if field_value is not None:
                warn_deprecated_field_type(field_type)
                template_fields[field_type.deprecated_alias] = field_value
        field_value = generator_fields.pop(field_type.alias, None)
        if field_value is not None:
            template_fields[field_type.alias] = field_value

    # Move parametrize groups over to `template_fields` in order to expand them.
    parametrize_group_field_names = [
        name
        for name, field in generator_fields.items()
        if isinstance(field, Parametrize) and field.is_group
    ]
    for field_name in parametrize_group_field_names:
        template_fields[field_name] = generator_fields.pop(field_name)

    # Any remaining parametrized field that is a real field of the generator type
    # would stay on the generator, which is not supported: reject it.
    field_type_aliases = target_type._get_field_aliases_to_field_types(
        target_type.class_field_types(union_membership)
    ).keys()
    generator_fields_parametrized = {
        name
        for name, field in generator_fields.items()
        if isinstance(field, Parametrize) and name in field_type_aliases
    }
    if generator_fields_parametrized:
        noun = pluralize(len(generator_fields_parametrized), "field", include_count=False)
        generator_fields_parametrized_text = ", ".join(
            repr(f) for f in generator_fields_parametrized
        )
        raise InvalidFieldException(
            f"Only fields which will be moved to generated targets may be parametrized, "
            f"so target generator {address} (with type {target_type.alias}) cannot "
            f"parametrize the {generator_fields_parametrized_text} {noun}."
        )
    # One generator instance per parametrization of the template fields; the
    # comprehension's `address` rebinds to each parametrized address.
    return [
        (
            _create_target(
                address,
                target_type,
                target_adaptor,
                generator_fields,
                union_membership,
                name_explicitly_set=target_adaptor.name is not None,
            ),
            template,
        )
        for address, template in Parametrize.expand(address, template_fields)
    ]
311

312

313
async def _target_generator_overrides(
    target_generator: TargetGenerator, unmatched_build_file_globs: UnmatchedBuildFileGlobs
) -> dict[str, dict[str, Any]]:
    """Compute per-generated-target field overrides from the generator's `overrides` field.

    For `TargetFilesGenerator` subclasses, the override keys are treated as file
    globs and are matched against the filesystem; for other generators the
    flattened `overrides` mapping is returned as-is. Returns an empty mapping when
    the generator has no `OverridesField`.
    """
    address = target_generator.address
    if target_generator.has_field(OverridesField):
        overrides_field = target_generator[OverridesField]
        overrides_flattened = overrides_field.flatten()
    else:
        overrides_flattened = {}
    if isinstance(target_generator, TargetFilesGenerator):
        # Resolve each override key as a glob relative to the generator's directory.
        override_globs = OverridesField.to_path_globs(
            address, overrides_flattened, unmatched_build_file_globs
        )
        override_paths = await concurrently(
            path_globs_to_paths(path_globs) for path_globs in override_globs
        )
        return OverridesField.flatten_paths(
            address, zip(override_paths, override_globs, overrides_flattened.values())
        )
    return overrides_flattened
333

334

335
@rule
async def resolve_generator_target_requests(
    req: ResolveTargetGeneratorRequests,
    union_membership: UnionMembership,
    target_types_to_generate_requests: TargetTypesToGenerateTargetsRequests,
    unmatched_build_file_globs: UnmatchedBuildFileGlobs,
) -> ResolvedTargetGeneratorRequests:
    """Build the `GenerateTargetsRequest`s for the target generator at `req.address`.

    Returns an empty result if the address is not a target generator, or if no
    request class is registered to expand its target type.
    """
    adaptor_and_type = await _determine_target_adaptor_and_type(
        _RequestAdaptorAndType(req.address, req.description_of_origin), **implicitly()
    )
    target_adaptor = adaptor_and_type.adaptor
    target_type = adaptor_and_type.target_type
    if not issubclass(target_type, TargetGenerator):
        return ResolvedTargetGeneratorRequests()

    generate_request = target_types_to_generate_requests.request_for(target_type)
    if not generate_request:
        return ResolvedTargetGeneratorRequests()
    # NB: `_parametrized_target_generators_with_templates` mutates this dict
    # (popping moved fields / parametrize groups) before it is reused below.
    generator_fields = dict(target_adaptor.kwargs)
    generators = await _parametrized_target_generators_with_templates(
        req.address,
        target_adaptor,
        target_type,
        generator_fields,
        union_membership,
    )
    # A non-parametrized generator instance, used only to read the overrides field.
    base_generator = _create_target(
        req.address,
        target_type,
        target_adaptor,
        generator_fields,
        union_membership,
    )
    overrides = await _target_generator_overrides(base_generator, unmatched_build_file_globs)
    return ResolvedTargetGeneratorRequests(
        requests=tuple(
            generate_request(
                generator,
                template_address=generator.address,
                template=template,
                # Overrides may themselves be parametrized; expand them per generator.
                overrides={
                    name: dict(Parametrize.expand(generator.address, override))
                    for name, override in overrides.items()
                },
            )
            for generator, template in generators
        )
    )
383

384

385
@rule(_masked_types=[EnvironmentName])
async def address_families_from_raw_specs_without_file_owners(
    specs: RawSpecsWithoutFileOwners,
    build_file_options: BuildFileOptions,
) -> AddressFamilies:
    """Find the BUILD-file address families matched by directory/glob specs.

    Returns an empty result when the specs contain no directory or glob components.
    Directories contributed by synthetic targets are included alongside those
    discovered from BUILD files on disk.
    """
    if not (specs.dir_literals or specs.dir_globs or specs.recursive_globs or specs.ancestor_globs):
        return AddressFamilies()
    # Resolve all globs.
    build_file_globs, validation_globs = specs.to_build_file_path_globs_tuple(
        build_patterns=build_file_options.patterns,
        build_ignore_patterns=build_file_options.ignores,
    )
    # The validation globs are evaluated only for their error behavior; the result
    # is discarded.
    build_file_paths, _ = await concurrently(
        path_globs_to_paths(build_file_globs),
        path_globs_to_paths(validation_globs),
    )
    # Start from directories that synthetic targets declare, then add every
    # directory containing a matched BUILD file.
    dirnames = set(
        await get_synthetic_targets_spec_paths(
            SyntheticTargetsSpecPathsRequest(tuple(specs.glob_specs())), **implicitly()
        )
    )
    dirnames.update(os.path.dirname(f) for f in build_file_paths.files)
    return AddressFamilies(
        await concurrently(
            ensure_address_family(**implicitly(AddressFamilyDir(d))) for d in dirnames
        )
    )
412

413

414
@rule
async def resolve_all_generator_target_requests(
    req: ResolveAllTargetGeneratorRequests,
) -> ResolvedTargetGeneratorRequests:
    """Collect generation requests for every target generator in the repository.

    Scans all address families under the repo root, optionally filtered to
    declarations whose BUILD alias matches `req.of_type`, and flattens the
    per-generator requests into a single result.
    """
    address_families = await address_families_from_raw_specs_without_file_owners(
        RawSpecsWithoutFileOwners(
            recursive_globs=(RecursiveGlobSpec(""),),
            description_of_origin="the `ResolveAllTargetGeneratorRequests` rule",
        ),
        **implicitly(),
    )
    results = await concurrently(
        resolve_generator_target_requests(
            ResolveTargetGeneratorRequests(address, req.description_of_origin), **implicitly()
        )
        for family in address_families
        for address, target_adaptor in family.addresses_to_target_adaptors.items()
        if not req.of_type or target_adaptor.type_alias == req.of_type.alias
    )
    return ResolvedTargetGeneratorRequests(
        tuple(itertools.chain.from_iterable(result.requests for result in results))
    )
436

437

438
@rule
async def resolve_target_parametrizations(
    request: _TargetParametrizationsRequest,
    union_membership: UnionMembership,
    env_name: EnvironmentName,
) -> _TargetParametrizations:
    """Resolve an address to all of its parametrizations and generated targets.

    For a target generator with registered generation requests, each request is
    executed and contributes one `_TargetParametrization` per generator instance.
    Otherwise (including a generator that yields no requests), the declaration is
    expanded directly via `_target_parametrizations`.
    """
    address = request.address
    adaptor_and_type = await _determine_target_adaptor_and_type(
        _RequestAdaptorAndType(request.address, request.description_of_origin), **implicitly()
    )
    target_adaptor = adaptor_and_type.adaptor
    target_type = adaptor_and_type.target_type

    parametrizations: list[_TargetParametrization] = []
    requests: ResolvedTargetGeneratorRequests | None = None
    if issubclass(target_type, TargetGenerator):
        requests = await resolve_generator_target_requests(
            ResolveTargetGeneratorRequests(address, request.description_of_origin), **implicitly()
        )
    if requests and requests.requests:
        all_generated = await concurrently(
            generate_targets(
                **implicitly({generate_request: GenerateTargetsRequest, env_name: EnvironmentName})
            )
            for generate_request in requests.requests
        )
        # `concurrently` preserves order, so generated batches zip with their requests.
        parametrizations.extend(
            _TargetParametrization(generate_request.generator, generated_batch)
            for generated_batch, generate_request in zip(all_generated, requests.requests)
        )
    else:
        parametrizations.append(
            _target_parametrizations(address, target_adaptor, target_type, union_membership)
        )

    return _TargetParametrizations(parametrizations)
474

475

476
@rule(_masked_types=[EnvironmentName])
async def resolve_target(
    request: WrappedTargetRequest,
    target_types_to_generate_requests: TargetTypesToGenerateTargetsRequests,
    local_environment_name: ChosenLocalEnvironmentName,
) -> WrappedTarget:
    """Resolve a single address — possibly a generated target — to its Target.

    Looks the address up within the parametrizations of its generator (or of
    itself, if it is not generated). Raises `ResolveError` listing the valid
    generated addresses when the lookup fails.
    """
    address = request.address
    # Parametrizations are computed for the generator, so normalize a generated
    # address back to its generator first.
    base_address = address.maybe_convert_to_target_generator()
    parametrizations = await resolve_target_parametrizations(
        **implicitly(
            {
                _TargetParametrizationsRequest(
                    base_address, description_of_origin=request.description_of_origin
                ): _TargetParametrizationsRequest,
                local_environment_name.val: EnvironmentName,
            }
        )
    )
    target = parametrizations.get(address, target_types_to_generate_requests)
    if target is None:
        raise ResolveError(
            softwrap(
                f"""
                The address `{address}` from {request.description_of_origin} was not generated by
                the target `{base_address}`. Did you mean one of these addresses?

                {bullet_list(str(t.address) for t in parametrizations.all)}
                """
            )
        )
    return WrappedTarget(target)
507

508

509
@rule(_masked_types=[EnvironmentName])
async def resolve_unexpanded_targets(addresses: Addresses) -> UnexpandedTargets:
    """Resolve addresses to targets without expanding target generators."""
    wrapped_targets = await concurrently(
        resolve_target(
            WrappedTargetRequest(
                a,
                # Idiomatic rules should not be manually constructing `Addresses`. Instead, they
                # should use `UnparsedAddressInputs` or `Specs` rules.
                #
                # It is technically more correct for us to require callers of
                # `Addresses -> UnexpandedTargets` to specify a `description_of_origin`. But in
                # practice, this dramatically increases boilerplate, and it should never be
                # necessary.
                #
                # Note that this contrasts with an individual `Address`, which often is unverified
                # because it can come from the rule `AddressInput -> Address`, which only verifies
                # that it has legal syntax and does not check the address exists.
                description_of_origin="<infallible>",
            ),
            **implicitly(),
        )
        for a in addresses
    )
    return UnexpandedTargets(wrapped_target.target for wrapped_target in wrapped_targets)
533

534

535
# Type variable used by helpers that construct and return a specific Target subclass.
_TargetType = TypeVar("_TargetType", bound=Target)
536

537

538
def _create_target(
    address: Address,
    target_type: type[_TargetType],
    target_adaptor: TargetAdaptor,
    field_values: Mapping[str, Any],
    union_membership: UnionMembership,
    name_explicitly_set: bool | None = None,
) -> _TargetType:
    """Instantiate `target_type` at `address` with `field_values`.

    `name_explicitly_set` overrides the adaptor's own flag when provided; `None`
    defers to `target_adaptor.name_explicitly_set`. Warns (once per field type)
    for any deprecated field alias present in `field_values`.
    """
    target = target_type(
        field_values,
        address,
        name_explicitly_set=(
            target_adaptor.name_explicitly_set
            if name_explicitly_set is None
            else name_explicitly_set
        ),
        union_membership=union_membership,
        description_of_origin=target_adaptor.description_of_origin,
        origin_sources_blocks=target_adaptor.origin_sources_blocks,
    )
    # Check for any deprecated field usage.
    for field_type in target.field_types:
        if field_type.deprecated_alias is not None and field_type.deprecated_alias in field_values:
            warn_deprecated_field_type(field_type)

    return target
564

565

566
def _target_parametrizations(
    address: Address,
    target_adaptor: TargetAdaptor,
    target_type: type[Target],
    union_membership: UnionMembership,
) -> _TargetParametrization:
    """Expand one target declaration into its parametrized instances, if any."""
    expanded = tuple(Parametrize.expand(address, target_adaptor.kwargs))
    first_address, _ = expanded[0]

    if first_address is address:
        # No parametrization took place: the declaration maps to exactly one target.
        single_target = _create_target(
            address,
            target_type,
            target_adaptor,
            target_adaptor.kwargs,
            union_membership,
        )
        return _TargetParametrization(single_target, FrozenDict())

    # The target was parametrized, so only the expanded addresses exist; the
    # original declared address has no Target of its own.
    generated = FrozenDict(
        (
            parameterized_address,
            _create_target(
                parameterized_address,
                target_type,
                target_adaptor,
                parameterized_fields,
                union_membership,
            ),
        )
        for parameterized_address, parameterized_fields in expanded
    )
    return _TargetParametrization(None, generated)
600

601

602
@dataclass(frozen=True)
class WrappedTargetForBootstrap:
    """Used to avoid a rule graph cycle when evaluating bootstrap targets.

    This does not work with target generation and parametrization. It also ignores any unrecognized
    fields in the target, to accommodate plugin fields which are not yet registered during
    bootstrapping.

    This should only be used by bootstrapping code.
    """

    # The resolved target (constructed with `ignore_unrecognized_fields=True`).
    val: Target
614

615

616
@rule
async def resolve_target_for_bootstrapping(
    request: WrappedTargetRequest,
    union_membership: UnionMembership,
) -> WrappedTargetForBootstrap:
    """Resolve a target during bootstrap, skipping generation/parametrization.

    Constructs the target directly from its adaptor with
    `ignore_unrecognized_fields=True`, since plugin fields may not be registered
    yet at this stage. See `WrappedTargetForBootstrap`.
    """
    adaptor_and_type = await _determine_target_adaptor_and_type(
        _RequestAdaptorAndType(
            request.address,
            description_of_origin=request.description_of_origin,
        ),
        **implicitly(),
    )
    target_adaptor = adaptor_and_type.adaptor
    target_type = adaptor_and_type.target_type
    target = target_type(
        target_adaptor.kwargs,
        request.address,
        name_explicitly_set=target_adaptor.name_explicitly_set,
        union_membership=union_membership,
        ignore_unrecognized_fields=True,
        description_of_origin=target_adaptor.description_of_origin,
        origin_sources_blocks=target_adaptor.origin_sources_blocks,
    )
    return WrappedTargetForBootstrap(target)
640

641

642
@rule(_masked_types=[EnvironmentName])
async def resolve_targets(
    targets: UnexpandedTargets,
    target_types_to_generate_requests: TargetTypesToGenerateTargetsRequests,
    local_environment_name: ChosenLocalEnvironmentName,
) -> Targets:
    """Expand target generators into the targets they generate.

    Non-generator targets (and already-generated targets) pass through unchanged.
    """
    # Replace all generating targets with what they generate. Otherwise, keep them. If a target
    # generator does not generate any targets, keep the target generator.
    # TODO: This method does not preserve the order of inputs.
    expanded_targets: OrderedSet[Target] = OrderedSet()
    generator_targets = []
    parametrizations_gets = []
    for tgt in targets:
        if (
            target_types_to_generate_requests.is_generator(tgt)
            and not tgt.address.is_generated_target
        ):
            generator_targets.append(tgt)
            parametrizations_gets.append(
                resolve_target_parametrizations(
                    **implicitly(
                        {
                            _TargetParametrizationsRequest(
                                tgt.address.maybe_convert_to_target_generator(),
                                # Idiomatic rules should not be manually creating `UnexpandedTargets`, so
                                # we can be confident that the targets actually exist and the addresses
                                # are already legitimate.
                                description_of_origin="<infallible>",
                            ): _TargetParametrizationsRequest,
                            local_environment_name.val: EnvironmentName,
                        }
                    )
                )
            )
        else:
            expanded_targets.add(tgt)

    # `concurrently` preserves order, so results zip with `generator_targets`.
    all_generated_targets = await concurrently(parametrizations_gets)
    expanded_targets.update(
        tgt
        for generator, parametrizations in zip(generator_targets, all_generated_targets)
        for tgt in parametrizations.generated_or_generator(generator.address)
    )
    return Targets(expanded_targets)
686

687

688
@rule(desc="Find all targets in the project", level=LogLevel.DEBUG, _masked_types=[EnvironmentName])
async def find_all_targets() -> AllTargets:
    """Find every target in the repository, expanding target generators."""
    tgts = await resolve_targets(
        **implicitly(
            RawSpecsWithoutFileOwners(
                recursive_globs=(RecursiveGlobSpec(""),),
                description_of_origin="the `AllTargets` rule",
            )
        )
    )
    return AllTargets(tgts)
699

700

701
@rule(
    desc="Find all (unexpanded) targets in the project",
    level=LogLevel.DEBUG,
    _masked_types=[EnvironmentName],
)
async def find_all_unexpanded_targets() -> AllUnexpandedTargets:
    """Find every target in the repository without expanding target generators.

    Mirrors `find_all_targets`, but keeps target generators as-is instead of
    replacing them with the targets they generate.
    """
    tgts = await resolve_unexpanded_targets(
        **implicitly(
            RawSpecsWithoutFileOwners(
                recursive_globs=(RecursiveGlobSpec(""),),
                # Fix: previously said "the `AllTargets` rule" (copy-paste from
                # `find_all_targets`); this rule backs `AllUnexpandedTargets`.
                description_of_origin="the `AllUnexpandedTargets` rule",
            )
        )
    )
    return AllUnexpandedTargets(tgts)
716

717

718
# -----------------------------------------------------------------------------------------------
719
# TransitiveTargets
720
# -----------------------------------------------------------------------------------------------
721

722

723
class CycleException(Exception):
    """Raised when the dependency graph contains a cycle between (non-file) targets.

    `subject` is the address at which the cycle was detected, and `path` is the full
    chain of addresses visited, ending with `subject` again.
    """

    def __init__(self, subject: Address, path: tuple[Address, ...]) -> None:
        # Prefix the offending address with "->" so it stands out in the rendered path.
        path_string = "\n".join((f"-> {a}" if a == subject else f"   {a}") for a in path)
        super().__init__(
            f"The dependency graph contained a cycle:\n{path_string}\n\nTo fix this, first verify "
            "if your code has an actual import cycle. If it does, you likely need to re-architect "
            "your code to avoid the cycle.\n\nIf there is no cycle in your code, then you may need "
            "to use more granular targets. Split up the problematic targets into smaller targets "
            "with more granular `sources` fields so that you can adjust the `dependencies` fields "
            "to avoid introducing a cycle.\n\nAlternatively, use Python dependency inference "
            "(`--python-infer-imports`), rather than explicit `dependencies`. Pants will infer "
            "dependencies on specific files, rather than entire targets. This extra precision "
            "means that you will only have cycles if your code actually does have cycles in it."
        )
        self.subject = subject
        self.path = path
×
739

740

741
def _detect_cycles(
    roots: tuple[Address, ...], dependency_mapping: dict[Address, tuple[Address, ...]]
) -> None:
    """Depth-first search from each root, raising `CycleException` on a non-file cycle.

    File-level addresses are cycle tolerant: a cycle is only reported when no file target
    appears inside the cyclic suffix of the current DFS path.
    """
    # The chain of addresses on the current DFS path, in visit order.
    path_stack: OrderedSet[Address] = OrderedSet()
    visited: set[Address] = set()

    def maybe_report_cycle(address: Address) -> None:
        # NB: File-level dependencies are cycle tolerant.
        if address.is_file_target or address not in path_stack:
            return

        # The path of the cycle is shorter than the entire path to the cycle: if the suffix of
        # the path representing the cycle contains a file dep, it is ignored.
        in_cycle = False
        for path_address in path_stack:
            if in_cycle and path_address.is_file_target:
                # There is a file address inside the cycle: do not report it.
                return
            elif in_cycle:
                # Not a file address.
                continue
            else:
                # We're entering the suffix of the path that contains the cycle if we've reached
                # the address in question.
                in_cycle = path_address == address
        # If we did not break out early, it's because there were no file addresses in the cycle.
        raise CycleException(address, (*path_stack, address))

    def visit(address: Address):
        # Re-encountering an address is only a problem if it is still on the current path.
        if address in visited:
            maybe_report_cycle(address)
            return
        path_stack.add(address)
        visited.add(address)

        for dep_address in dependency_mapping[address]:
            visit(dep_address)

        # Backtrack: this address is no longer on the active path.
        path_stack.remove(address)

    for root in roots:
        visit(root)
        if path_stack:
            # Sanity check: a completed DFS must have fully unwound its path.
            raise AssertionError(
                f"The stack of visited nodes should have been empty at the end of recursion, "
                f"but it still contained: {path_stack}"
            )
788

789

790
@dataclass(frozen=True)
class _DependencyMappingRequest:
    """Request to compute the transitive dependency mapping for some roots.

    `expanded_targets` selects whether dependencies are resolved as expanded `Targets`
    (generated targets) or as `UnexpandedTargets` (target generators kept as-is).
    """

    # The underlying transitive-targets request: roots plus the traversal predicate.
    tt_request: TransitiveTargetsRequest
    expanded_targets: bool
12✔
794

795

796
@dataclass(frozen=True)
class _DependencyMapping:
    """The result of walking the transitive dependency graph from a set of roots."""

    # Maps each traversed address to the addresses of its direct dependencies.
    mapping: FrozenDict[Address, tuple[Address, ...]]
    # All targets reached during traversal (roots appear only if they are also dependencies).
    visited: FrozenOrderedSet[Target]
    # The requested roots, resolved to targets.
    roots_as_targets: Collection[Target]
12✔
801

802

803
@rule
async def transitive_dependency_mapping(request: _DependencyMappingRequest) -> _DependencyMapping:
    """This uses iteration, rather than recursion, so that we can tolerate dependency cycles.

    Unlike a traditional BFS algorithm, we batch each round of traversals via `MultiGet` for
    improved performance / concurrency.
    """
    roots_as_targets = await resolve_unexpanded_targets(Addresses(request.tt_request.roots))
    visited: OrderedSet[Target] = OrderedSet()
    queued = FrozenOrderedSet(roots_as_targets)
    dependency_mapping: dict[Address, tuple[Address, ...]] = {}
    while queued:
        direct_dependencies: tuple[Collection[Target], ...]
        # The two branches differ only in whether generated targets are expanded.
        if request.expanded_targets:
            direct_dependencies = await concurrently(  # noqa: PNT30: this is inherently sequential
                resolve_targets(
                    **implicitly(
                        DependenciesRequest(
                            tgt.get(Dependencies),
                            should_traverse_deps_predicate=request.tt_request.should_traverse_deps_predicate,
                        )
                    )
                )
                for tgt in queued
            )
        else:
            direct_dependencies = await concurrently(  # noqa: PNT30: this is inherently sequential
                resolve_unexpanded_targets(
                    **implicitly(
                        DependenciesRequest(
                            tgt.get(Dependencies),
                            should_traverse_deps_predicate=request.tt_request.should_traverse_deps_predicate,
                        )
                    )
                )
                for tgt in queued
            )

        # Record the direct-dependency edges for this round, keyed by the dependent's address.
        dependency_mapping.update(
            zip(
                (t.address for t in queued),
                (tuple(t.address for t in deps) for deps in direct_dependencies),
            )
        )

        # Next round: only dependencies we have not already visited.
        queued = FrozenOrderedSet(itertools.chain.from_iterable(direct_dependencies)).difference(
            visited
        )
        visited.update(queued)

    # NB: We use `roots_as_targets` to get the root addresses, rather than `request.roots`. This
    # is because expanding from the `Addresses` -> `Targets` may have resulted in generated
    # targets being used, so we need to use `roots_as_targets` to have this expansion.
    # TODO(#12871): Fix this to not be based on generated targets.
    _detect_cycles(tuple(t.address for t in roots_as_targets), dependency_mapping)
    return _DependencyMapping(
        FrozenDict(dependency_mapping), FrozenOrderedSet(visited), roots_as_targets
    )
861

862

863
class SubprojectRoots(Collection[str]):
    """The `subproject_roots` global option values, as a collection of path strings."""
12✔
865

866

867
@rule
async def extract_subproject_roots(global_options: GlobalOptions) -> SubprojectRoots:
    """Lift the `subproject_roots` global option into its own rule-graph type."""
    roots = global_options.subproject_roots
    return SubprojectRoots(roots)
×
870

871

872
@rule(desc="Resolve addresses")
async def resolve_unparsed_address_inputs(
    request: UnparsedAddressInputs, subproject_roots: SubprojectRoots
) -> Addresses:
    """Parse and resolve a batch of raw address strings into validated `Addresses`.

    Honors `request.skip_invalid_addresses`: when set, unparseable or unresolvable
    addresses are logged at debug level and dropped instead of raising.
    """
    address_inputs = []
    invalid_addresses = []
    for v in request.values:
        try:
            address_inputs.append(
                AddressInput.parse(
                    v,
                    relative_to=request.relative_to,
                    subproject_roots=subproject_roots,
                    description_of_origin=request.description_of_origin,
                )
            )
        except AddressParseException:
            # Re-raise parse failures unless the request explicitly tolerates them.
            if not request.skip_invalid_addresses:
                raise
            invalid_addresses.append(v)

    if request.skip_invalid_addresses:
        # Best-effort resolution: keep only inputs that resolve to a real Address.
        maybe_addresses = await concurrently(maybe_resolve_address(ai) for ai in address_inputs)
        valid_addresses = []
        for maybe_address, address_input in zip(maybe_addresses, address_inputs):
            if isinstance(maybe_address.val, Address):
                valid_addresses.append(maybe_address.val)
            else:
                invalid_addresses.append(address_input.spec)

        if invalid_addresses:
            logger.debug(
                softwrap(
                    f"""
                    Invalid addresses from {request.description_of_origin}:
                    {sorted(invalid_addresses)}. Skipping them.
                    """
                )
            )
        return Addresses(valid_addresses)

    addresses = await concurrently(
        resolve_address(**implicitly({ai: AddressInput})) for ai in address_inputs
    )
    # Validate that the addresses exist. We do this eagerly here because
    # `Addresses -> UnexpandedTargets` does not preserve the `description_of_origin`, so it would
    # be too late, per https://github.com/pantsbuild/pants/issues/15858.
    await concurrently(
        resolve_target(
            WrappedTargetRequest(addr, description_of_origin=request.description_of_origin),
            **implicitly(),
        )
        for addr in addresses
    )
    return Addresses(addresses)
×
927

928

929
@rule(polymorphic=True)
async def transitively_exclude_dependencies(
    request: TransitivelyExcludeDependenciesRequest,
    environment_name: EnvironmentName,
) -> TransitivelyExcludeDependencies:
    """Polymorphic entry point for plugin-provided transitive excludes.

    Concrete implementations are supplied via union members of
    `TransitivelyExcludeDependenciesRequest` (see `transitive_targets`); this stub body is
    not meant to be executed directly.
    """
    raise NotImplementedError()
×
935

936

937
@rule(desc="Resolve transitive targets", level=LogLevel.DEBUG, _masked_types=[EnvironmentName])
async def transitive_targets(
    request: TransitiveTargetsRequest,
    local_environment_name: ChosenLocalEnvironmentName,
    union_membership: UnionMembership,
) -> TransitiveTargets:
    """Find all the targets transitively depended upon by the target roots."""
    environment_name = local_environment_name.val

    # Walk the graph with expanded targets (generated targets, not their generators).
    dependency_mapping = await transitive_dependency_mapping(
        _DependencyMappingRequest(request, True)
    )
    targets = (*dependency_mapping.roots_as_targets, *dependency_mapping.visited)

    # Apply any transitive excludes (`!!` ignores).
    unevaluated_transitive_excludes = []
    for t in targets:
        unparsed = t.get(Dependencies).unevaluated_transitive_excludes
        if unparsed.values:
            unevaluated_transitive_excludes.append(unparsed)

    transitive_exclude_addresses = []
    if unevaluated_transitive_excludes:
        all_transitive_exclude_addresses = await concurrently(
            resolve_unparsed_address_inputs(unparsed, **implicitly())
            for unparsed in unevaluated_transitive_excludes
        )
        transitive_exclude_addresses = [
            *itertools.chain.from_iterable(all_transitive_exclude_addresses)
        ]

    # Apply plugin-provided transitive excludes
    if request_types := cast(
        "Sequence[Type[TransitivelyExcludeDependenciesRequest]]",
        union_membership.get(TransitivelyExcludeDependenciesRequest),
    ):
        # Match each target with the request types whose `infer_from` field applies to it.
        tgts_to_request_types = {
            tgt: [
                inference_request_type
                for inference_request_type in request_types
                if inference_request_type.infer_from.is_applicable(tgt)
            ]
            for tgt in targets
        }

        results = await concurrently(
            transitively_exclude_dependencies(
                **implicitly(
                    {
                        request_type(
                            request_type.infer_from.create(tgt)
                        ): TransitivelyExcludeDependenciesRequest,
                        environment_name: EnvironmentName,
                    }
                ),
            )
            for tgt, request_types in tgts_to_request_types.items()
            for request_type in request_types
        )
        transitive_exclude_addresses.extend(
            itertools.chain.from_iterable(addresses for addresses in results)
        )

    transitive_excludes = await resolve_targets(
        **implicitly(Addresses(transitive_exclude_addresses))
    )

    # NOTE: excludes are removed only from the transitive closure; the roots themselves
    # are returned unfiltered.
    return TransitiveTargets(
        tuple(dependency_mapping.roots_as_targets),
        FrozenOrderedSet(dependency_mapping.visited.difference(transitive_excludes)),
    )
1008

1009

1010
# -----------------------------------------------------------------------------------------------
1011
# CoarsenedTargets
1012
# -----------------------------------------------------------------------------------------------
1013

1014

1015
@rule(_masked_types=[EnvironmentName])
async def coarsened_targets_request(addresses: Addresses) -> CoarsenedTargetsRequest:
    """Wrap a set of addresses in a default-configured `CoarsenedTargetsRequest`."""
    wrapped = CoarsenedTargetsRequest(addresses)
    return wrapped
×
1018

1019

1020
@rule(desc="Resolve coarsened targets", level=LogLevel.DEBUG, _masked_types=[EnvironmentName])
async def resolve_coarsened_targets(
    request: CoarsenedTargetsRequest, local_environment_name: ChosenLocalEnvironmentName
) -> CoarsenedTargets:
    """Group the transitive closure of the requested roots into strongly connected components.

    Each dependency cycle collapses into a single `CoarsenedTarget`, so the result forms a
    DAG of coarsened targets rooted at the components containing the request's roots.
    """
    dependency_mapping = await transitive_dependency_mapping(
        _DependencyMappingRequest(
            TransitiveTargetsRequest(
                request.roots,
                should_traverse_deps_predicate=request.should_traverse_deps_predicate,
            ),
            expanded_targets=request.expanded_targets,
        )
    )
    addresses_to_targets = {
        t.address: t for t in [*dependency_mapping.visited, *dependency_mapping.roots_as_targets]
    }

    # Because this is Tarjan's SCC (TODO: update signature to guarantee), components are returned
    # in reverse topological order. We can thus assume when building the structure shared
    # `CoarsenedTarget` instances that each instance will already have had its dependencies
    # constructed.
    components = native_engine.strongly_connected_components(
        list(dependency_mapping.mapping.items())
    )

    coarsened_targets: dict[Address, CoarsenedTarget] = {}
    root_coarsened_targets = []
    root_addresses_set = set(request.roots)
    try:
        for component in components:
            component = sorted(component)
            component_set = set(component)

            # For each member of the component, include the CoarsenedTarget for each of its external
            # dependencies.
            coarsened_target = CoarsenedTarget(
                (addresses_to_targets[a] for a in component),
                (
                    coarsened_targets[d]
                    for a in component
                    for d in dependency_mapping.mapping[a]
                    if d not in component_set
                ),
            )

            # Add to the coarsened_targets mapping under each of the component's Addresses.
            for address in component:
                coarsened_targets[address] = coarsened_target

            # If any of the input Addresses was a member of this component, it is a root.
            if component_set & root_addresses_set:
                root_coarsened_targets.append(coarsened_target)
    except KeyError:
        # TODO: This output is intended to help uncover a non-deterministic error reported in
        # https://github.com/pantsbuild/pants/issues/17047.
        mapping_str = json.dumps(
            {str(a): [str(d) for d in deps] for a, deps in dependency_mapping.mapping.items()}
        )
        components_str = json.dumps([[str(a) for a in component] for component in components])
        logger.warning(f"For {request}:\nMapping:\n{mapping_str}\nComponents:\n{components_str}")
        raise
    return CoarsenedTargets(tuple(root_coarsened_targets))
×
1082

1083

1084
# -----------------------------------------------------------------------------------------------
1085
# Find the owners of a file
1086
# -----------------------------------------------------------------------------------------------
1087

1088

1089
def _log_or_raise_unmatched_owners(
    file_paths: Sequence[PurePath],
    owners_not_found_behavior: GlobMatchErrorBehavior,
    ignore_option: str | None = None,
) -> None:
    """Warn or error, per `owners_not_found_behavior`, about files with no owning target.

    Args:
        file_paths: The un-owned files to report.
        owners_not_found_behavior: `warn` logs a warning; any other value raises `ResolveError`.
        ignore_option: If given, named in the message as the flag that would skip un-owned files.
    """
    option_msg = (
        f"\n\nIf you would like to ignore un-owned files, please pass `{ignore_option}`."
        if ignore_option
        else ""
    )
    if len(file_paths) == 1:
        prefix = (
            f"No owning targets could be found for the file `{file_paths[0]}`.\n\n"
            f"Please check that there is a BUILD file in the parent directory "
            f"{file_paths[0].parent} with a target whose `sources` field includes the file."
        )
    else:
        # Fixed: the plural message previously had a stray trailing backtick after the file
        # list (`...]`.`), producing malformed output.
        prefix = (
            f"No owning targets could be found for the files {sorted(map(str, file_paths))}.\n\n"
            f"Please check that there are BUILD files in each file's parent directory with a "
            f"target whose `sources` field includes the file."
        )
    msg = (
        f"{prefix} See {doc_url('docs/using-pants/key-concepts/targets-and-build-files')} for more information on target definitions."
        f"\n\nYou may want to run `{bin_name()} tailor` to autogenerate your BUILD files. See "
        f"{doc_url('create-initial-build-files')}.{option_msg}"
    )

    if owners_not_found_behavior == GlobMatchErrorBehavior.warn:
        logger.warning(msg)
    else:
        raise ResolveError(msg)
×
1121

1122

1123
@dataclass(frozen=True)
class TargetSourceBlocks:
    """The source blocks within a single file that are owned by one target."""

    # The owning target's address.
    address: Address
    # The blocks of the file attributed to that target.
    source_blocks: SourceBlocks
12✔
1127

1128

1129
class FilenameTargetSourceBlocksMapping(FrozenDict[str, tuple[TargetSourceBlocks, ...]]):
    """Map file paths to all TargetSourceBlocks owned by that file.

    Built by `calc_source_block_mapping`; empty when the sources-blocks feature is disabled.
    """
1131

1132

1133
@dataclass(frozen=True)
class TargetOriginSourcesBlocksOptions:
    """Holds the `enable_target_origin_sources_blocks` global option."""

    # Whether the target-origin sources-blocks feature is enabled.
    enable: bool
12✔
1136

1137

1138
@rule
async def extract_enable_target_origin_sources_blocks(
    global_options: GlobalOptions,
) -> TargetOriginSourcesBlocksOptions:
    """Lift the `enable_target_origin_sources_blocks` global option into its own type."""
    enabled = global_options.enable_target_origin_sources_blocks
    return TargetOriginSourcesBlocksOptions(enable=enabled)
1145

1146

1147
@rule
async def calc_source_block_mapping(
    targets: AllTargets,
    options: TargetOriginSourcesBlocksOptions,
) -> FilenameTargetSourceBlocksMapping:
    """Group every target's originating source blocks by the file containing them."""
    if not options.enable:
        # Feature disabled: short-circuit rather than walking all targets.
        return FilenameTargetSourceBlocksMapping()

    by_file: dict[str, list[TargetSourceBlocks]] = {}
    for tgt in targets:
        for path, blocks in tgt.origin_sources_blocks.items():
            by_file.setdefault(path, []).append(
                TargetSourceBlocks(address=tgt.address, source_blocks=blocks)
            )

    return FilenameTargetSourceBlocksMapping(
        (path, tuple(entries)) for path, entries in by_file.items()
    )
1165

1166

1167
class FilesWithSourceBlocks(frozenset[str]):
    """The set of file paths that have at least one target-owned source block."""
12✔
1169

1170

1171
@rule
async def calc_files_with_sources_blocks(
    mapping: FilenameTargetSourceBlocksMapping,
) -> FilesWithSourceBlocks:
    """Project the source-block mapping down to just the set of file names."""
    filenames = mapping.keys()
    return FilesWithSourceBlocks(filenames)
×
1176

1177

1178
@dataclass(frozen=True)
class OwnersRequest:
    """A request for the owners of a set of file paths.

    The resulting owners will be those identified for the sources as well as those
    for sources_blocks. Do not include a source filename in sources if it is also
    present in sources_blocks, as that will be redundant, and cancel the finer level
    of detail gained by inspecting the originating text blocks.

    TODO: This is widely used as an effectively-public API. It should probably move to
    `pants.engine.target`.
    """

    # File paths whose owning targets should be found.
    sources: tuple[str, ...]
    # Per-file text blocks, for finer-grained (sub-file) ownership matching.
    sources_blocks: FrozenDict[str, TextBlocks] = FrozenDict()
    # What to do when a source has no owner: ignore, warn, or error.
    owners_not_found_behavior: GlobMatchErrorBehavior = GlobMatchErrorBehavior.ignore
    # If true, apply the global target-filtering options when resolving candidate targets.
    filter_by_global_options: bool = False
    # If true, a target also matches when its own BUILD file appears in `sources`.
    match_if_owning_build_file_included_in_sources: bool = False
12✔
1196

1197

1198
@dataclass(frozen=True)
class TextBlocksOwnersRequest:
    """Request for file text block owners."""

    # The file whose text blocks are being queried.
    filename: str
    # The text regions (e.g. diff hunks) to match against registered source blocks.
    text_blocks: TextBlocks
12✔
1204

1205

1206
class Owners(FrozenOrderedSet[Address]):
    """The addresses of the targets that own a requested set of files or text blocks."""
12✔
1208

1209

1210
@rule
def find_source_blocks_owners(
    request: TextBlocksOwnersRequest, mapping: FilenameTargetSourceBlocksMapping
) -> Owners:
    """Find the targets whose registered source blocks overlap the requested text blocks."""
    registered = mapping.get(request.filename)
    if not registered:
        # No target registered any source block for this file.
        return Owners()

    # Example: when computing changed targets from `git diff`, `request.text_blocks` holds the
    # changed regions of the file, and `registered` holds every target-owned block for it. A
    # target owns a change if any of its blocks is touched by any requested block, so we test
    # every (requested block, target blocks) pair.
    #
    # TODO Use interval tree?
    owners: set[Address] = set()
    for requested_block, target_blocks in itertools.product(request.text_blocks, registered):
        if any(
            owned_block.is_touched_by(requested_block)
            for owned_block in target_blocks.source_blocks
        ):
            owners.add(target_blocks.address)

    return Owners(owners)
×
1235

1236

1237
@rule(desc="Find which targets own certain files", _masked_types=[EnvironmentName])
async def find_owners(
    owners_request: OwnersRequest,
    local_environment_name: ChosenLocalEnvironmentName,
) -> Owners:
    """Compute the addresses of all targets owning the requested files and text blocks.

    Combines three sources of ownership:
    - per-file text-block owners (`owners_request.sources_blocks`),
    - live files matched against expanded targets' `sources` fields,
    - deleted files matched against unexpanded target generators' original `sources` globs.
    """
    # Sub-file ownership: resolve block owners for each file that has text blocks.
    block_owners: tuple[Owners, ...] = (
        await concurrently(
            find_source_blocks_owners(TextBlocksOwnersRequest(filename, blocks), **implicitly())
            for filename, blocks in owners_request.sources_blocks.items()
        )
        if owners_request.sources_blocks
        else ()
    )

    # Determine which of the sources are live and which are deleted.
    sources_paths = await path_globs_to_paths(PathGlobs(owners_request.sources))

    live_files = FrozenOrderedSet(sources_paths.files)
    deleted_files = FrozenOrderedSet(s for s in owners_request.sources if s not in live_files)
    live_dirs = FrozenOrderedSet(os.path.dirname(s) for s in live_files)
    deleted_dirs = FrozenOrderedSet(os.path.dirname(s) for s in deleted_files)

    def create_live_and_deleted_gets(
        *, filter_by_global_options: bool
    ) -> tuple[
        Coroutine[Any, Any, FilteredTargets | Targets],
        Coroutine[Any, Any, UnexpandedTargets],
    ]:
        """Walk up the buildroot looking for targets that would conceivably claim changed sources.

        For live files, we use Targets, which causes generated targets to be used rather than their
        target generators. For deleted files we use UnexpandedTargets, which have the original
        declared `sources` globs from target generators.

        We ignore unrecognized files, which can happen e.g. when finding owners for deleted files.
        """
        live_raw_specs = RawSpecsWithoutFileOwners(
            ancestor_globs=tuple(AncestorGlobSpec(directory=d) for d in live_dirs),
            filter_by_global_options=filter_by_global_options,
            description_of_origin="<owners rule - unused>",
            unmatched_glob_behavior=GlobMatchErrorBehavior.ignore,
        )
        live_get = (
            filter_targets(**implicitly({live_raw_specs: RawSpecsWithoutFileOwners}))
            if filter_by_global_options
            else resolve_targets(**implicitly({live_raw_specs: RawSpecsWithoutFileOwners}))
        )
        deleted_get = resolve_unexpanded_targets(
            **implicitly(
                RawSpecsWithoutFileOwners(
                    ancestor_globs=tuple(AncestorGlobSpec(directory=d) for d in deleted_dirs),
                    filter_by_global_options=filter_by_global_options,
                    description_of_origin="<owners rule - unused>",
                    unmatched_glob_behavior=GlobMatchErrorBehavior.ignore,
                ),
            )
        )
        return live_get, deleted_get

    live_get, deleted_get = create_live_and_deleted_gets(
        filter_by_global_options=owners_request.filter_by_global_options
    )
    live_candidate_tgts, deleted_candidate_tgts = await concurrently(live_get, deleted_get)

    result = set()
    unmatched_sources = set(owners_request.sources)
    # Two passes over the same matching logic: first live files, then deleted files.
    for live in (True, False):
        candidate_tgts: Sequence[Target]
        if live:
            candidate_tgts = live_candidate_tgts
            sources_set = live_files
        else:
            candidate_tgts = deleted_candidate_tgts
            sources_set = deleted_files

        build_file_addresses = await concurrently(  # noqa: PNT30: requires triage
            find_build_file(
                BuildFileAddressRequest(
                    tgt.address, description_of_origin="<owners rule - cannot trigger>"
                )
            )
            for tgt in candidate_tgts
        )

        for candidate_tgt, bfa in zip(candidate_tgts, build_file_addresses):
            matching_files = set(
                candidate_tgt.get(SourcesField).filespec_matcher.matches(list(sources_set))
            )

            # A candidate owns nothing here unless its `sources` matched, or (optionally) its
            # own BUILD file was one of the requested sources.
            if not matching_files and not (
                owners_request.match_if_owning_build_file_included_in_sources
                and bfa.rel_path in sources_set
            ):
                continue

            unmatched_sources -= matching_files
            result.add(candidate_tgt.address)

    if (
        unmatched_sources
        and owners_request.owners_not_found_behavior != GlobMatchErrorBehavior.ignore
    ):
        _log_or_raise_unmatched_owners(
            [PurePath(path) for path in unmatched_sources], owners_request.owners_not_found_behavior
        )

    return Owners(result.union(*block_owners))
×
1344

1345

1346
# -----------------------------------------------------------------------------------------------
1347
# Resolve SourcesField
1348
# -----------------------------------------------------------------------------------------------
1349

1350

1351
@rule
async def extract_unmatched_build_file_globs(
    global_options: GlobalOptions,
) -> UnmatchedBuildFileGlobs:
    """Lift the `unmatched_build_file_globs` global option into its own rule-graph type."""
    behavior = global_options.unmatched_build_file_globs
    return UnmatchedBuildFileGlobs(behavior)
×
1356

1357

1358
class AmbiguousCodegenImplementationsException(Exception):
    """Exception for when there are multiple codegen implementations and it is ambiguous which to
    use."""

    @classmethod
    def create(
        cls,
        generators: Iterable[type[GenerateSourcesRequest]],
        *,
        for_sources_types: Iterable[type[SourcesField]],
    ) -> AmbiguousCodegenImplementationsException:
        # All generators share a single (input, output) pair iff the set of pairs has size 1.
        distinct_paths = {(g.input, g.output) for g in generators}
        example = next(iter(generators))
        input_name = example.input.__name__
        if len(distinct_paths) == 1:
            output_name = example.output.__name__
            return cls(
                f"Multiple registered code generators can generate {output_name} from {input_name}. "
                "It is ambiguous which implementation to use.\n\nPossible implementations:\n\n"
                f"{bullet_list(sorted(g.__name__ for g in generators))}"
            )
        # Otherwise, report only the output types the call site actually asked for.
        possible_output_types = sorted(
            g.output.__name__
            for g in generators
            if issubclass(g.output, tuple(for_sources_types))
        )
        possible_generators_with_output = [
            f"{g.__name__} -> {g.output.__name__}"
            for g in sorted(generators, key=lambda g: g.output.__name__)
        ]
        return cls(
            f"Multiple registered code generators can generate one of "
            f"{possible_output_types} from {input_name}. It is ambiguous which implementation to "
            f"use. This can happen when the call site requests too many different output types "
            f"from the same original protocol sources.\n\nPossible implementations with their "
            f"output type:\n\n"
            f"{bullet_list(possible_generators_with_output)}"
        )
1398

1399

1400
@rule(desc="Hydrate the `sources` field")
async def hydrate_sources(
    request: HydrateSourcesRequest,
    unmatched_build_file_globs: UnmatchedBuildFileGlobs,
    union_membership: UnionMembership,
    env_name: EnvironmentName,
) -> HydratedSources:
    """Resolve a `sources` field's globs into a snapshot, optionally running codegen.

    If the field is not one of `request.for_sources_types` (directly or via a single
    unambiguous code generator), returns an empty `HydratedSources` with
    `sources_type=None`. Raises `AmbiguousCodegenImplementationsException` when codegen
    is enabled and 2+ generators could apply.
    """
    sources_field = request.field

    # First, find if there are any code generators for the input `sources_field`. This will be used
    # to determine if the sources_field is valid or not.
    # We could alternatively use `sources_field.can_generate()`, but we want to error if there are
    # 2+ generators due to ambiguity.
    generate_request_types = union_membership.get(GenerateSourcesRequest)
    relevant_generate_request_types = [
        generate_request_type
        for generate_request_type in generate_request_types
        if isinstance(sources_field, generate_request_type.input)
        and issubclass(generate_request_type.output, request.for_sources_types)
    ]
    if request.enable_codegen and len(relevant_generate_request_types) > 1:
        raise AmbiguousCodegenImplementationsException.create(
            relevant_generate_request_types, for_sources_types=request.for_sources_types
        )
    # At most one relevant generator remains; None means no codegen applies.
    generate_request_type = next(iter(relevant_generate_request_types), None)

    # Now, determine if any of the `for_sources_types` may be used, either because the
    # sources_field is a direct subclass or can be generated into one of the valid types.
    def compatible_with_sources_field(valid_type: type[SourcesField]) -> bool:
        is_instance = isinstance(sources_field, valid_type)
        can_be_generated = (
            request.enable_codegen
            and generate_request_type is not None
            and issubclass(generate_request_type.output, valid_type)
        )
        return is_instance or can_be_generated

    # The first compatible entry in `for_sources_types` wins, so caller-supplied order matters.
    sources_type = next(
        (
            valid_type
            for valid_type in request.for_sources_types
            if compatible_with_sources_field(valid_type)
        ),
        None,
    )
    if sources_type is None:
        return HydratedSources(EMPTY_SNAPSHOT, sources_field.filespec, sources_type=None)

    # Now, hydrate the `globs`. Even if we are going to use codegen, we will need the original
    # protocol sources to be hydrated.
    path_globs = sources_field.path_globs(unmatched_build_file_globs)
    snapshot = await digest_to_snapshot(**implicitly({path_globs: PathGlobs}))
    sources_field.validate_resolved_files(snapshot.files)

    # Finally, return if codegen is not in use; otherwise, run the relevant code generator.
    if not request.enable_codegen or generate_request_type is None:
        return HydratedSources(snapshot, sources_field.filespec, sources_type=sources_type)
    wrapped_protocol_target = await resolve_target(
        WrappedTargetRequest(
            sources_field.address,
            # It's only possible to hydrate sources on a target that we already know exists.
            description_of_origin="<infallible>",
        ),
        **implicitly(),
    )
    req = generate_request_type(snapshot, wrapped_protocol_target.target)
    generated_sources = await generate_sources(
        **implicitly({req: GenerateSourcesRequest, env_name: EnvironmentName})
    )

    return HydratedSources(
        generated_sources.snapshot, sources_field.filespec, sources_type=sources_type
    )
1473

1474

1475
@rule(desc="Resolve `sources` field file names")
async def resolve_source_paths(
    request: SourcesPathsRequest, unmatched_build_file_globs: UnmatchedBuildFileGlobs
) -> SourcesPaths:
    """Expand a `sources` field's globs into concrete file and directory paths.

    Unlike `hydrate_sources`, this only resolves names — no snapshot is captured.
    """
    field = request.field
    globs = field.path_globs(unmatched_build_file_globs)
    resolved = await path_globs_to_paths(globs)
    # Surface glob-match errors (e.g. required sources missing) before returning.
    field.validate_resolved_files(resolved.files)
    return SourcesPaths(files=resolved.files, dirs=resolved.dirs)
×
1484

1485

1486
# -----------------------------------------------------------------------------------------------
1487
# Resolve addresses, including the Dependencies field
1488
# -----------------------------------------------------------------------------------------------
1489

1490

1491
class ParsedDependencies(NamedTuple):
    """Address inputs parsed from a `dependencies` field, split into includes and `!` ignores."""

    addresses: list[AddressInput]
    ignored_addresses: list[AddressInput]
12✔
1494

1495

1496
class TransitiveExcludesNotSupportedError(ValueError):
    """Raised when a `!!` transitive exclude is used on a `dependencies` field whose
    target type does not support transitive excludes."""

    def __init__(
        self,
        *,
        bad_value: str,
        address: Address,
        registered_target_types: Iterable[type[Target]],
        union_membership: UnionMembership,
    ) -> None:
        def allows_transitive_excludes(target_type: type[Target]) -> bool:
            # A target type qualifies only if it has a Dependencies field and that
            # field opts in to transitive excludes.
            if not target_type.class_has_field(Dependencies, union_membership=union_membership):
                return False
            deps_field = target_type.class_get_field(Dependencies, union_membership=union_membership)
            return deps_field.supports_transitive_excludes

        applicable_target_types = sorted(
            target_type.alias
            for target_type in registered_target_types
            if allows_transitive_excludes(target_type)
        )
        super().__init__(
            f"Bad value '{bad_value}' in the `dependencies` field for {address}. "
            "Transitive excludes with `!!` are not supported for this target type. Did you mean "
            "to use a single `!` for a direct exclude?\n\nTransitive excludes work with these "
            f"target types: {applicable_target_types}"
        )
1521

1522

1523
@rule
async def convert_dependencies_request_to_explicitly_provided_dependencies_request(
    request: DependenciesRequest,
) -> ExplicitlyProvidedDependenciesRequest:
    """Strip the deps-traversal predicate from a DependenciesRequest.

    Computing ExplicitlyProvidedDependencies never consults a traversal predicate: it
    just lists the explicit deps of the given field. Dropping the predicate here keeps
    it out of the cache key for that computation, increasing the cache-hit rate.
    """
    # TODO: Maybe require determine_explicitly_provided_dependencies() and deprecate this rule.
    return ExplicitlyProvidedDependenciesRequest(request.field)
×
1536

1537

1538
@rule
async def determine_explicitly_provided_dependencies(
    request: ExplicitlyProvidedDependenciesRequest,
    union_membership: UnionMembership,
    registered_target_types: RegisteredTargetTypes,
    subproject_roots: SubprojectRoots,
) -> ExplicitlyProvidedDependencies:
    """Parse a `dependencies` field's raw strings into resolved include/ignore addresses.

    Entries prefixed with `!` are direct excludes; `!!` marks a transitive exclude,
    which raises `TransitiveExcludesNotSupportedError` if the field doesn't allow it.
    """
    field = request.field
    parse = functools.partial(
        AddressInput.parse,
        relative_to=field.address.spec_path,
        subproject_roots=subproject_roots,
        description_of_origin=(
            f"the `{field.alias}` field from the target {field.address}"
        ),
    )

    includes: list[AddressInput] = []
    ignores: list[AddressInput] = []
    for raw in field.value or ():
        if raw.startswith("!!"):
            # Transitive exclude: only permitted on fields that opt in.
            if not field.supports_transitive_excludes:
                raise TransitiveExcludesNotSupportedError(
                    bad_value=raw,
                    address=field.address,
                    registered_target_types=registered_target_types.types,
                    union_membership=union_membership,
                )
            ignores.append(parse(raw[2:]))
        elif raw.startswith("!"):
            # Direct exclude.
            ignores.append(parse(raw[1:]))
        else:
            includes.append(parse(raw))

    parsed_includes = await concurrently(
        resolve_address(**implicitly({ai: AddressInput})) for ai in includes
    )
    parsed_ignores = await concurrently(
        resolve_address(**implicitly({ai: AddressInput})) for ai in ignores
    )
    return ExplicitlyProvidedDependencies(
        field.address,
        FrozenOrderedSet(sorted(parsed_includes)),
        FrozenOrderedSet(sorted(parsed_ignores)),
    )
1588

1589

1590
async def _fill_parameters(
    field_alias: str,
    consumer_tgt: Target,
    addresses: Iterable[Address],
    target_types_to_generate_requests: TargetTypesToGenerateTargetsRequests,
    field_defaults: FieldDefaults,
    local_environment_name: ChosenLocalEnvironmentName,
) -> tuple[Address, ...]:
    """For each address, fill in missing parametrization parameters from the consumer.

    Looks up each address's target parametrizations and returns the address of the
    subset matching the consuming target.
    """
    # We iterate `addresses` twice below, so an Iterator (single-pass) is not allowed.
    assert not isinstance(addresses, Iterator)

    all_parametrizations = await concurrently(
        resolve_target_parametrizations(
            **implicitly(
                {
                    _TargetParametrizationsRequest(
                        address.maybe_convert_to_target_generator(),
                        description_of_origin=f"the `{field_alias}` field of the target {consumer_tgt.address}",
                    ): _TargetParametrizationsRequest,
                    local_environment_name.val: EnvironmentName,
                }
            )
        )
        for address in addresses
    )

    return tuple(
        parametrizations.get_subset(
            address, consumer_tgt, field_defaults, target_types_to_generate_requests
        ).address
        for address, parametrizations in zip(addresses, all_parametrizations)
    )
1621

1622

1623
@rule(polymorphic=True)
async def infer_dependencies(
    request: InferDependenciesRequest,
    environment_name: EnvironmentName,
) -> InferredDependencies:
    """Polymorphic rule stub: the engine dispatches to the `InferDependenciesRequest`
    union member matching `request`; this body is never executed directly."""
    raise NotImplementedError()
×
1629

1630

1631
@rule(polymorphic=True)
async def validate_dependencies(
    request: ValidateDependenciesRequest,
    environment_name: EnvironmentName,
) -> ValidatedDependencies:
    """Polymorphic rule stub: the engine dispatches to the `ValidateDependenciesRequest`
    union member matching `request`; this body is never executed directly."""
    raise NotImplementedError()
×
1637

1638

1639
@rule(desc="Resolve direct dependencies of target", _masked_types=[EnvironmentName])
async def resolve_dependencies(
    request: DependenciesRequest,
    target_types_to_generate_requests: TargetTypesToGenerateTargetsRequests,
    union_membership: UnionMembership,
    subproject_roots: SubprojectRoots,
    field_defaults: FieldDefaults,
    local_environment_name: ChosenLocalEnvironmentName,
) -> Addresses:
    """Compute the direct dependency addresses of a target's `dependencies` field.

    Combines, in order: generated-target addresses (for target generators), explicitly
    provided deps, inferred deps, and special-cased deps fields; then removes explicit
    and inferred excludes, sorts, and runs any registered dependency validators.
    """
    environment_name = local_environment_name.val
    wrapped_tgt = await resolve_target(
        WrappedTargetRequest(request.field.address, description_of_origin="<infallible>"),
        **implicitly(),
    )
    tgt = wrapped_tgt.target

    # This predicate allows the dep graph to ignore dependencies of selected targets
    # including any explicit deps and any inferred deps.
    # For example, to avoid traversing the deps of package targets.
    if request.should_traverse_deps_predicate(tgt, request.field) == DepsTraversalBehavior.EXCLUDE:
        return Addresses([])

    try:
        explicitly_provided = await determine_explicitly_provided_dependencies(
            **implicitly(request)
        )
    except Exception as e:
        # Re-wrap so the error names the target whose field failed to parse/resolve.
        raise InvalidFieldException(
            f"{tgt.description_of_origin}: Failed to get dependencies for {tgt.address}: {e}"
        )

    # Infer any dependencies (based on `SourcesField` field).
    inference_request_types = cast(
        "Sequence[Type[InferDependenciesRequest]]", union_membership.get(InferDependenciesRequest)
    )
    inferred: tuple[InferredDependencies, ...] = ()
    if inference_request_types:
        relevant_inference_request_types = [
            inference_request_type
            for inference_request_type in inference_request_types
            if inference_request_type.infer_from.is_applicable(tgt)
        ]
        inferred = await concurrently(
            infer_dependencies(
                **implicitly(
                    {
                        inference_request_type(
                            inference_request_type.infer_from.create(tgt)
                        ): InferDependenciesRequest,
                        environment_name: EnvironmentName,
                    },
                )
            )
            for inference_request_type in relevant_inference_request_types
        )

    # If it's a target generator, inject dependencies on all of its generated targets.
    generated_addresses: tuple[Address, ...] = ()
    if target_types_to_generate_requests.is_generator(tgt) and not tgt.address.is_generated_target:
        parametrizations = await resolve_target_parametrizations(
            **implicitly(
                {
                    _TargetParametrizationsRequest(
                        tgt.address.maybe_convert_to_target_generator(),
                        description_of_origin=(
                            f"the target generator {tgt.address.maybe_convert_to_target_generator()}"
                        ),
                    ): _TargetParametrizationsRequest,
                    environment_name: EnvironmentName,
                }
            )
        )
        generated_addresses = tuple(parametrizations.generated_for(tgt.address).keys())

    # See whether any explicitly provided dependencies are parametrized, but with partial/no
    # parameters. If so, fill them in.
    explicitly_provided_includes: Iterable[Address] = explicitly_provided.includes
    if explicitly_provided_includes:
        explicitly_provided_includes = await _fill_parameters(
            request.field.alias,
            tgt,
            explicitly_provided_includes,
            target_types_to_generate_requests,
            field_defaults,
            local_environment_name,
        )
    explicitly_provided_ignores: FrozenOrderedSet[Address] = explicitly_provided.ignores
    if explicitly_provided_ignores:
        explicitly_provided_ignores = FrozenOrderedSet(
            await _fill_parameters(
                request.field.alias,
                tgt,
                tuple(explicitly_provided_ignores),
                target_types_to_generate_requests,
                field_defaults,
                local_environment_name,
            )
        )

    # If the target has `SpecialCasedDependencies`, such as the `archive` target having
    # `files` and `packages` fields, then we possibly include those too. We don't want to always
    # include those dependencies because they should often be excluded from the result due to
    # being handled elsewhere in the calling code. So, we only include fields based on
    # the should_traverse_deps_predicate.

    # Unlike normal, we don't use `tgt.get()` because there may be >1 subclass of
    # SpecialCasedDependencies.
    special_cased_fields = tuple(
        field
        for field in tgt.field_values.values()
        if isinstance(field, SpecialCasedDependencies)
        and request.should_traverse_deps_predicate(tgt, field) == DepsTraversalBehavior.INCLUDE
    )
    # We can't use `resolve_unparsed_address_inputs()` directly due to a graph cycle.
    special_cased = await concurrently(
        resolve_address(
            **implicitly(
                {
                    AddressInput.parse(
                        addr,
                        relative_to=tgt.address.spec_path,
                        subproject_roots=subproject_roots,
                        description_of_origin=(
                            f"the `{special_cased_field.alias}` field from the target {tgt.address}"
                        ),
                    ): AddressInput
                }
            )
        )
        for special_cased_field in special_cased_fields
        for addr in special_cased_field.to_unparsed_address_inputs().values
    )

    # Combine the explicit ignores with every inferred exclude set.
    excluded = explicitly_provided_ignores.union(
        *itertools.chain(deps.exclude for deps in inferred)
    )
    result = Addresses(
        sorted(
            {
                addr
                for addr in (
                    *generated_addresses,
                    *explicitly_provided_includes,
                    *itertools.chain.from_iterable(deps.include for deps in inferred),
                    *special_cased,
                )
                if addr not in excluded
            }
        )
    )

    # Validate dependencies.
    _ = await concurrently(
        validate_dependencies(
            **implicitly(
                {
                    vd_request_type(
                        vd_request_type.field_set_type.create(tgt),  # type: ignore[misc]
                        result,
                    ): ValidateDependenciesRequest,
                    environment_name: EnvironmentName,
                }
            ),
        )
        for vd_request_type in union_membership.get(ValidateDependenciesRequest)
        if vd_request_type.field_set_type.is_applicable(tgt)  # type: ignore[misc]
    )

    return result
×
1808

1809

1810
# -----------------------------------------------------------------------------------------------
1811
# Dynamic Field defaults
1812
# -----------------------------------------------------------------------------------------------
1813

1814

1815
@rule(polymorphic=True)
async def field_default_factory(request: FieldDefaultFactoryRequest) -> FieldDefaultFactoryResult:
    """Polymorphic rule stub: the engine dispatches to the `FieldDefaultFactoryRequest`
    union member matching `request`; this body is never executed directly."""
    raise NotImplementedError()
×
1818

1819

1820
@rule
async def field_defaults(union_membership: UnionMembership) -> FieldDefaults:
    """Collect the dynamic default factory for every registered field-default request."""
    factory_request_types = list(union_membership.get(FieldDefaultFactoryRequest))
    factory_results = await concurrently(
        field_default_factory(**implicitly({request_type(): FieldDefaultFactoryRequest}))
        for request_type in factory_request_types
    )
    # Pair each request type's field type with the factory its rule produced.
    return FieldDefaults(
        FrozenDict(
            {
                request_type.field_type: result.default_factory
                for request_type, result in zip(factory_request_types, factory_results)
            }
        )
    )
1833

1834

1835
# -----------------------------------------------------------------------------------------------
1836
# Find applicable field sets
1837
# -----------------------------------------------------------------------------------------------
1838

1839

1840
@rule
async def find_valid_field_sets(
    request: FieldSetsPerTargetRequest, union_membership: UnionMembership
) -> FieldSetsPerTarget:
    """For each requested target, create every applicable field set of the requested
    superclass."""
    candidate_types = union_membership.get(request.field_set_superclass)

    def field_sets_for(target):
        # All field-set types whose required fields the target actually has.
        return [
            candidate.create(target) for candidate in candidate_types if candidate.is_applicable(target)
        ]

    return FieldSetsPerTarget(field_sets_for(target) for target in request.targets)
1853

1854

1855
class GenerateFileTargets(GenerateTargetsRequest):
    """Request to expand a `TargetFilesGenerator` into individual file-level targets."""

    generate_from = TargetFilesGenerator
12✔
1857

1858

1859
@rule(polymorphic=True)
async def generate_file_target_settings(
    request: TargetFilesGeneratorSettingsRequest,
    environment_name: EnvironmentName,
) -> TargetFilesGeneratorSettings:
    """Polymorphic rule stub: the engine dispatches to the `TargetFilesGeneratorSettingsRequest`
    union member matching `request`; this body is never executed directly."""
    raise NotImplementedError()
×
1865

1866

1867
@rule
async def generate_file_targets(
    request: GenerateFileTargets,
    union_membership: UnionMembership,
    environment_name: EnvironmentName,
) -> GeneratedTargets:
    """Generate one target per file matched by a `TargetFilesGenerator`'s sources field.

    Resolves the generator's `MultipleSourcesField` globs, optionally consults the
    generator's settings request for whether generated targets should depend on all
    siblings, then delegates to `_generate_file_level_targets`.
    """
    try:
        sources_paths = await resolve_source_paths(
            SourcesPathsRequest(request.generator[MultipleSourcesField]), **implicitly()
        )
    except Exception as e:
        # Re-raise with the generator's origin and address so the user can find the bad field.
        tgt = request.generator
        fld = tgt[MultipleSourcesField]
        raise InvalidFieldException(
            softwrap(
                f"""
                {tgt.description_of_origin}: Invalid field value for {fld.alias!r} in target {tgt.address}:
                {e}
                """
            )
        ) from e

    add_dependencies_on_all_siblings = False
    if request.generator.settings_request_cls:
        generator_settings = await generate_file_target_settings(
            **implicitly(
                {
                    request.generator.settings_request_cls(): TargetFilesGeneratorSettingsRequest,
                    environment_name: EnvironmentName,
                }
            )
        )
        add_dependencies_on_all_siblings = generator_settings.add_dependencies_on_all_siblings

    return _generate_file_level_targets(
        type(request.generator).generated_target_cls,
        request.generator,
        sources_paths.files,
        request.template_address,
        request.template,
        request.overrides,
        union_membership,
        add_dependencies_on_all_siblings=add_dependencies_on_all_siblings,
    )
1911

1912

1913
def rules():
    """Return this module's rule registrations plus the file-target generation union rule."""
    registrations = list(collect_rules())
    registrations.append(UnionRule(GenerateTargetsRequest, GenerateFileTargets))
    return registrations
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2025 Coveralls, Inc