pantsbuild / pants / build 20328535594
18 Dec 2025 06:46AM UTC coverage: 57.969% (-22.3%) from 80.295%
Pull Request #22954: free up disk space in runner image (merge ccc9c5409 into 407284c67)

39083 of 67421 relevant lines covered (57.97%)

0.91 hits per line

Source File: /src/python/pants/backend/kotlin/dependency_inference/kotlin_parser.py (0.0% of lines covered)
# Copyright 2022 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations

import json
import os
from collections.abc import Iterator
from dataclasses import dataclass
from typing import Any

from pants.core.goals.resolves import ExportableTool
from pants.core.util_rules.source_files import SourceFiles
from pants.engine.fs import CreateDigest, Directory, FileContent
from pants.engine.internals.native_engine import AddPrefix, MergeDigests, RemovePrefix
from pants.engine.internals.selectors import concurrently
from pants.engine.intrinsics import (
    add_prefix,
    create_digest,
    execute_process,
    get_digest_contents,
    merge_digests,
    remove_prefix,
)
from pants.engine.process import (
    FallibleProcessResult,
    ProductDescription,
    fallible_to_exec_result_or_raise,
)
from pants.engine.rules import collect_rules, implicitly, rule
from pants.engine.unions import UnionRule
from pants.jvm.compile import ClasspathEntry
from pants.jvm.jdk_rules import InternalJdk, JdkRequest, JvmProcess, prepare_jdk_environment
from pants.jvm.resolve.common import ArtifactRequirements
from pants.jvm.resolve.coordinate import Coordinate
from pants.jvm.resolve.coursier_fetch import ToolClasspathRequest, materialize_classpath_for_tool
from pants.jvm.resolve.jvm_tool import GenerateJvmLockfileFromTool, JvmToolBase
from pants.util.frozendict import FrozenDict
from pants.util.logging import LogLevel
from pants.util.resources import read_resource

_PARSER_KOTLIN_VERSION = "1.6.20"


class KotlinParser(JvmToolBase):
    options_scope = "kotlin-parser"
    help = "Internal tool for parsing Kotlin sources to identify dependencies"

    default_version = _PARSER_KOTLIN_VERSION
    default_artifacts = (
        "org.jetbrains.kotlin:kotlin-compiler:{version}",
        "org.jetbrains.kotlin:kotlin-stdlib:{version}",
        "com.google.code.gson:gson:2.9.0",
    )
    default_lockfile_resource = (
        "pants.backend.kotlin.dependency_inference",
        "kotlin_parser.lock",
    )


@dataclass(frozen=True)
class KotlinImport:
    name: str
    alias: str | None
    is_wildcard: bool

    @classmethod
    def from_json_dict(cls, d: dict) -> KotlinImport:
        return cls(
            name=d["name"],
            alias=d.get("alias"),
            is_wildcard=d["isWildcard"],
        )

    def to_debug_json_dict(self) -> dict[str, Any]:
        return {
            "name": self.name,
            "alias": self.alias,
            "is_wildcard": self.is_wildcard,
        }


@dataclass(frozen=True)
class KotlinSourceDependencyAnalysis:
    package: str
    imports: frozenset[KotlinImport]
    named_declarations: frozenset[str]
    consumed_symbols_by_scope: FrozenDict[str, frozenset[str]]
    scopes: frozenset[str]

    def fully_qualified_consumed_symbols(self) -> Iterator[str]:
        """Consumed symbols qualified in various ways.

        This method _will_ introduce false-positives, because we will assume that the symbol could
        have been provided by any wildcard import in scope, as well as being declared in the current
        package.
        """

        def scope_and_parents(scope: str) -> Iterator[str]:
            while True:
                yield scope
                if scope == "":
                    break
                scope, _, _ = scope.rpartition(".")

        for consumption_scope, consumed_symbols in self.consumed_symbols_by_scope.items():
            parent_scopes = tuple(scope_and_parents(consumption_scope))
            for symbol in consumed_symbols:
                symbol_rel_prefix, dot_in_symbol, symbol_rel_suffix = symbol.partition(".")
                if not self.scopes or dot_in_symbol:
                    # TODO: Similar to #13545: we assume that a symbol containing a dot might already
                    # be fully qualified.
                    yield symbol
                for parent_scope in parent_scopes:
                    if parent_scope in self.scopes:
                        # A package declaration is a parent of this scope, and any of its symbols
                        # could be in scope.
                        yield f"{parent_scope}.{symbol}"

                    for imp in self.imports if parent_scope == self.package else ():
                        if imp.is_wildcard:
                            # There is a wildcard import in a parent scope.
                            yield f"{imp.name}.{symbol}"
                        if dot_in_symbol:
                            # If the parent scope has an import which defines the first token of the
                            # symbol, then it might be a relative usage of an import.
                            if imp.alias:
                                if imp.alias == symbol_rel_prefix:
                                    yield f"{imp.name}.{symbol_rel_suffix}"
                            elif imp.name.endswith(f".{symbol_rel_prefix}"):
                                yield f"{imp.name}.{symbol_rel_suffix}"

    @classmethod
    def from_json_dict(cls, d: dict) -> KotlinSourceDependencyAnalysis:
        return cls(
            package=d["package"],
            imports=frozenset(KotlinImport.from_json_dict(i) for i in d["imports"]),
            named_declarations=frozenset(d["namedDeclarations"]),
            consumed_symbols_by_scope=FrozenDict(
                {k: frozenset(v) for k, v in d["consumedSymbolsByScope"].items()}
            ),
            scopes=frozenset(d["scopes"]),
        )

    def to_debug_json_dict(self) -> dict[str, Any]:
        return {
            "package": self.package,
            "imports": [imp.to_debug_json_dict() for imp in self.imports],
            "named_declarations": list(self.named_declarations),
            "consumed_symbols_by_scope": {
                k: sorted(v) for k, v in self.consumed_symbols_by_scope.items()
            },
            "scopes": list(self.scopes),
        }


@dataclass(frozen=True)
class FallibleKotlinSourceDependencyAnalysisResult:
    process_result: FallibleProcessResult


class KotlinParserCompiledClassfiles(ClasspathEntry):
    pass


@rule(level=LogLevel.DEBUG)
async def analyze_kotlin_source_dependencies(
    processor_classfiles: KotlinParserCompiledClassfiles,
    source_files: SourceFiles,
    tool: KotlinParser,
) -> FallibleKotlinSourceDependencyAnalysisResult:
    # Use JDK 8 due to https://youtrack.jetbrains.com/issue/KTIJ-17192 and https://youtrack.jetbrains.com/issue/KT-37446.
    request = JdkRequest("zulu:8.0.392")
    env = await prepare_jdk_environment(**implicitly({request: JdkRequest}))
    jdk = InternalJdk.from_jdk_environment(env)

    if len(source_files.files) > 1:
        raise ValueError(
            f"analyze_kotlin_source_dependencies expects sources with exactly 1 source file, but found {len(source_files.snapshot.files)}."
        )
    elif len(source_files.files) == 0:
        raise ValueError(
            "analyze_kotlin_source_dependencies expects sources with exactly 1 source file, but found none."
        )
    source_prefix = "__source_to_analyze"
    source_path = os.path.join(source_prefix, source_files.files[0])
    processorcp_relpath = "__processorcp"
    toolcp_relpath = "__toolcp"

    (
        tool_classpath,
        prefixed_source_files_digest,
    ) = await concurrently(
        materialize_classpath_for_tool(
            ToolClasspathRequest(lockfile=(GenerateJvmLockfileFromTool.create(tool)))
        ),
        add_prefix(AddPrefix(source_files.snapshot.digest, source_prefix)),
    )

    extra_immutable_input_digests = {
        toolcp_relpath: tool_classpath.digest,
        processorcp_relpath: processor_classfiles.digest,
    }

    analysis_output_path = "__source_analysis.json"

    process_result = await execute_process(
        **implicitly(
            JvmProcess(
                jdk=jdk,
                classpath_entries=[
                    *tool_classpath.classpath_entries(toolcp_relpath),
                    processorcp_relpath,
                ],
                argv=[
                    "org.pantsbuild.backend.kotlin.dependency_inference.KotlinParserKt",
                    analysis_output_path,
                    source_path,
                ],
                input_digest=prefixed_source_files_digest,
                extra_immutable_input_digests=extra_immutable_input_digests,
                output_files=(analysis_output_path,),
                extra_nailgun_keys=extra_immutable_input_digests,
                description=f"Analyzing {source_files.files[0]}",
                level=LogLevel.DEBUG,
            )
        )
    )

    return FallibleKotlinSourceDependencyAnalysisResult(process_result=process_result)


@rule(level=LogLevel.DEBUG)
async def resolve_fallible_result_to_analysis(
    fallible_result: FallibleKotlinSourceDependencyAnalysisResult,
) -> KotlinSourceDependencyAnalysis:
    desc = ProductDescription("Kotlin source dependency analysis failed.")
    result = await fallible_to_exec_result_or_raise(
        **implicitly(
            {fallible_result.process_result: FallibleProcessResult, desc: ProductDescription}
        )
    )
    analysis_contents = await get_digest_contents(result.output_digest)
    analysis = json.loads(analysis_contents[0].content)
    return KotlinSourceDependencyAnalysis.from_json_dict(analysis)


@rule
async def setup_kotlin_parser_classfiles(
    jdk: InternalJdk, tool: KotlinParser
) -> KotlinParserCompiledClassfiles:
    dest_dir = "classfiles"

    parser_source_content = read_resource(
        "pants.backend.kotlin.dependency_inference", "KotlinParser.kt"
    )
    if not parser_source_content:
        raise AssertionError("Unable to find KotlinParser.kt resource.")

    parser_source = FileContent("KotlinParser.kt", parser_source_content)

    tool_classpath, parser_classpath, source_digest = await concurrently(
        materialize_classpath_for_tool(
            ToolClasspathRequest(
                prefix="__toolcp",
                artifact_requirements=ArtifactRequirements.from_coordinates(
                    [
                        Coordinate(
                            group="org.jetbrains.kotlin",
                            artifact="kotlin-compiler-embeddable",
                            version=tool.version,
                        ),
                    ]
                ),
            )
        ),
        materialize_classpath_for_tool(
            ToolClasspathRequest(
                prefix="__parsercp", lockfile=(GenerateJvmLockfileFromTool.create(tool))
            )
        ),
        create_digest(CreateDigest([parser_source, Directory(dest_dir)])),
    )

    merged_digest = await merge_digests(
        MergeDigests(
            (
                tool_classpath.digest,
                parser_classpath.digest,
                source_digest,
            )
        )
    )

    process_result = await fallible_to_exec_result_or_raise(
        **implicitly(
            JvmProcess(
                jdk=jdk,
                classpath_entries=tool_classpath.classpath_entries(),
                argv=[
                    "org.jetbrains.kotlin.cli.jvm.K2JVMCompiler",
                    "-classpath",
                    ":".join(parser_classpath.classpath_entries()),
                    "-d",
                    dest_dir,
                    parser_source.path,
                ],
                input_digest=merged_digest,
                output_directories=(dest_dir,),
                description="Compile Kotlin parser for dependency inference with kotlinc",
                level=LogLevel.DEBUG,
                # NB: We do not use nailgun for this process, since it is launched exactly once.
                use_nailgun=False,
            )
        )
    )
    stripped_classfiles_digest = await remove_prefix(
        RemovePrefix(process_result.output_digest, dest_dir)
    )
    return KotlinParserCompiledClassfiles(digest=stripped_classfiles_digest)


def rules():
    return (
        *collect_rules(),
        UnionRule(ExportableTool, KotlinParser),
    )
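
As a rough illustration of how the parser's JSON output maps onto these dataclasses, the sketch below feeds a hand-written payload into KotlinSourceDependencyAnalysis.from_json_dict and prints the candidates produced by fully_qualified_consumed_symbols(). The payload values (package "example.app", the kotlin.io wildcard import, the consumed symbol "println") are hypothetical; only the key names mirror what from_json_dict reads. It also assumes a working Pants development checkout whose pants package (including the native engine) is importable, and it is not part of kotlin_parser.py or its tests.

# Hypothetical usage sketch, not part of kotlin_parser.py.
# Assumes the pants package is importable (e.g. a built Pants dev checkout on PYTHONPATH).
from pants.backend.kotlin.dependency_inference.kotlin_parser import (
    KotlinSourceDependencyAnalysis,
)

# A hand-written payload shaped like the parser's JSON output,
# using the keys read by from_json_dict above.
payload = {
    "package": "example.app",
    "imports": [{"name": "kotlin.io", "alias": None, "isWildcard": True}],
    "namedDeclarations": ["example.app.Main"],
    "consumedSymbolsByScope": {"example.app": ["println"]},
    "scopes": ["example.app"],
}

analysis = KotlinSourceDependencyAnalysis.from_json_dict(payload)

# For the symbol "println" consumed in scope "example.app", the method yields both the
# package-qualified candidate and the wildcard-import candidate (the deliberate
# over-approximation the docstring warns about):
#   ['example.app.println', 'kotlin.io.println']
print(sorted(set(analysis.fully_qualified_consumed_symbols())))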