
pantsbuild / pants · build 21537129705 (push · github · web-flow)

31 Jan 2026 02:22AM UTC — coverage: 80.331% (+0.06%) from 80.275%
Remove MultiGet from the codebase (#23057)

Removes the migration goal. Anyone still migrating needs to
be on 2.31 or earlier.

Removes the migration guide and switches references to it to
point to the version in the 2.30 docs in perpetuity.

Also gets rid of the "await in loop" custom flake8 check,
as it doesn't work with call-by-name, and getting it to do so
would be complex and not worth the effort. This may have been
worthwhile nannying in the early days of the engine, but it
does not seem important now.

A followup will remove remaining traces of `Get`. A further
followup after that may remove engine code that is no 
longer needed.
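For readers landing here without context: `MultiGet` was the engine's old fan-out
API for awaiting several `Get` requests at once, and call-by-name replaces both by
invoking the underlying rule functions directly. Below is a minimal before/after
sketch, not code from this PR: the rule names and signatures are hypothetical, and
it assumes a version where both APIs still exist (2.31 or earlier, per the note
above) and the call-by-name names `concurrently` and the `create_digest` intrinsic.

from pants.engine.fs import CreateDigest, Digest, FileContent
from pants.engine.intrinsics import create_digest
from pants.engine.rules import Get, MultiGet, concurrently, rule


@rule  # old style, removed here: request a Digest by output type, fan out with MultiGet
async def digests_via_get(files: tuple[FileContent, ...]) -> tuple[Digest, ...]:
    return await MultiGet(Get(Digest, CreateDigest([fc])) for fc in files)


@rule  # call-by-name style: call the intrinsic directly; `concurrently` replaces MultiGet
async def digests_by_name(files: tuple[FileContent, ...]) -> tuple[Digest, ...]:
    return await concurrently(create_digest(CreateDigest([fc])) for fc in files)

The file below already uses the new style, e.g. the `digest_to_snapshot(**implicitly(...))`
call in `test_workspace_in_goal_rule`.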

16 of 20 new or added lines in 3 files covered. (80.0%)

5 existing lines in 1 file now uncovered.

78558 of 97793 relevant lines covered (80.33%)

3.36 hits per line

Source File: /src/python/pants/engine/fs_test.py — 97.59% covered
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).

from __future__ import annotations

import hashlib
import os
import pkgutil
import shutil
import socket
import ssl
import tarfile
import time
from collections.abc import Callable, Iterable
from dataclasses import dataclass
from http.server import BaseHTTPRequestHandler
from io import BytesIO
from pathlib import Path
from typing import Any

import pytest

from pants.engine.console import Console
from pants.engine.fs import (
    EMPTY_DIGEST,
    EMPTY_SNAPSHOT,
    AddPrefix,
    CreateDigest,
    Digest,
    DigestContents,
    DigestEntries,
    DigestSubset,
    Directory,
    DownloadFile,
    FileContent,
    FileDigest,
    FileEntry,
    GlobMatchErrorBehavior,
    MergeDigests,
    PathGlobs,
    PathGlobsAndRoot,
    PathMetadataRequest,
    PathMetadataResult,
    RemovePrefix,
    Snapshot,
    SnapshotDiff,
    SymlinkEntry,
    Workspace,
)
from pants.engine.goal import Goal, GoalSubsystem
from pants.engine.internals.native_engine import PathMetadata, PathMetadataKind, PathNamespace
from pants.engine.internals.scheduler import ExecutionError
from pants.engine.intrinsics import digest_to_snapshot
from pants.engine.rules import goal_rule, implicitly, rule
from pants.testutil.rule_runner import QueryRule, RuleRunner
from pants.util.collections import assert_single_element
from pants.util.contextutil import http_server, temporary_dir
from pants.util.dirutil import relative_symlink, safe_file_dump


@pytest.fixture
def rule_runner() -> RuleRunner:
    return RuleRunner(
        rules=[
            QueryRule(Digest, [CreateDigest]),
            QueryRule(DigestContents, [PathGlobs]),
            QueryRule(DigestEntries, [Digest]),
            QueryRule(DigestEntries, [PathGlobs]),
            QueryRule(Snapshot, [CreateDigest]),
            QueryRule(Snapshot, [DigestSubset]),
            QueryRule(Snapshot, [PathGlobs]),
            QueryRule(PathMetadataResult, [PathMetadataRequest]),
        ],
        isolated_local_store=True,
    )

ROLAND_FILE_DIGEST = FileDigest(
1✔
79
    "693d8db7b05e99c6b7a7c0616456039d89c555029026936248085193559a0b5d", 16
80
)
81
ROLAND_DIGEST = Digest("63949aa823baf765eff07b946050d76ec0033144c785a94d3ebd82baa931cd16", 80)
1✔
82

83

84
def prime_store_with_roland_digest(rule_runner: RuleRunner) -> None:
1✔
85
    """Prime lmdb_store with a directory of a file named 'roland' and contents 'European
86
    Burmese'."""
87
    with temporary_dir() as temp_dir:
1✔
88
        Path(temp_dir, "roland").write_text("European Burmese")
1✔
89
        snapshot = rule_runner.scheduler.capture_snapshots(
1✔
90
            (PathGlobsAndRoot(PathGlobs(["*"]), temp_dir),)
91
        )[0]
92
    assert snapshot.files == ("roland",)
1✔
93
    assert snapshot.digest == ROLAND_DIGEST
1✔
94

95
    # NB: Capturing a Snapshot avoids persisting directory entries to disk, so we have to ensure
96
    # that independently.
97
    rule_runner.scheduler.ensure_directory_digest_persisted(snapshot.digest)
1✔
98

99

100
def setup_fs_test_tar(rule_runner: RuleRunner) -> None:
1✔
101
    """Extract fs_test.tar into the rule_runner's build root.
102

103
    Note that we use a tar, rather than rule_runner.write_files(), because it has symlinks set up a
104
    certain way.
105

106
    Contents:
107

108
        4.txt
109
        a
110
        ├── 3.txt
111
        ├── 4.txt.ln -> ../4.txt
112
        └── b
113
            ├── 1.txt
114
            └── 2
115
        c.ln -> a/b
116
        d.ln -> a
117

118
    NB: The RuleRunner injects a BUILDROOT file in the build_root.
119
    """
120
    data = pkgutil.get_data("pants.engine.internals", "fs_test_data/fs_test.tar")
1✔
121
    assert data is not None
1✔
122
    io = BytesIO()
1✔
123
    io.write(data)
1✔
124
    io.seek(0)
1✔
125
    with tarfile.open(fileobj=io) as tf:
1✔
126
        tf.extractall(rule_runner.build_root)
1✔
127

128

129
FS_TAR_ALL_FILES = (
1✔
130
    "BUILDROOT",  # injected by RuleRunner, not present in tar
131
    "4.txt",
132
    "a/3.txt",
133
    "a/4.txt.ln",
134
    "a/b/1.txt",
135
    "a/b/2",
136
    "c.ln/1.txt",
137
    "c.ln/2",
138
    "d.ln/3.txt",
139
    "d.ln/4.txt.ln",
140
    "d.ln/b/1.txt",
141
    "d.ln/b/2",
142
)
143
FS_TAR_ALL_DIRS = ("a", "a/b", "c.ln", "d.ln", "d.ln/b")
1✔
144

145

146
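# Poll `assertion_fn` up to `count` times, sleeping 0.1s longer before each retry;
# the FS-invalidation tests below use this to wait for the engine to notice
# filesystem changes.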
def try_with_backoff(assertion_fn: Callable[[], bool], count: int = 4) -> bool:
    for i in range(count):
        time.sleep(0.1 * i)
        if assertion_fn():
            return True
    return False  # uncovered in this build

# -----------------------------------------------------------------------------------------------
# `FileContent`
# -----------------------------------------------------------------------------------------------


def test_file_content_non_bytes():
    with pytest.raises(TypeError) as exc:
        FileContent(path="4.txt", content="four")

    assert str(exc.value) == "Expected 'content' to be bytes, but got str"


# -----------------------------------------------------------------------------------------------
# `PathGlobs`, including `GlobMatchErrorBehavior` and symlink handling
# -----------------------------------------------------------------------------------------------

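# Shared helper: expand `globs` into a Snapshot and assert on the matched files and
# dirs. Expectations are sorted because a Snapshot returns its contents sorted.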
def assert_path_globs(
    rule_runner: RuleRunner,
    globs: Iterable[str],
    *,
    expected_files: Iterable[str],
    expected_dirs: Iterable[str],
) -> None:
    snapshot = rule_runner.request(Snapshot, [PathGlobs(globs)])
    assert snapshot.files == tuple(sorted(expected_files))
    assert snapshot.dirs == tuple(sorted(expected_dirs))
    if expected_files or expected_dirs:
        assert snapshot.digest != EMPTY_DIGEST
    else:
        assert snapshot.digest == EMPTY_DIGEST


def test_path_globs_literal_files(rule_runner: RuleRunner) -> None:
    setup_fs_test_tar(rule_runner)
    assert_path_globs(rule_runner, ["4.txt"], expected_files=["4.txt"], expected_dirs=[])
    assert_path_globs(
        rule_runner,
        ["a/b/1.txt", "a/b/2"],
        expected_files=["a/b/1.txt", "a/b/2"],
        expected_dirs=["a", "a/b"],
    )
    assert_path_globs(rule_runner, ["c.ln/2"], expected_files=["c.ln/2"], expected_dirs=["c.ln"])
    assert_path_globs(
        rule_runner,
        ["d.ln/b/1.txt"],
        expected_files=["d.ln/b/1.txt"],
        expected_dirs=["d.ln", "d.ln/b"],
    )
    assert_path_globs(rule_runner, ["a/3.txt"], expected_files=["a/3.txt"], expected_dirs=["a"])
    assert_path_globs(rule_runner, ["z.fake"], expected_files=[], expected_dirs=[])


def test_path_globs_literal_directories(rule_runner: RuleRunner) -> None:
    setup_fs_test_tar(rule_runner)
    assert_path_globs(rule_runner, ["c.ln"], expected_files=[], expected_dirs=["c.ln"])
    assert_path_globs(rule_runner, ["a"], expected_files=[], expected_dirs=["a"])
    assert_path_globs(rule_runner, ["a/b"], expected_files=[], expected_dirs=["a", "a/b"])
    assert_path_globs(rule_runner, ["z"], expected_files=[], expected_dirs=[])


def test_path_globs_glob_pattern(rule_runner: RuleRunner) -> None:
    setup_fs_test_tar(rule_runner)
    assert_path_globs(rule_runner, ["*.txt"], expected_files=["4.txt"], expected_dirs=[])
    assert_path_globs(
        rule_runner, ["a/b/*.txt"], expected_files=["a/b/1.txt"], expected_dirs=["a", "a/b"]
    )
    assert_path_globs(
        rule_runner, ["c.ln/*.txt"], expected_files=["c.ln/1.txt"], expected_dirs=["c.ln"]
    )
    assert_path_globs(
        rule_runner, ["a/b/*"], expected_files=["a/b/1.txt", "a/b/2"], expected_dirs=["a", "a/b"]
    )
    assert_path_globs(rule_runner, ["*/0.txt"], expected_files=[], expected_dirs=[])
    assert_path_globs(
        rule_runner,
        ["*"],
        expected_files=["BUILDROOT", "4.txt"],
        expected_dirs=["a", "c.ln", "d.ln"],
    )
    assert_path_globs(
        rule_runner,
        ["*/*"],
        expected_files=[
            "a/3.txt",
            "a/4.txt.ln",
            "c.ln/1.txt",
            "c.ln/2",
            "d.ln/3.txt",
            "d.ln/4.txt.ln",
        ],
        expected_dirs=FS_TAR_ALL_DIRS,
    )
    assert_path_globs(
        rule_runner,
        ["*/*/*"],
        expected_files=["a/b/1.txt", "a/b/2", "d.ln/b/1.txt", "d.ln/b/2"],
        expected_dirs=["a", "a/b", "d.ln", "d.ln/b"],
    )


def test_path_globs_rglob_pattern(rule_runner: RuleRunner) -> None:
    setup_fs_test_tar(rule_runner)
    assert_path_globs(
        rule_runner,
        ["**/*.txt.ln"],
        expected_files=["a/4.txt.ln", "d.ln/4.txt.ln"],
        expected_dirs=["a", "d.ln"],
    )
    assert_path_globs(
        rule_runner,
        ["**/*.txt"],
        expected_files=[
            "4.txt",
            "a/3.txt",
            "a/b/1.txt",
            "c.ln/1.txt",
            "d.ln/3.txt",
            "d.ln/b/1.txt",
        ],
        expected_dirs=FS_TAR_ALL_DIRS,
    )
    assert_path_globs(
        rule_runner,
        ["**/3.t*t"],
        expected_files=["a/3.txt", "d.ln/3.txt"],
        expected_dirs=["a", "d.ln"],
    )
    assert_path_globs(rule_runner, ["**/*.fake"], expected_files=[], expected_dirs=[])
    assert_path_globs(
        rule_runner, ["**"], expected_files=FS_TAR_ALL_FILES, expected_dirs=FS_TAR_ALL_DIRS
    )
    assert_path_globs(
        rule_runner, ["**/*"], expected_files=FS_TAR_ALL_FILES, expected_dirs=FS_TAR_ALL_DIRS
    )
    assert_path_globs(
        rule_runner,
        ["a/**"],
        expected_files=["a/3.txt", "a/4.txt.ln", "a/b/1.txt", "a/b/2"],
        expected_dirs=["a", "a/b"],
    )
    assert_path_globs(
        rule_runner,
        ["d.ln/**"],
        expected_files=["d.ln/3.txt", "d.ln/4.txt.ln", "d.ln/b/1.txt", "d.ln/b/2"],
        expected_dirs=["d.ln", "d.ln/b"],
    )
    assert_path_globs(rule_runner, ["a/**/3.txt"], expected_files=["a/3.txt"], expected_dirs=["a"])
    assert_path_globs(
        rule_runner, ["a/**/b/1.txt"], expected_files=["a/b/1.txt"], expected_dirs=["a", "a/b"]
    )
    assert_path_globs(rule_runner, ["a/**/2"], expected_files=["a/b/2"], expected_dirs=["a", "a/b"])


def test_path_globs_ignore_pattern(rule_runner: RuleRunner) -> None:
    setup_fs_test_tar(rule_runner)
    assert_path_globs(
        rule_runner,
        ["**", "!*.ln"],
        expected_files=["BUILDROOT", "4.txt", "a/3.txt", "a/b/1.txt", "a/b/2"],
        expected_dirs=["a", "a/b"],
    )


def test_path_globs_ignore_sock(rule_runner: RuleRunner) -> None:
    sock_path = os.path.join(rule_runner.build_root, "sock.sock")
    with socket.socket(socket.AF_UNIX) as sock:
        sock.bind(sock_path)
    assert os.path.exists(sock_path)
    assert not os.path.isfile(sock_path)

    rule_runner.write_files({"non-sock.txt": ""})
    assert_path_globs(
        rule_runner,
        ["**"],
        expected_files=["BUILDROOT", "non-sock.txt"],
        expected_dirs=[],
    )


def test_path_globs_remove_duplicates(rule_runner: RuleRunner) -> None:
    setup_fs_test_tar(rule_runner)
    assert_path_globs(
        rule_runner, ["*", "**"], expected_files=FS_TAR_ALL_FILES, expected_dirs=FS_TAR_ALL_DIRS
    )
    assert_path_globs(
        rule_runner,
        ["**/*.txt", "a/b/1.txt", "4.txt"],
        expected_files=[
            "4.txt",
            "a/3.txt",
            "c.ln/1.txt",
            "d.ln/3.txt",
            "a/b/1.txt",
            "d.ln/b/1.txt",
        ],
        expected_dirs=FS_TAR_ALL_DIRS,
    )


def test_path_globs_parent_link(rule_runner: RuleRunner) -> None:
    setup_fs_test_tar(rule_runner)
    assert_path_globs(
        rule_runner,
        ["c.ln/../3.txt"],
        expected_files=["c.ln/../3.txt"],
        expected_dirs=["c.ln", "c.ln/.."],
    )


def test_path_globs_symlink_escaping_errors(rule_runner: RuleRunner) -> None:
    setup_fs_test_tar(rule_runner)
    link = os.path.join(rule_runner.build_root, "subdir/escaping")
    dest = os.path.join(rule_runner.build_root, "../../..")
    relative_symlink(dest, link)

    exc_reg = r".*While expanding link.*subdir/escaping.*may not traverse outside of the buildroot"
    with pytest.raises(Exception, match=exc_reg):
        assert_path_globs(rule_runner, ["subdir/escaping"], expected_files=[], expected_dirs=[])


def test_path_globs_symlink_dead(rule_runner: RuleRunner) -> None:
    setup_fs_test_tar(rule_runner)
    link = os.path.join(rule_runner.build_root, "subdir/dead")
    dest = os.path.join(rule_runner.build_root, "this_file_does_not_exist")
    relative_symlink(dest, link)

    # Because the symlink does not escape, it should be ignored, rather than cause an error.
    assert_path_globs(rule_runner, ["subdir/dead"], expected_files=[], expected_dirs=[])


def test_path_globs_symlink_dead_nested(rule_runner: RuleRunner) -> None:
    setup_fs_test_tar(rule_runner)
    link = os.path.join(rule_runner.build_root, "subdir/dead")
    dest = os.path.join(
        rule_runner.build_root, "this_folder_does_not_exist/this_file_does_not_exist"
    )
    relative_symlink(dest, link)

    # Because the symlink does not escape, it should be ignored, rather than cause an error.
    assert_path_globs(rule_runner, ["subdir/dead"], expected_files=[], expected_dirs=[])


def test_path_globs_symlink_loop(rule_runner: RuleRunner) -> None:
    # Matching a recursive glob against a link which points to its parent directory would cause
    # infinite recursion, so we eagerly error instead.
    setup_fs_test_tar(rule_runner)
    link = os.path.join(rule_runner.build_root, "subdir/link.ln")
    dest = os.path.join(rule_runner.build_root, "subdir")
    relative_symlink(dest, link)

    exc_reg = r".*Maximum link depth exceeded"
    with pytest.raises(Exception, match=exc_reg):
        assert_path_globs(rule_runner, ["**"], expected_files=[], expected_dirs=[])


def test_path_globs_to_digest_contents(rule_runner: RuleRunner) -> None:
    setup_fs_test_tar(rule_runner)

    def get_contents(globs: Iterable[str]) -> set[FileContent]:
        return set(rule_runner.request(DigestContents, [PathGlobs(globs)]))

    assert get_contents(["4.txt", "a/4.txt.ln"]) == {
        FileContent("4.txt", b"four\n"),
        FileContent("a/4.txt.ln", b"four\n"),
    }
    assert get_contents(["c.ln/../3.txt"]) == {FileContent("c.ln/../3.txt", b"three\n")}

    # Directories are empty.
    assert not get_contents(["a/b"])
    assert not get_contents(["c.ln"])


def test_path_globs_to_digest_entries(rule_runner: RuleRunner) -> None:
    setup_fs_test_tar(rule_runner)

    def get_entries(globs: Iterable[str]) -> set[FileEntry | Directory | SymlinkEntry]:
        return set(rule_runner.request(DigestEntries, [PathGlobs(globs)]))

    assert get_entries(["4.txt", "a/4.txt.ln"]) == {
        FileEntry(
            "4.txt",
            FileDigest("ab929fcd5594037960792ea0b98caf5fdaf6b60645e4ef248c28db74260f393e", 5),
        ),
        FileEntry(
            "a/4.txt.ln",
            FileDigest("ab929fcd5594037960792ea0b98caf5fdaf6b60645e4ef248c28db74260f393e", 5),
        ),
    }
    assert get_entries(["c.ln/../3.txt"]) == {
        FileEntry(
            "c.ln/../3.txt",
            FileDigest("f6936912184481f5edd4c304ce27c5a1a827804fc7f329f43d273b8621870776", 6),
        )
    }

    # Directories are empty.
    assert get_entries(["a/b"]) == {Directory("a/b")}
    assert get_entries(["c.ln"]) == {Directory("c.ln")}


def test_digest_entries_handles_empty_directory(rule_runner: RuleRunner) -> None:
    digest = rule_runner.request(
        Digest, [CreateDigest([Directory("a/b"), FileContent("a/foo.txt", b"four\n")])]
    )
    entries = rule_runner.request(DigestEntries, [digest])
    assert entries == DigestEntries(
        [
            Directory("a/b"),
            FileEntry(
                "a/foo.txt",
                FileDigest("ab929fcd5594037960792ea0b98caf5fdaf6b60645e4ef248c28db74260f393e", 5),
            ),
        ]
    )


def test_digest_entries_handles_symlinks(rule_runner: RuleRunner) -> None:
    digest = rule_runner.request(
        Digest,
        [
            CreateDigest(
                [
                    SymlinkEntry("a.ln", "a.txt"),
                    SymlinkEntry("b.ln", "b.txt"),
                    FileContent("a.txt", b"four\n"),
                ]
            )
        ],
    )
    entries = rule_runner.request(DigestEntries, [digest])
    assert entries == DigestEntries(
        [
            SymlinkEntry("a.ln", "a.txt"),
            FileEntry(
                "a.txt",
                FileDigest("ab929fcd5594037960792ea0b98caf5fdaf6b60645e4ef248c28db74260f393e", 5),
            ),
            SymlinkEntry("b.ln", "b.txt"),
        ]
    )


@pytest.mark.parametrize(
    "create_digest, files, dirs",
    [
        pytest.param(
            CreateDigest(
                [
                    FileContent("file.txt", b"four\n"),
                    SymlinkEntry("symlink", "file.txt"),
                    SymlinkEntry("relsymlink", "./file.txt"),
                    SymlinkEntry("a/symlink", "../file.txt"),
                    SymlinkEntry("a/b/symlink", "../../file.txt"),
                ]
            ),
            ("a/b/symlink", "a/symlink", "file.txt", "relsymlink", "symlink"),
            ("a", "a/b"),
            id="simple",
        ),
        pytest.param(
            CreateDigest(
                [
                    FileContent("file.txt", b"four\n"),
                    SymlinkEntry(
                        "circular1", "./circular1"
                    ),  # After so many traversals, we give up
                    SymlinkEntry("circular2", "circular2"),  # After so many traversals, we give up
                    SymlinkEntry("chain1", "chain2"),
                    SymlinkEntry("chain2", "chain3"),
                    SymlinkEntry("chain3", "chain1"),
                    SymlinkEntry(
                        "a/symlink", "file.txt"
                    ),  # looks for a/file.txt, which doesn't exist
                    SymlinkEntry("a/too-far.ln", "../../file.txt"),  # went too far up
                    SymlinkEntry("a/parent", ".."),
                    SymlinkEntry("too-far.ln", "../file.txt"),  # went too far up
                    SymlinkEntry("absolute1.ln", str(Path(__file__).resolve())),  # absolute path
                    SymlinkEntry("absolute2.ln", "/file.txt"),
                ]
            ),
            ("file.txt",),
            ("a",),
            id="ignored",
        ),
        pytest.param(
            CreateDigest(
                [
                    FileContent("file.txt", b"four\n"),
                    SymlinkEntry("a/b/parent-file.ln", "../../file.txt"),
                    SymlinkEntry("dirlink", "a"),
                ]
            ),
            ("a/b/parent-file.ln", "dirlink/b/parent-file.ln", "file.txt"),
            ("a", "a/b", "dirlink", "dirlink/b"),
            id="parentdir-in-symlink-target",
        ),
        pytest.param(
            CreateDigest(
                [
                    FileContent("a/file.txt", b"four\n"),
                    SymlinkEntry("dirlink", "a"),
                    SymlinkEntry("double-dirlink", "dirlink"),
                ]
            ),
            ("a/file.txt", "dirlink/file.txt", "double-dirlink/file.txt"),
            ("a", "dirlink", "double-dirlink"),
            id="double-dirlink",
        ),
        pytest.param(
            CreateDigest(
                [
                    FileContent("a/file.txt", b"four\n"),
                    SymlinkEntry("a/self", "."),
                ]
            ),
            tuple(f"a/{'self/' * count}file.txt" for count in range(64)),
            ("a",),
            id="self-dir",
        ),
    ],
)
def test_snapshot_and_contents_are_symlink_oblivious(
    rule_runner: RuleRunner,
    create_digest: CreateDigest,
    files: tuple[str, ...],
    dirs: tuple[str, ...],
) -> None:
    digest = rule_runner.request(Digest, [create_digest])
    snapshot = rule_runner.request(Snapshot, [digest])
    assert snapshot.files == files
    assert snapshot.dirs == dirs
    contents = rule_runner.request(DigestContents, [digest])
    assert tuple(content.path for content in contents) == files


def test_glob_match_error_behavior(rule_runner: RuleRunner, caplog) -> None:
    setup_fs_test_tar(rule_runner)
    test_name = f"{__name__}.{test_glob_match_error_behavior.__name__}()"

    def evaluate_path_globs(globs: Iterable[str], error_behavior: GlobMatchErrorBehavior) -> None:
        pg = PathGlobs(
            globs,
            glob_match_error_behavior=error_behavior,
            description_of_origin=(
                test_name if error_behavior != GlobMatchErrorBehavior.ignore else None
            ),
        )
        rule_runner.request(Snapshot, [pg])

    with pytest.raises(Exception) as exc:
        evaluate_path_globs(["not-a-file.txt"], GlobMatchErrorBehavior.error)
    assert f'Unmatched glob from {test_name}: "not-a-file.txt"' in str(exc.value)

    with pytest.raises(Exception) as exc:
        evaluate_path_globs(["not-a-file.txt", "!ignore.txt"], GlobMatchErrorBehavior.error)
    assert f'Unmatched glob from {test_name}: "not-a-file.txt", exclude: "ignore.txt"' in str(
        exc.value
    )

    # TODO: get Rust logging working with RuleRunner.
    # caplog.clear()
    # evaluate_path_globs(["not-a-file.txt"], GlobMatchErrorBehavior.warn)
    # assert len(caplog.records) == 1
    # assert f'Unmatched glob from {test_name}: "not-a-file.txt"' in caplog.text

    caplog.clear()
    evaluate_path_globs(["not-a-file.txt"], GlobMatchErrorBehavior.ignore)
    assert len(caplog.records) == 0

# -----------------------------------------------------------------------------------------------
626
# `PathGlobsAndRoot`
627
# -----------------------------------------------------------------------------------------------
628

629

630
def test_snapshot_from_outside_buildroot(rule_runner: RuleRunner) -> None:
1✔
631
    with temporary_dir() as temp_dir:
1✔
632
        Path(temp_dir, "roland").write_text("European Burmese")
1✔
633
        snapshot = rule_runner.scheduler.capture_snapshots(
1✔
634
            [PathGlobsAndRoot(PathGlobs(["*"]), temp_dir)]
635
        )[0]
636
    assert snapshot.files == ("roland",)
1✔
637
    assert snapshot.digest == ROLAND_DIGEST
1✔
638

639

640
def test_multiple_snapshots_from_outside_buildroot(rule_runner: RuleRunner) -> None:
1✔
641
    with temporary_dir() as temp_dir:
1✔
642
        Path(temp_dir, "roland").write_text("European Burmese")
1✔
643
        Path(temp_dir, "susannah").write_text("I don't know")
1✔
644
        snapshots = rule_runner.scheduler.capture_snapshots(
1✔
645
            [
646
                PathGlobsAndRoot(PathGlobs(["roland"]), temp_dir),
647
                PathGlobsAndRoot(PathGlobs(["susannah"]), temp_dir),
648
                PathGlobsAndRoot(PathGlobs(["doesnotexist"]), temp_dir),
649
            ]
650
        )
651
    assert len(snapshots) == 3
1✔
652
    assert snapshots[0].files == ("roland",)
1✔
653
    assert snapshots[0].digest == ROLAND_DIGEST
1✔
654
    assert snapshots[1].files == ("susannah",)
1✔
655
    assert snapshots[1].digest == Digest(
1✔
656
        "d3539cfc21eb4bab328ca9173144a8e932c515b1b9e26695454eeedbc5a95f6f", 82
657
    )
658
    assert snapshots[2] == EMPTY_SNAPSHOT
1✔
659

660

661
def test_snapshot_from_outside_buildroot_failure(rule_runner: RuleRunner) -> None:
1✔
662
    with temporary_dir() as temp_dir:
1✔
663
        with pytest.raises(Exception) as exc:
1✔
664
            rule_runner.scheduler.capture_snapshots(
1✔
665
                [PathGlobsAndRoot(PathGlobs(["*"]), os.path.join(temp_dir, "doesnotexist"))]
666
            )
667
    assert "doesnotexist" in str(exc.value)
1✔
668

669

670
# -----------------------------------------------------------------------------------------------
671
# `CreateDigest`
672
# -----------------------------------------------------------------------------------------------
673

674

675
def test_create_empty_directory(rule_runner: RuleRunner) -> None:
1✔
676
    res = rule_runner.request(Snapshot, [CreateDigest([Directory("a/")])])
1✔
677
    assert res.dirs == ("a",)
1✔
678
    assert not res.files
1✔
679
    assert res.digest != EMPTY_DIGEST
1✔
680

681
    res = rule_runner.request(
1✔
682
        Snapshot, [CreateDigest([Directory("x/y/z"), Directory("m"), Directory("m/n")])]
683
    )
684
    assert res.dirs == ("m", "m/n", "x", "x/y", "x/y/z")
1✔
685
    assert not res.files
1✔
686
    assert res.digest != EMPTY_DIGEST
1✔
687

688

689
def test_create_digest_with_file_entries(rule_runner: RuleRunner) -> None:
1✔
690
    # Retrieve some known FileEntry's from the test tar.
691
    setup_fs_test_tar(rule_runner)
1✔
692
    file_entries = rule_runner.request(DigestEntries, [PathGlobs(["4.txt", "a/4.txt.ln"])])
1✔
693

694
    # Make a snapshot with just those files.
695
    snapshot = rule_runner.request(Snapshot, [CreateDigest(file_entries)])
1✔
696
    assert snapshot.dirs == ("a",)
1✔
697
    assert snapshot.files == ("4.txt", "a/4.txt.ln")
1✔
698
    assert snapshot.digest != EMPTY_DIGEST
1✔
699

700

701
# -----------------------------------------------------------------------------------------------
702
# `MergeDigests`
703
# -----------------------------------------------------------------------------------------------
704

705

706
def test_merge_digests(rule_runner: RuleRunner) -> None:
1✔
707
    with temporary_dir() as temp_dir:
1✔
708
        Path(temp_dir, "roland").write_text("European Burmese")
1✔
709
        Path(temp_dir, "susannah").write_text("Not sure actually")
1✔
710
        (
1✔
711
            empty_snapshot,
712
            roland_snapshot,
713
            susannah_snapshot,
714
            both_snapshot,
715
        ) = rule_runner.scheduler.capture_snapshots(
716
            (
717
                PathGlobsAndRoot(PathGlobs(["doesnotmatch"]), temp_dir),
718
                PathGlobsAndRoot(PathGlobs(["roland"]), temp_dir),
719
                PathGlobsAndRoot(PathGlobs(["susannah"]), temp_dir),
720
                PathGlobsAndRoot(PathGlobs(["*"]), temp_dir),
721
            )
722
        )
723

724
    empty_merged = rule_runner.request(Digest, [MergeDigests((empty_snapshot.digest,))])
1✔
725
    assert empty_snapshot.digest == empty_merged
1✔
726

727
    roland_merged = rule_runner.request(
1✔
728
        Digest, [MergeDigests((roland_snapshot.digest, empty_snapshot.digest))]
729
    )
730
    assert roland_snapshot.digest == roland_merged
1✔
731

732
    both_merged = rule_runner.request(
1✔
733
        Digest, [MergeDigests((roland_snapshot.digest, susannah_snapshot.digest))]
734
    )
735
    assert both_snapshot.digest == both_merged
1✔
736

737

738
# -----------------------------------------------------------------------------------------------
739
# `DigestSubset`
740
# -----------------------------------------------------------------------------------------------
741

742

743
def generate_original_digest(rule_runner: RuleRunner) -> Digest:
1✔
744
    files = [
1✔
745
        FileContent(path, b"dummy content")
746
        for path in [
747
            "a.txt",
748
            "b.txt",
749
            "c.txt",
750
            "subdir/a.txt",
751
            "subdir/b.txt",
752
            "subdir2/a.txt",
753
            "subdir2/nested_subdir/x.txt",
754
        ]
755
    ]
756
    return rule_runner.request(
1✔
757
        Digest,
758
        [CreateDigest(files)],
759
    )
760

761

762
def test_digest_subset_empty(rule_runner: RuleRunner) -> None:
1✔
763
    subset_snapshot = rule_runner.request(
1✔
764
        Snapshot, [DigestSubset(generate_original_digest(rule_runner), PathGlobs(()))]
765
    )
766
    assert subset_snapshot.digest == EMPTY_DIGEST
1✔
767
    assert subset_snapshot.files == ()
1✔
768
    assert subset_snapshot.dirs == ()
1✔
769

770

771
def test_digest_subset_globs(rule_runner: RuleRunner) -> None:
1✔
772
    subset_snapshot = rule_runner.request(
1✔
773
        Snapshot,
774
        [
775
            DigestSubset(
776
                generate_original_digest(rule_runner),
777
                PathGlobs(("a.txt", "c.txt", "subdir2/**")),
778
            )
779
        ],
780
    )
781
    assert set(subset_snapshot.files) == {
1✔
782
        "a.txt",
783
        "c.txt",
784
        "subdir2/a.txt",
785
        "subdir2/nested_subdir/x.txt",
786
    }
787
    assert set(subset_snapshot.dirs) == {"subdir2", "subdir2/nested_subdir"}
1✔
788

789
    expected_files = [
1✔
790
        FileContent(path, b"dummy content")
791
        for path in [
792
            "a.txt",
793
            "c.txt",
794
            "subdir2/a.txt",
795
            "subdir2/nested_subdir/x.txt",
796
        ]
797
    ]
798
    subset_digest = rule_runner.request(Digest, [CreateDigest(expected_files)])
1✔
799
    assert subset_snapshot.digest == subset_digest
1✔
800

801

802
def test_digest_subset_globs_2(rule_runner: RuleRunner) -> None:
1✔
803
    subset_snapshot = rule_runner.request(
1✔
804
        Snapshot,
805
        [
806
            DigestSubset(
807
                generate_original_digest(rule_runner), PathGlobs(("a.txt", "c.txt", "subdir2/*"))
808
            )
809
        ],
810
    )
811
    assert set(subset_snapshot.files) == {"a.txt", "c.txt", "subdir2/a.txt"}
1✔
812
    assert set(subset_snapshot.dirs) == {"subdir2", "subdir2/nested_subdir"}
1✔
813

814

815
def test_digest_subset_nonexistent_filename_globs(rule_runner: RuleRunner) -> None:
1✔
816
    # We behave according to the `GlobMatchErrorBehavior`.
817
    original_digest = generate_original_digest(rule_runner)
1✔
818
    globs = ["some_file_not_in_snapshot.txt", "a.txt"]
1✔
819
    subset_snapshot = rule_runner.request(
1✔
820
        Snapshot, [DigestSubset(original_digest, PathGlobs(globs))]
821
    )
822
    assert set(subset_snapshot.files) == {"a.txt"}
1✔
823
    expected_digest = rule_runner.request(
1✔
824
        Digest, [CreateDigest([FileContent("a.txt", b"dummy content")])]
825
    )
826
    assert subset_snapshot.digest == expected_digest
1✔
827

828
    # TODO: Fix this to actually error.
829
    # with pytest.raises(ExecutionError):
830
    #     rule_runner.request(
831
    #         Snapshot,
832
    #         [
833
    #             DigestSubset(
834
    #                 original_digest,
835
    #                 PathGlobs(
836
    #                     globs,
837
    #                     glob_match_error_behavior=GlobMatchErrorBehavior.error,
838
    #                     conjunction=GlobExpansionConjunction.all_match,
839
    #                     description_of_origin="test",
840
    #                 ),
841
    #             )
842
    #         ],
843
    #     )
844

845

# -----------------------------------------------------------------------------------------------
# `Digest` -> `Snapshot`
# -----------------------------------------------------------------------------------------------


def test_lift_digest_to_snapshot(rule_runner: RuleRunner) -> None:
    prime_store_with_roland_digest(rule_runner)
    snapshot = rule_runner.request(Snapshot, [ROLAND_DIGEST])
    assert snapshot.files == ("roland",)
    assert snapshot.digest == ROLAND_DIGEST


def test_error_lifting_file_digest_to_snapshot(rule_runner: RuleRunner) -> None:
    prime_store_with_roland_digest(rule_runner)
    # A file digest is not a directory digest. Here, we hash the file that was primed as part of
    # that directory, and show that we can't turn it into a Snapshot.
    text = b"European Burmese"
    hasher = hashlib.sha256()
    hasher.update(text)
    digest = Digest(fingerprint=hasher.hexdigest(), serialized_bytes_length=len(text))
    with pytest.raises(ExecutionError) as exc:
        rule_runner.request(Snapshot, [digest])
    assert "unknown directory" in str(exc.value)


# -----------------------------------------------------------------------------------------------
# `AddPrefix` and `RemovePrefix`
# -----------------------------------------------------------------------------------------------


def test_add_prefix(rule_runner: RuleRunner) -> None:
    digest = rule_runner.request(
        Digest,
        [CreateDigest([FileContent("main.ext", b""), FileContent("subdir/sub.ext", b"")])],
    )

    # Two components.
    output_digest = rule_runner.request(Digest, [AddPrefix(digest, "outer_dir/middle_dir")])
    snapshot = rule_runner.request(Snapshot, [output_digest])
    assert sorted(snapshot.files) == [
        "outer_dir/middle_dir/main.ext",
        "outer_dir/middle_dir/subdir/sub.ext",
    ]
    assert sorted(snapshot.dirs) == [
        "outer_dir",
        "outer_dir/middle_dir",
        "outer_dir/middle_dir/subdir",
    ]

    # Empty.
    output_digest = rule_runner.request(Digest, [AddPrefix(digest, "")])
    assert digest == output_digest

    # Illegal.
    with pytest.raises(Exception, match=r"The `prefix` must be relative."):
        rule_runner.request(Digest, [AddPrefix(digest, "../something")])


def test_remove_prefix(rule_runner: RuleRunner) -> None:
    relevant_files = (
        "characters/dark_tower/roland",
        "characters/dark_tower/susannah",
    )
    all_files = (
        "books/dark_tower/gunslinger",
        "characters/altered_carbon/kovacs",
        *relevant_files,
        "index",
    )

    with temporary_dir() as temp_dir:
        safe_file_dump(os.path.join(temp_dir, "index"), "books\ncharacters\n")
        safe_file_dump(
            os.path.join(temp_dir, "characters", "altered_carbon", "kovacs"),
            "Envoy",
            makedirs=True,
        )

        tower_dir = os.path.join(temp_dir, "characters", "dark_tower")
        safe_file_dump(os.path.join(tower_dir, "roland"), "European Burmese", makedirs=True)
        safe_file_dump(os.path.join(tower_dir, "susannah"), "Not sure actually", makedirs=True)

        safe_file_dump(
            os.path.join(temp_dir, "books", "dark_tower", "gunslinger"),
            "1982",
            makedirs=True,
        )

        snapshot, snapshot_with_extra_files = rule_runner.scheduler.capture_snapshots(
            [
                PathGlobsAndRoot(PathGlobs(["characters/dark_tower/*"]), temp_dir),
                PathGlobsAndRoot(PathGlobs(["**"]), temp_dir),
            ]
        )

        # Check that we got the full snapshots that we expect
        assert snapshot.files == relevant_files
        assert snapshot_with_extra_files.files == all_files

        # Strip empty prefix:
        zero_prefix_stripped_digest = rule_runner.request(
            Digest, [RemovePrefix(snapshot.digest, "")]
        )
        assert snapshot.digest == zero_prefix_stripped_digest

        # Strip a non-empty prefix shared by all files:
        stripped_digest = rule_runner.request(
            Digest, [RemovePrefix(snapshot.digest, "characters/dark_tower")]
        )
        assert stripped_digest == Digest(
            fingerprint="71e788fc25783c424db555477071f5e476d942fc958a5d06ffc1ed223f779a8c",
            serialized_bytes_length=162,
        )

        expected_snapshot = assert_single_element(
            rule_runner.scheduler.capture_snapshots([PathGlobsAndRoot(PathGlobs(["*"]), tower_dir)])
        )
        assert expected_snapshot.files == ("roland", "susannah")
        assert stripped_digest == expected_snapshot.digest

        # Try to strip a prefix which isn't shared by all files:
        with pytest.raises(Exception) as exc:
            rule_runner.request(
                Digest,
                [RemovePrefix(snapshot_with_extra_files.digest, "characters/dark_tower")],
            )
        assert (
            "Cannot strip prefix characters/dark_tower from root directory (Digest "
            "with hash Fingerprint<28c47f77867f0c8d577d2ada2f06b03fc8e5ef2d780e8942713b26c5e3f434b8>)"
            " - root directory contained non-matching directory named: books and file named: index"
        ) in str(exc.value)

# -----------------------------------------------------------------------------------------------
980
# `DownloadFile`
981
# -----------------------------------------------------------------------------------------------
982

983

984
@pytest.fixture
1✔
985
def downloads_rule_runner() -> RuleRunner:
1✔
986
    return RuleRunner(rules=[QueryRule(Snapshot, [DownloadFile])], isolated_local_store=True)
1✔
987

988

989
class StubHandler(BaseHTTPRequestHandler):
1✔
990
    response_text = b"Hello, client!"
1✔
991

992
    def do_HEAD(self):
1✔
993
        self.send_headers()
×
994

995
    def do_GET(self):
1✔
996
        self.send_headers()
1✔
997
        self.wfile.write(self.response_text)
1✔
998

999
    def send_headers(self):
1✔
1000
        code = 200 if self.path == "/file.txt" else 404
1✔
1001
        self.send_response(code)
1✔
1002
        self.send_header("Content-Type", "text/utf-8")
1✔
1003
        self.send_header("Content-Length", f"{len(self.response_text)}")
1✔
1004
        self.end_headers()
1✔
1005

1006

1007
def stub_erroring_handler(error_count_value: int) -> type[BaseHTTPRequestHandler]:
1✔
1008
    """Return a handler that errors once mid-download before succeeding for the next GET.
1009

1010
    This function returns an anonymous class so that each call can create a new instance with its
1011
    own error counter.
1012
    """
1013
    error_num = 1
1✔
1014

1015
    class StubErroringHandler(BaseHTTPRequestHandler):
1✔
1016
        error_count = error_count_value
1✔
1017
        response_text = b"Hello, client!"
1✔
1018

1019
        def do_HEAD(self):
1✔
1020
            self.send_headers()
×
1021

1022
        def do_GET(self):
1✔
1023
            self.send_headers()
1✔
1024
            nonlocal error_num
1025
            if error_num <= self.error_count:
1✔
1026
                msg = f"Returning error {error_num}"
1✔
1027
                error_num += 1
1✔
1028
                raise Exception(msg)
1✔
1029
            self.wfile.write(self.response_text)
1✔
1030

1031
        def send_headers(self):
1✔
1032
            code = 200 if self.path == "/file.txt" else 404
1✔
1033
            self.send_response(code)
1✔
1034
            self.send_header("Content-Type", "text/utf-8")
1✔
1035
            self.send_header("Content-Length", f"{len(self.response_text)}")
1✔
1036
            self.end_headers()
1✔
1037

1038
    return StubErroringHandler
1✔
1039

1040

1041
DOWNLOADS_FILE_DIGEST = FileDigest(
1✔
1042
    "8fcbc50cda241aee7238c71e87c27804e7abc60675974eaf6567aa16366bc105", 14
1043
)
1044
DOWNLOADS_EXPECTED_DIRECTORY_DIGEST = Digest(
1✔
1045
    "4c9cf91fcd7ba1abbf7f9a0a1c8175556a82bee6a398e34db3284525ac24a3ad", 84
1046
)
1047
ROLAND_DOWNLOAD_DIGEST = Digest(
1✔
1048
    "9341f76bef74170bedffe51e4f2e233f61786b7752d21c2339f8ee6070eba819", 82
1049
)
1050

1051

1052
def test_download_valid(downloads_rule_runner: RuleRunner) -> None:
1✔
1053
    with http_server(StubHandler) as port:
1✔
1054
        snapshot = downloads_rule_runner.request(
1✔
1055
            Snapshot, [DownloadFile(f"http://localhost:{port}/file.txt", DOWNLOADS_FILE_DIGEST)]
1056
        )
1057
    assert snapshot.files == ("file.txt",)
1✔
1058
    assert snapshot.digest == DOWNLOADS_EXPECTED_DIRECTORY_DIGEST
1✔
1059

1060

1061
def test_download_missing_file(downloads_rule_runner: RuleRunner) -> None:
1✔
1062
    with pytest.raises(ExecutionError) as exc:
1✔
1063
        with http_server(StubHandler) as port:
1✔
1064
            downloads_rule_runner.request(
1✔
1065
                Snapshot, [DownloadFile(f"http://localhost:{port}/notfound", DOWNLOADS_FILE_DIGEST)]
1066
            )
1067
    assert "404" in str(exc.value)
1✔
1068

1069

1070
def test_download_body_error_retry(downloads_rule_runner: RuleRunner) -> None:
1✔
1071
    with http_server(stub_erroring_handler(1)) as port:
1✔
1072
        snapshot = downloads_rule_runner.request(
1✔
1073
            Snapshot, [DownloadFile(f"http://localhost:{port}/file.txt", DOWNLOADS_FILE_DIGEST)]
1074
        )
1075
    assert snapshot.files == ("file.txt",)
1✔
1076
    assert snapshot.digest == DOWNLOADS_EXPECTED_DIRECTORY_DIGEST
1✔
1077

1078

1079
def test_download_body_error_retry_eventually_fails(downloads_rule_runner: RuleRunner) -> None:
1✔
1080
    # Returns one more error than the retry will allow.
1081
    downloads_rule_runner.set_options(
1✔
1082
        ["--file-downloads-max-attempts=4", "--file-downloads-retry-delay=0.001"]
1083
    )
1084
    with http_server(stub_erroring_handler(5)) as port:
1✔
1085
        with pytest.raises(Exception):
1✔
1086
            _ = downloads_rule_runner.request(
1✔
1087
                Snapshot, [DownloadFile(f"http://localhost:{port}/file.txt", DOWNLOADS_FILE_DIGEST)]
1088
            )
1089

1090

1091
def test_download_wrong_digest(downloads_rule_runner: RuleRunner) -> None:
1✔
1092
    file_digest = FileDigest(
1✔
1093
        DOWNLOADS_FILE_DIGEST.fingerprint, DOWNLOADS_FILE_DIGEST.serialized_bytes_length + 1
1094
    )
1095
    with pytest.raises(ExecutionError) as exc:
1✔
1096
        with http_server(StubHandler) as port:
1✔
1097
            downloads_rule_runner.request(
1✔
1098
                Snapshot, [DownloadFile(f"http://localhost:{port}/file.txt", file_digest)]
1099
            )
1100
    assert "wrong digest" in str(exc.value).lower()
1✔
1101

1102

1103
def test_download_file(downloads_rule_runner: RuleRunner) -> None:
1✔
1104
    with temporary_dir() as temp_dir:
1✔
1105
        roland = Path(temp_dir, "roland")
1✔
1106
        roland.write_text("European Burmese")
1✔
1107
        snapshot = downloads_rule_runner.request(
1✔
1108
            Snapshot,
1109
            [DownloadFile(f"file:{roland}", ROLAND_FILE_DIGEST)],
1110
        )
1111

1112
    assert snapshot.files == ("roland",)
1✔
1113
    assert snapshot.digest == ROLAND_DOWNLOAD_DIGEST
1✔
1114

1115

1116
def test_download_caches(downloads_rule_runner: RuleRunner) -> None:
1✔
1117
    # We put the expected content in the store, but because we have never fetched it from this
1118
    # URL, we confirm the URL and attempt to refetch. Once it is cached, it does not need to be
1119
    # refetched.
1120
    prime_store_with_roland_digest(downloads_rule_runner)
1✔
1121
    with temporary_dir() as temp_dir:
1✔
1122
        roland = Path(temp_dir, "roland")
1✔
1123
        roland.write_text("European Burmese")
1✔
1124
        snapshot = downloads_rule_runner.request(
1✔
1125
            Snapshot,
1126
            [DownloadFile(f"file:{roland}", ROLAND_FILE_DIGEST)],
1127
        )
1128

1129
    assert snapshot.files == ("roland",)
1✔
1130
    assert snapshot.digest == ROLAND_DOWNLOAD_DIGEST
1✔
1131

1132

1133
def test_download_https() -> None:
1✔
1134
    # This also tests that the custom certs functionality works.
1135
    with temporary_dir() as temp_dir:
1✔
1136

1137
        def write_resource(name: str) -> Path:
1✔
1138
            path = Path(temp_dir) / name
1✔
1139
            data = pkgutil.get_data("pants.engine.internals", f"fs_test_data/tls/rsa/{name}")
1✔
1140
            assert data is not None
1✔
1141
            path.write_bytes(data)
1✔
1142
            return path
1✔
1143

1144
        server_cert = write_resource("server.crt")
1✔
1145
        server_key = write_resource("server.key")
1✔
1146
        cert_chain = write_resource("server.chain")
1✔
1147

1148
        rule_runner = RuleRunner(
1✔
1149
            rules=[QueryRule(Snapshot, [DownloadFile])],
1150
            isolated_local_store=True,
1151
            ca_certs_path=str(cert_chain),
1152
        )
1153

1154
        ssl_context = ssl.SSLContext()
1✔
1155
        ssl_context.load_cert_chain(certfile=str(server_cert), keyfile=str(server_key))
1✔
1156

1157
        with http_server(StubHandler, ssl_context=ssl_context) as port:
1✔
1158
            snapshot = rule_runner.request(
1✔
1159
                Snapshot,
1160
                [DownloadFile(f"https://localhost:{port}/file.txt", DOWNLOADS_FILE_DIGEST)],
1161
            )
1162

1163
    assert snapshot.files == ("file.txt",)
1✔
1164
    assert snapshot.digest == DOWNLOADS_EXPECTED_DIRECTORY_DIGEST
1✔
1165

1166

1167
# -----------------------------------------------------------------------------------------------
1168
# `Workspace` and `.write_digest()`
1169
# -----------------------------------------------------------------------------------------------
1170

1171

1172
def test_write_digest_scheduler(rule_runner: RuleRunner) -> None:
1✔
1173
    prime_store_with_roland_digest(rule_runner)
1✔
1174

1175
    path = Path(rule_runner.build_root, "roland")
1✔
1176
    assert not path.is_file()
1✔
1177

1178
    rule_runner.scheduler.write_digest(ROLAND_DIGEST)
1✔
1179
    assert path.is_file()
1✔
1180
    assert path.read_text() == "European Burmese"
1✔
1181

1182
    rule_runner.scheduler.write_digest(ROLAND_DIGEST, path_prefix="test/")
1✔
1183
    path = Path(rule_runner.build_root, "test/roland")
1✔
1184
    assert path.is_file()
1✔
1185
    assert path.read_text() == "European Burmese"
1✔
1186

1187

1188
def test_write_digest_workspace(rule_runner: RuleRunner) -> None:
1✔
1189
    workspace = Workspace(rule_runner.scheduler, _enforce_effects=False)
1✔
1190
    digest = rule_runner.request(
1✔
1191
        Digest,
1192
        [CreateDigest([FileContent("a.txt", b"hello"), FileContent("subdir/b.txt", b"goodbye")])],
1193
    )
1194

1195
    path1 = Path(rule_runner.build_root, "a.txt")
1✔
1196
    path2 = Path(rule_runner.build_root, "subdir/b.txt")
1✔
1197
    assert not path1.is_file()
1✔
1198
    assert not path2.is_file()
1✔
1199

1200
    workspace.write_digest(digest)
1✔
1201
    assert path1.is_file()
1✔
1202
    assert path2.is_file()
1✔
1203
    assert path1.read_text() == "hello"
1✔
1204
    assert path2.read_text() == "goodbye"
1✔
1205

1206
    workspace.write_digest(digest, path_prefix="prefix")
1✔
1207
    path1 = Path(rule_runner.build_root, "prefix/a.txt")
1✔
1208
    path2 = Path(rule_runner.build_root, "prefix/subdir/b.txt")
1✔
1209
    assert path1.is_file()
1✔
1210
    assert path2.is_file()
1✔
1211
    assert path1.read_text() == "hello"
1✔
1212
    assert path2.read_text() == "goodbye"
1✔
1213

1214

1215
def test_write_digest_workspace_clear_paths(rule_runner: RuleRunner) -> None:
1✔
1216
    workspace = Workspace(rule_runner.scheduler, _enforce_effects=False)
1✔
1217
    digest_a = rule_runner.request(
1✔
1218
        Digest,
1219
        [CreateDigest([FileContent("newdir/a.txt", b"hello")])],
1220
    )
1221
    digest_b = rule_runner.request(
1✔
1222
        Digest,
1223
        [CreateDigest([FileContent("newdir/b.txt", b"goodbye")])],
1224
    )
1225
    digest_c = rule_runner.request(
1✔
1226
        Digest,
1227
        [CreateDigest([FileContent("newdir/c.txt", b"hello again")])],
1228
    )
1229
    digest_c_root = rule_runner.request(
1✔
1230
        Digest, [CreateDigest([FileContent("c.txt", b"hello again")])]
1231
    )
1232
    digest_d = rule_runner.request(
1✔
1233
        Digest, [CreateDigest([SymlinkEntry("newdir/d.txt", "newdir/a.txt")])]
1234
    )
1235
    all_paths = {name: Path(rule_runner.build_root, f"newdir/{name}.txt") for name in "abcd"}
1✔
1236

1237
    def check(expected_names: set[str]) -> None:
1✔
1238
        for name, path in all_paths.items():
1✔
1239
            expected = name in expected_names
1✔
1240
            assert path.exists() == expected
1✔
1241

1242
    workspace.write_digest(digest_a, clear_paths=())
1✔
1243
    workspace.write_digest(digest_b, clear_paths=())
1✔
1244
    check({"a", "b"})
1✔
1245

1246
    # clear a file
1247
    workspace.write_digest(digest_d, clear_paths=("newdir/b.txt",))
1✔
1248
    check({"a", "d"})
1✔
1249

1250
    # clear a symlink (doesn't remove target file)
1251
    workspace.write_digest(digest_b, clear_paths=("newdir/d.txt",))
1✔
1252
    check({"a", "b"})
1✔
1253

1254
    # clear a directory
1255
    workspace.write_digest(digest_c, clear_paths=("newdir",))
1✔
1256
    check({"c"})
1✔
1257

1258
    # path prefix, and clearing the 'current' directory
1259
    workspace.write_digest(digest_c_root, path_prefix="newdir", clear_paths=("",))
1✔
1260
    check({"c"})
1✔
1261

1262
    # clear multiple paths
1263
    workspace.write_digest(digest_b, clear_paths=())
1✔
1264
    check({"b", "c"})
1✔
1265
    workspace.write_digest(digest_a, clear_paths=("newdir/b.txt", "newdir/c.txt"))
1✔
1266
    check({"a"})
1✔
1267

1268
    # clearing non-existent paths is fine
1269
    workspace.write_digest(
1✔
1270
        digest_b, clear_paths=("not-here", "newdir/not-here", "not-here/also-not-here")
1271
    )
1272
    check({"a", "b"})
1✔
1273

1274

1275
@dataclass(frozen=True)
1✔
1276
class DigestRequest:
1✔
1277
    create_digest: CreateDigest
1✔
1278

1279

1280
class WorkspaceGoalSubsystem(GoalSubsystem):
1✔
1281
    name = "workspace-goal"
1✔
1282

1283

1284
class WorkspaceGoal(Goal):
1✔
1285
    subsystem_cls = WorkspaceGoalSubsystem
1✔
1286
    environment_behavior = Goal.EnvironmentBehavior.LOCAL_ONLY
1✔
1287

1288

1289
def test_workspace_in_goal_rule() -> None:
1✔
1290
    @rule
1✔
1291
    async def digest_request_singleton() -> DigestRequest:
1✔
1292
        fc = FileContent(path="a.txt", content=b"hello")
×
1293
        return DigestRequest(CreateDigest([fc]))
×
1294

1295
    @goal_rule
1✔
1296
    async def workspace_goal_rule(
1✔
1297
        console: Console, workspace: Workspace, digest_request: DigestRequest
1298
    ) -> WorkspaceGoal:
1299
        snapshot = await digest_to_snapshot(
×
1300
            **implicitly({digest_request.create_digest: CreateDigest})
1301
        )
1302
        workspace.write_digest(snapshot.digest)
×
1303
        console.print_stdout(snapshot.files[0], end="")
×
1304
        return WorkspaceGoal(exit_code=0)
×
1305

1306
    rule_runner = RuleRunner(rules=[workspace_goal_rule, digest_request_singleton])
1✔
1307
    result = rule_runner.run_goal_rule(WorkspaceGoal)
1✔
1308
    assert result.exit_code == 0
1✔
1309
    assert result.stdout == "a.txt"
1✔
1310
    assert Path(rule_runner.build_root, "a.txt").read_text() == "hello"
1✔
1311

1312

1313
# -----------------------------------------------------------------------------------------------
1314
# Invalidation of the FS
1315
# -----------------------------------------------------------------------------------------------
1316

1317

1318
def test_invalidated_after_rewrite(rule_runner: RuleRunner) -> None:
1✔
1319
    """Test that updating files causes invalidation of previous operations on those files."""
1320
    setup_fs_test_tar(rule_runner)
1✔
1321

1322
    def read_file() -> str:
1✔
1323
        digest_contents = rule_runner.request(DigestContents, [PathGlobs(["4.txt"])])
1✔
1324
        assert len(digest_contents) == 1
1✔
1325
        return digest_contents[0].content.decode()
1✔
1326

1327
    # First read the file, which should cache it.
1328
    assert read_file() == "four\n"
1✔
1329

1330
    new_value = "cuatro\n"
1✔
1331
    Path(rule_runner.build_root, "4.txt").write_text(new_value)
1✔
1332
    assert try_with_backoff(lambda: read_file() == new_value)
1✔
1333

1334

1335
def test_invalidated_after_parent_deletion(rule_runner: RuleRunner) -> None:
1✔
1336
    """Test that FileContent is invalidated after deleting the parent directory."""
1337
    setup_fs_test_tar(rule_runner)
1✔
1338

1339
    def read_file() -> str | None:
1✔
1340
        digest_contents = rule_runner.request(DigestContents, [PathGlobs(["a/b/1.txt"])])
1✔
1341
        if not digest_contents:
1✔
1342
            return None
1✔
1343
        assert len(digest_contents) == 1
1✔
1344
        return digest_contents[0].content.decode()
1✔
1345

1346
    # Read the original file so that we have nodes to invalidate.
1347
    assert read_file() == "one\n"
1✔
1348

1349
    shutil.rmtree(Path(rule_runner.build_root, "a/b"))
1✔
1350
    assert try_with_backoff((lambda: read_file() is None), count=10)
1✔
1351

1352

1353
def test_invalidated_after_child_deletion(rule_runner: RuleRunner) -> None:
1✔
1354
    setup_fs_test_tar(rule_runner)
1✔
1355
    original_snapshot = rule_runner.request(Snapshot, [PathGlobs(["a/*"])])
1✔
1356
    assert original_snapshot.files == ("a/3.txt", "a/4.txt.ln")
1✔
1357
    assert original_snapshot.dirs == ("a", "a/b")
1✔
1358

1359
    Path(rule_runner.build_root, "a/3.txt").unlink()
1✔
1360

1361
    def is_changed_snapshot() -> bool:
1✔
1362
        new_snapshot = rule_runner.request(Snapshot, [PathGlobs(["a/*"])])
1✔
1363
        return (
1✔
1364
            new_snapshot.digest != original_snapshot.digest
1365
            and new_snapshot.files == ("a/4.txt.ln",)
1366
            and new_snapshot.dirs == ("a", "a/b")
1367
        )
1368

1369
    assert try_with_backoff(is_changed_snapshot)
1✔
1370

1371

1372
def test_invalidated_after_new_child(rule_runner: RuleRunner) -> None:
1✔
1373
    setup_fs_test_tar(rule_runner)
1✔
1374
    original_snapshot = rule_runner.request(Snapshot, [PathGlobs(["a/*"])])
1✔
1375
    assert original_snapshot.files == ("a/3.txt", "a/4.txt.ln")
1✔
1376
    assert original_snapshot.dirs == ("a", "a/b")
1✔
1377

1378
    Path(rule_runner.build_root, "a/new_file.txt").write_text("new file")
1✔
1379

1380
    def is_changed_snapshot() -> bool:
1✔
1381
        new_snapshot = rule_runner.request(Snapshot, [PathGlobs(["a/*"])])
1✔
1382
        return (
1✔
1383
            new_snapshot.digest != original_snapshot.digest
1384
            and new_snapshot.files == ("a/3.txt", "a/4.txt.ln", "a/new_file.txt")
1385
            and new_snapshot.dirs == ("a", "a/b")
1386
        )
1387

1388
    assert try_with_backoff(is_changed_snapshot)
1✔
1389

1390

1391
# -----------------------------------------------------------------------------------------------
1392
# Native types
1393
# -----------------------------------------------------------------------------------------------
1394

1395

1396
@pytest.mark.parametrize("digest_cls", (Digest, FileDigest))
1✔
1397
def test_digest_properties(digest_cls: type) -> None:
1✔
1398
    digest = digest_cls("a" * 64, 1000)
1✔
1399
    assert digest.fingerprint == "a" * 64
1✔
1400
    assert digest.serialized_bytes_length == 1000
1✔
1401

1402

1403
@pytest.mark.parametrize("digest_cls,cls_name", ((Digest, "Digest"), (FileDigest, "FileDigest")))
1✔
1404
def test_digest_repr(digest_cls: type, cls_name: str) -> None:
1✔
1405
    assert str(digest_cls("a" * 64, 1)) == f"{cls_name}({repr('a' * 64)}, 1)"
1✔
1406

1407

1408
@pytest.mark.parametrize("digest_cls", (Digest, FileDigest))
1✔
1409
def test_digest_hash(digest_cls: type) -> None:
1✔
1410
    assert hash(digest_cls("a" * 64, 1)) == -6148914691236517206
1✔
1411
    assert hash(digest_cls("b" * 64, 1)) == -4919131752989213765
1✔
1412
    # Note that the size in bytes is not considered in the hash.
1413
    assert hash(digest_cls("a" * 64, 1000)) == -6148914691236517206
1✔
1414

1415

1416
@pytest.mark.parametrize("digest_cls", (Digest, FileDigest))
1✔
1417
def test_digest_equality(digest_cls) -> None:
1✔
1418
    digest = digest_cls("a" * 64, 1)
1✔
1419
    assert digest == digest_cls("a" * 64, 1)
1✔
1420
    assert digest != digest_cls("a" * 64, 1000)
1✔
1421
    assert digest != digest_cls("0" * 64, 1)
1✔
1422
    with pytest.raises(TypeError):
1✔
1423
        digest < digest
1✔
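
# Taken together, the two tests above show that digests with equal fingerprints but
# different lengths hash equal yet compare unequal. That satisfies Python's contract:
# equal objects must hash equal, while unequal objects are free to collide.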
1424

1425

1426
def test_digest_is_not_file_digest() -> None:
1✔
1427
    assert Digest("a" * 64, 1) != FileDigest("a" * 64, 1)
1✔
1428

1429

1430
def test_snapshot_properties() -> None:
1✔
1431
    snapshot = Snapshot.create_for_testing(["f.ext", "dir/f.ext"], ["dir"])
1✔
1432
    assert snapshot.digest is not None
1✔
1433
    assert snapshot.files == ("dir/f.ext", "f.ext")
1✔
1434
    assert snapshot.dirs == ("dir",)
1✔
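
# Note the ordering: the inputs were given as ["f.ext", "dir/f.ext"], but the
# assertion shows `Snapshot.files` reports paths in sorted order.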
1435

1436

1437
def test_snapshot_hash_and_eq() -> None:
1✔
1438
    one = Snapshot.create_for_testing(["f.ext"], ["dir"])
1✔
1439
    two = Snapshot.create_for_testing(["f.ext"], ["dir"])
1✔
1440
    assert hash(one) == hash(two)
1✔
1441
    assert one == two
1✔
1442
    three = Snapshot.create_for_testing(["f.ext"], [])
1✔
1443
    assert hash(two) != hash(three)
1✔
1444
    assert two != three
1✔
1445

1446

1447
@pytest.mark.parametrize(
1✔
1448
    "before, after, expected_diff",
1449
    [
1450
        ({"pants.txt": "relaxed fit"}, {"pants.txt": "relaxed fit"}, SnapshotDiff()),
1451
        (
1452
            {"pants.txt": "relaxed fit"},
1453
            {"pants.txt": "slim fit"},
1454
            SnapshotDiff(
1455
                changed_files=("pants.txt",),
1456
            ),
1457
        ),
1458
        (
1459
            {
1460
                "levis/501.txt": "original",
1461
                "levis/jeans/511": "slim fit",
1462
                "wrangler/cowboy_cut.txt": "performance",
1463
            },
1464
            {},
1465
            SnapshotDiff(
1466
                our_unique_dirs=("levis", "wrangler"),
1467
            ),
1468
        ),
1469
        (
1470
            {
1471
                "levis/501.txt": "original",
1472
                "levis/jeans/511": "slim fit",
1473
                "levis/chinos/502": "taper fit",
1474
                "wrangler/cowboy_cut.txt": "performance",
1475
            },
1476
            {
1477
                "levis/501.txt": "slim",
1478
                "levis/jeans/511": "slim fit",
1479
                "wrangler/authentics.txt": "relaxed",
1480
            },
1481
            SnapshotDiff(
1482
                our_unique_dirs=("levis/chinos",),
1483
                our_unique_files=("wrangler/cowboy_cut.txt",),
1484
                their_unique_files=("wrangler/authentics.txt",),
1485
                changed_files=("levis/501.txt",),
1486
            ),
1487
        ),
1488
        # Same name, but one is a file and one is a dir
1489
        (
1490
            {"duluth/pants.txt": "5-Pocket"},
1491
            {"duluth": "DuluthFlex"},
1492
            SnapshotDiff(our_unique_dirs=("duluth",), their_unique_files=("duluth",)),
1493
        ),
1494
    ],
1495
)
1496
def test_snapshot_diff(
1✔
1497
    rule_runner: RuleRunner,
1498
    before: dict[str, str],
1499
    after: dict[str, str],
1500
    expected_diff: SnapshotDiff,
1501
) -> None:
1502
    diff = SnapshotDiff.from_snapshots(
1✔
1503
        rule_runner.make_snapshot(before), rule_runner.make_snapshot(after)
1504
    )
1505

1506
    assert diff.our_unique_files == expected_diff.our_unique_files
1✔
1507
    assert diff.our_unique_dirs == expected_diff.our_unique_dirs
1✔
1508
    assert diff.their_unique_files == expected_diff.their_unique_files
1✔
1509
    assert diff.their_unique_dirs == expected_diff.their_unique_dirs
1✔
1510
    assert diff.changed_files == expected_diff.changed_files
1✔
1511

1512
    # Test with the arguments reversed: "ours" and "theirs" should swap.
1513
    diff = SnapshotDiff.from_snapshots(
1✔
1514
        rule_runner.make_snapshot(after), rule_runner.make_snapshot(before)
1515
    )
1516

1517
    assert diff.our_unique_files == expected_diff.their_unique_files
1✔
1518
    assert diff.our_unique_dirs == expected_diff.their_unique_dirs
1✔
1519
    assert diff.their_unique_files == expected_diff.our_unique_files
1✔
1520
    assert diff.their_unique_dirs == expected_diff.our_unique_dirs
1✔
1521
    assert diff.changed_files == expected_diff.changed_files
1✔
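
# Stated directly, the symmetry checked above: for snapshots A and B,
#   SnapshotDiff.from_snapshots(A, B).our_unique_files
#       == SnapshotDiff.from_snapshots(B, A).their_unique_files
# (and likewise for dirs), while `changed_files` is the same in both directions.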
1522

1523

1524
def retry_failed_assertions(
1✔
1525
    callable: Callable[[], Any], n: int, sleep_duration: float = 0.05
1526
) -> None:
1527
    """Retry the callable if any assertions failed.
1528

1529
    This is used to handle any failures resulting from an external system not fully processing
1530
    certain events as expected.
1531
    """
1532
    last_exception: BaseException | None = None
1✔
1533

1534
    while n > 0:
1✔
1535
        try:
1✔
1536
            callable()
1✔
1537
            return
1✔
UNCOV
1538
        except AssertionError as e:
×
UNCOV
1539
            last_exception = e
×
UNCOV
1540
            n -= 1
×
UNCOV
1541
            time.sleep(sleep_duration)
×
UNCOV
1542
            sleep_duration *= 2
×
1543

1544
    assert last_exception is not None
×
1545
    raise last_exception
×
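
# Worked example of the backoff above: with n=3 and the default sleep_duration of
# 0.05s, a persistently failing callable sleeps 0.05s, 0.10s, then 0.20s (0.35s in
# total) before the final AssertionError is re-raised.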
1546

1547

1548
def test_path_metadata_request_inside_buildroot(rule_runner: RuleRunner) -> None:
1✔
1549
    rule_runner.write_files(
1✔
1550
        {
1551
            "foo": b"xyzzy",
1552
            "sub-dir/bar": b"12345",
1553
        }
1554
    )
1555
    os.symlink("foo", os.path.join(rule_runner.build_root, "bar"))
1✔
1556

1557
    def get_metadata(path: str) -> PathMetadata | None:
1✔
1558
        result = rule_runner.request(PathMetadataResult, [PathMetadataRequest(path)])
1✔
1559
        return result.metadata
1✔
1560

1561
    m1 = get_metadata("foo")
1✔
1562
    assert m1 is not None
1✔
1563
    assert m1.path == "foo"
1✔
1564
    assert m1.kind == PathMetadataKind.FILE
1✔
1565
    assert m1.length == len(b"xyzzy")
1✔
1566
    assert m1.symlink_target is None
1✔
1567

1568
    m2 = get_metadata("not-found")
1✔
1569
    assert m2 is None
1✔
1570
    (Path(rule_runner.build_root) / "not-found").write_bytes(b"is found")
1✔
1571

1572
    def check_metadata_exists() -> None:
1✔
1573
        m3 = get_metadata("not-found")
1✔
1574
        assert m3 is not None
1✔
1575

1576
    retry_failed_assertions(check_metadata_exists, 3)
1✔
1577

1578
    m4 = get_metadata("bar")
1✔
1579
    assert m4 is not None
1✔
1580
    assert m4.path == "bar"
1✔
1581
    assert m4.kind == PathMetadataKind.SYMLINK
1✔
1582
    assert m4.length == 3
1✔
1583
    assert m4.symlink_target == "foo"
1✔
1584

1585
    m5 = get_metadata("sub-dir")
1✔
1586
    assert m5 is not None
1✔
1587
    assert m5.path == "sub-dir"
1✔
1588
    assert m5.kind == PathMetadataKind.DIRECTORY
1✔
1589
    assert m5.symlink_target is None
1✔
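
# Summarizing the shapes exercised above: `PathMetadata` exposes `path`, `kind`
# (FILE / DIRECTORY / SYMLINK), `length`, and `symlink_target`. For a symlink,
# `length` is the length of the target text (len("foo") == 3) and `symlink_target`
# is that text; a nonexistent path yields `None` rather than an error.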
1590

1591

1592
def test_path_metadata_request_outside_buildroot(rule_runner: RuleRunner) -> None:
1✔
1593
    with temporary_dir() as tmpdir:
1✔
1594
        assert not tmpdir.startswith(rule_runner.build_root)
1✔
1595

1596
        def get_metadata(path: str) -> PathMetadata | None:
1✔
1597
            result = rule_runner.request(
1✔
1598
                PathMetadataResult,
1599
                [PathMetadataRequest(os.path.join(tmpdir, path), PathNamespace.SYSTEM)],
1600
            )
1601
            return result.metadata
1✔
1602

1603
        base_path = Path(tmpdir)
1✔
1604
        (base_path / "foo").write_bytes(b"xyzzy")
1✔
1605
        (base_path / "sub-dir").mkdir(parents=True)
1✔
1606
        (base_path / "sub-dir" / "bar").write_bytes(b"12345")
1✔
1607
        os.symlink("foo", os.path.join(base_path, "bar"))
1✔
1608

1609
        m1 = get_metadata("foo")
1✔
1610
        assert m1 is not None
1✔
1611
        assert m1.path == str(base_path / "foo")
1✔
1612
        assert m1.kind == PathMetadataKind.FILE
1✔
1613
        assert m1.length == len(b"xyzzy")
1✔
1614
        assert m1.symlink_target is None
1✔
1615

1616
        m2 = get_metadata("not-found")
1✔
1617
        assert m2 is None
1✔
1618

1619
        m4 = get_metadata("bar")
1✔
1620
        assert m4 is not None
1✔
1621
        assert m4.path == str(base_path / "bar")
1✔
1622
        assert m4.kind == PathMetadataKind.SYMLINK
1✔
1623
        assert m4.length == 3
1✔
1624
        assert m4.symlink_target == "foo"
1✔
1625

1626
        m5 = get_metadata("sub-dir")
1✔
1627
        assert m5 is not None
1✔
1628
        assert m5.path == str(base_path / "sub-dir")
1✔
1629
        assert m5.kind == PathMetadataKind.DIRECTORY
1✔
1630
        assert m5.symlink_target is None
1✔
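
# Contrast with the in-build-root test above: paths outside the build root are
# requested as absolute paths under `PathNamespace.SYSTEM`, whereas the earlier test
# relied on `PathMetadataRequest`'s default namespace for build-root-relative paths.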