
psf / black / 7692220850

29 Jan 2024 06:46 AM UTC. Coverage: 96.45% (unchanged from the previous build).

Pull Request #4192: Bump peter-evans/create-or-update-comment from 3.1.0 to 4.0.0

Bumps [peter-evans/create-or-update-comment](https://github.com/peter-evans/create-or-update-comment) from 3.1.0 to 4.0.0.
- [Release notes](https://github.com/peter-evans/create-or-update-comment/releases)
- [Commits](https://github.com/peter-evans/create-or-update-comment/compare/23ff15729...71345be02)

---
updated-dependencies:
- dependency-name: peter-evans/create-or-update-comment
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>

3021 of 3232 branches covered (93.47%)

7145 of 7408 relevant lines covered (96.45%)

4.82 hits per line
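
The summary figures are plain ratios of the raw counts. A quick sanity check in Python (the variable names are mine, not Coveralls terminology):

    branches_covered, branches_total = 3021, 3232
    lines_covered, lines_total = 7145, 7408

    print(f"branch coverage: {branches_covered / branches_total:.2%}")  # 93.47%
    print(f"line coverage:   {lines_covered / lines_total:.2%}")        # 96.45%

The "4.82 hits per line" figure is the average hit count per relevant line across the recorded test runs.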

Source file: /src/black/cache.py (94.38% of relevant lines covered). The only uncovered lines in this build are the OSError fallbacks in Cache.read and Cache.write (five lines in total).

"""Caching of formatted files with feature-based invalidation."""

import hashlib
import os
import pickle
import sys
import tempfile
from dataclasses import dataclass, field
from pathlib import Path
from typing import Dict, Iterable, NamedTuple, Set, Tuple

from platformdirs import user_cache_dir

from _black_version import version as __version__
from black.mode import Mode
from black.output import err

if sys.version_info >= (3, 11):
    from typing import Self
else:
    from typing_extensions import Self


class FileData(NamedTuple):
    st_mtime: float
    st_size: int
    hash: str


def get_cache_dir() -> Path:
    """Get the cache directory used by black.

    Users can customize this directory on all systems using the `BLACK_CACHE_DIR`
    environment variable. By default, the cache directory is the user cache directory
    under the black application.

    This result is immediately set to a constant `black.cache.CACHE_DIR` so as to
    avoid repeated calls.
    """
    # NOTE: Function mostly exists as a clean way to test getting the cache directory.
    default_cache_dir = user_cache_dir("black")
    cache_dir = Path(os.environ.get("BLACK_CACHE_DIR", default_cache_dir))
    cache_dir = cache_dir / __version__
    return cache_dir


CACHE_DIR = get_cache_dir()


def get_cache_file(mode: Mode) -> Path:
    return CACHE_DIR / f"cache.{mode.get_cache_key()}.pickle"


@dataclass
class Cache:
    mode: Mode
    cache_file: Path
    file_data: Dict[str, FileData] = field(default_factory=dict)

    @classmethod
    def read(cls, mode: Mode) -> Self:
        """Read the cache if it exists and is well-formed.

        If it is not well-formed, the call to write later should
        resolve the issue.
        """
        cache_file = get_cache_file(mode)
        try:
            exists = cache_file.exists()
        except OSError as e:
            # Likely file too long; see #4172 and #4174
            err(f"Unable to read cache file {cache_file} due to {e}")
            return cls(mode, cache_file)
        if not exists:
            return cls(mode, cache_file)

        with cache_file.open("rb") as fobj:
            try:
                data: Dict[str, Tuple[float, int, str]] = pickle.load(fobj)
                file_data = {k: FileData(*v) for k, v in data.items()}
            except (pickle.UnpicklingError, ValueError, IndexError):
                return cls(mode, cache_file)

        return cls(mode, cache_file, file_data)

    @staticmethod
    def hash_digest(path: Path) -> str:
        """Return hash digest for path."""

        data = path.read_bytes()
        return hashlib.sha256(data).hexdigest()

    @staticmethod
    def get_file_data(path: Path) -> FileData:
        """Return file data for path."""

        stat = path.stat()
        hash = Cache.hash_digest(path)
        return FileData(stat.st_mtime, stat.st_size, hash)

    def is_changed(self, source: Path) -> bool:
        """Check if source has changed compared to cached version."""
        res_src = source.resolve()
        old = self.file_data.get(str(res_src))
        if old is None:
            return True

        st = res_src.stat()
        if st.st_size != old.st_size:
            return True
        if st.st_mtime != old.st_mtime:
            new_hash = Cache.hash_digest(res_src)
            if new_hash != old.hash:
                return True
        return False

    def filtered_cached(self, sources: Iterable[Path]) -> Tuple[Set[Path], Set[Path]]:
        """Split an iterable of paths in `sources` into two sets.

        The first contains paths of files that were modified on disk or are not in
        the cache. The other contains paths to non-modified files.
        """
        changed: Set[Path] = set()
        done: Set[Path] = set()
        for src in sources:
            if self.is_changed(src):
                changed.add(src)
            else:
                done.add(src)
        return changed, done

    def write(self, sources: Iterable[Path]) -> None:
        """Update the cache file data and write a new cache file."""
        self.file_data.update(
            **{str(src.resolve()): Cache.get_file_data(src) for src in sources}
        )
        try:
            CACHE_DIR.mkdir(parents=True, exist_ok=True)
            with tempfile.NamedTemporaryFile(
                dir=str(self.cache_file.parent), delete=False
            ) as f:
                # We store raw tuples in the cache because pickling NamedTuples
                # doesn't work with mypyc on Python 3.8, and because it's faster.
                data: Dict[str, Tuple[float, int, str]] = {
                    k: (*v,) for k, v in self.file_data.items()
                }
                pickle.dump(data, f, protocol=4)
            os.replace(f.name, self.cache_file)
        except OSError:
            pass
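
Taken together, the class is driven in three steps: read the cache for a given Mode, split candidate files into changed and unchanged sets, and write back entries for the files that were just formatted. A minimal sketch of that flow, with hypothetical source paths (the Cache calls themselves are the ones defined above):

    from pathlib import Path

    from black.cache import Cache
    from black.mode import Mode

    mode = Mode()              # default formatting options
    cache = Cache.read(mode)   # empty cache if the file is missing or malformed

    sources = {Path("src/app.py"), Path("src/util.py")}  # hypothetical inputs
    changed, unchanged = cache.filtered_cached(sources)

    # ... reformat the files in `changed` here ...

    # Record the freshly formatted files so the next run can skip them.
    cache.write(changed)

Writes go through a NamedTemporaryFile followed by os.replace, so another process never observes a half-written cache file; an OSError during the write is swallowed, presumably because the cache is only an optimization.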
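
Since the OSError fallbacks in Cache.read and Cache.write are the only uncovered lines, they are the natural place to close the remaining gap. One hedged way to exercise the read-side fallback with pytest's monkeypatch (the test below is illustrative and not part of black's test suite):

    from pathlib import Path

    import pytest

    from black.cache import Cache
    from black.mode import Mode


    def test_cache_read_survives_oserror(monkeypatch: pytest.MonkeyPatch) -> None:
        """Cache.read should fall back to an empty cache when exists() raises."""

        def broken_exists(self: Path) -> bool:
            raise OSError("simulated failure, e.g. file name too long")

        monkeypatch.setattr(Path, "exists", broken_exists)

        cache = Cache.read(Mode())

        # The fallback constructs an empty cache pointing at the usual cache file.
        assert cache.file_data == {}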