karellen / kubernator, build 15126454653 (push via github, arcivanov: "Release v1.0.20")
20 May 2025 01:02AM UTC coverage: 76.274% (+0.03%) from 76.249%

Branches: 615 of 951 covered (64.67%); branch coverage is included in the aggregate %.
Lines: 2378 of 2973 relevant lines covered (79.99%), 3.99 hits per line.

Source file: /src/main/python/kubernator/api.py (74.65% covered)

# -*- coding: utf-8 -*-
#
#   Copyright 2020 Express Systems USA, Inc
#   Copyright 2021 Karellen, Inc.
#
#   Licensed under the Apache License, Version 2.0 (the "License");
#   you may not use this file except in compliance with the License.
#   You may obtain a copy of the License at
#
#       http://www.apache.org/licenses/LICENSE-2.0
#
#   Unless required by applicable law or agreed to in writing, software
#   distributed under the License is distributed on an "AS IS" BASIS,
#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#   See the License for the specific language governing permissions and
#   limitations under the License.
#

import fnmatch
import json
import logging
import os
import platform
import re
import sys
import traceback
import urllib.parse
from collections.abc import Callable
from collections.abc import Iterable, MutableSet, Reversible
from enum import Enum
from hashlib import sha256
from io import StringIO as io_StringIO
from pathlib import Path
from shutil import rmtree
from subprocess import CalledProcessError
from types import GeneratorType
from typing import Optional, Union, MutableSequence

import requests
import yaml
from diff_match_patch import diff_match_patch
from gevent import sleep
from jinja2 import (Environment,
                    ChainableUndefined,
                    make_logging_undefined,
                    Template as JinjaTemplate,
                    pass_context)
from jsonschema import validators
from platformdirs import user_cache_dir

from kubernator._json_path import jp  # noqa: F401
from kubernator._k8s_client_patches import (URLLIB_HEADERS_PATCH,
                                            CUSTOM_OBJECT_PATCH_23,
                                            CUSTOM_OBJECT_PATCH_25)

_CACHE_HEADER_TRANSLATION = {"etag": "if-none-match",
                             "last-modified": "if-modified-since"}
_CACHE_HEADERS = ("etag", "last-modified")


def calling_frame_source(depth=2):
    f = traceback.extract_stack(limit=depth + 1)[0]
    return f"file {f.filename}, line {f.lineno} in {f.name}"


def re_filter(name: str, patterns: Iterable[re.Pattern]):
    for pattern in patterns:
        if pattern.match(name):
            return True


def to_patterns(*patterns):
    return [re.compile(fnmatch.translate(p)) for p in patterns]


def scan_dir(logger, path: Path, path_filter: Callable[[os.DirEntry], bool], excludes, includes):
    logger.debug("Scanning %s, excluding %s, including %s", path, excludes, includes)
    with os.scandir(path) as it:  # type: Iterable[os.DirEntry]
        files = {f: f for f in
                 sorted(d.name for d in it if path_filter(d) and not re_filter(d.name, excludes))}

    for include in includes:
        logger.trace("Considering include %s in %s", include, path)
        for f in list(files.keys()):
            if include.match(f):
                del files[f]
                logger.debug("Selecting %s in %s as it matches %s", f, path, include)
                yield path / f
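
# Usage sketch (illustrative only; the directory name is an assumption, and the
# logger is assumed to provide the trace() method this module relies on):
# to_patterns() turns shell-style globs into compiled regexes, and scan_dir()
# yields entries that pass the filter, are not excluded, and match an include.
#
#     from pathlib import Path
#
#     excludes = to_patterns(".*", "_*")
#     includes = to_patterns("*.yaml", "*.yml")
#     for p in scan_dir(logger, Path("deploy"), lambda d: d.is_file(), excludes, includes):
#         print(p)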


class FileType(Enum):
    JSON = (json.load,)
    YAML = (yaml.safe_load_all,)

    def __init__(self, func):
        self.func = func


def _load_file(logger, path: Path, file_type: FileType, source=None) -> Iterable[dict]:
    with open(path, "rb") as f:
        try:
            data = file_type.func(f)
            if isinstance(data, GeneratorType):
                data = list(data)
            return data
        except Exception as e:
            logger.error("Failed parsing %s using %s", source or path, file_type, exc_info=e)
            raise


def _download_remote_file(url, file_name, cache: dict):
    retry_delay = 0
    while True:
        if retry_delay:
            sleep(retry_delay)

        with requests.get(url, headers=cache, stream=True) as r:
            if r.status_code == 429:
                if not retry_delay:
                    retry_delay = 0.2
                else:
                    retry_delay *= 2.0
                if retry_delay > 2.5:
                    retry_delay = 2.5
                continue

            r.raise_for_status()
            if r.status_code != 304:
                with open(file_name, "wb") as out:
                    for chunk in r.iter_content(chunk_size=65535):
                        out.write(chunk)
                return dict(r.headers)
            else:
                return None


def get_app_cache_dir():
    return Path(user_cache_dir("kubernator", "karellen"))


def get_cache_dir(category: str, sub_category: str = None):
    cache_dir = get_app_cache_dir() / category
    if sub_category:
        cache_dir = cache_dir / sub_category
    if not cache_dir.exists():
        cache_dir.mkdir(parents=True)

    return cache_dir


def download_remote_file(logger, url: str, category: str = "k8s", sub_category: str = None,
                         downloader=_download_remote_file):
    config_dir = get_cache_dir(category, sub_category)

    file_name = config_dir / sha256(url.encode("UTF-8")).hexdigest()
    cache_file_name = file_name.with_suffix(".cache")
    logger.trace("Cache file for %s is %s.cache", url, file_name)

    cache = {}
    if cache_file_name.exists():
        logger.trace("Loading cache file from %s", cache_file_name)
        try:
            with open(cache_file_name, "rb") as cache_f:
                cache = json.load(cache_f)
        except (IOError, ValueError) as e:
            logger.trace("Failed loading cache file from %s (cleaning up)", cache_file_name, exc_info=e)
            cache_file_name.unlink(missing_ok=True)

    logger.trace("Downloading %s into %s%s", url, file_name, " (caching)" if cache else "")
    headers = downloader(url, file_name, cache)
    up_to_date = False
    if not headers:
        logger.trace("File %s(%s) is up-to-date", url, file_name.name)
        up_to_date = True
    else:
        cache = {_CACHE_HEADER_TRANSLATION.get(k.lower(), k): v
                 for k, v in headers.items()
                 if k.lower() in _CACHE_HEADERS}

        logger.trace("Update cache file in %s: %r", cache_file_name, cache)
        with open(cache_file_name, "wt") as cache_f:
            json.dump(cache, cache_f)

    return file_name, up_to_date


def load_remote_file(logger, url, file_type: FileType, category: str = "k8s", sub_category: str = None,
                     downloader=_download_remote_file):
    file_name, _ = download_remote_file(logger, url, category, sub_category, downloader=downloader)
    logger.debug("Loading %s from %s using %s", url, file_name, file_type.name)
    return _load_file(logger, file_name, file_type, url)


def load_file(logger, path: Path, file_type: FileType, source=None) -> Iterable[dict]:
    logger.debug("Loading %s using %s", source or path, file_type.name)
    return _load_file(logger, path, file_type)
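
# Caching sketch (the URL below is illustrative, and the logger is assumed to
# provide trace()): download_remote_file() stores each payload under
# get_app_cache_dir()/<category>/<sha256 of the URL> next to a ".cache" file
# holding the response's ETag/Last-Modified translated into
# If-None-Match/If-Modified-Since, so an HTTP 304 on the next run keeps the
# cached copy; _download_remote_file() also backs off on HTTP 429, starting at
# 0.2s and capped at 2.5s.
#
#     spec = load_remote_file(logger, "https://example.invalid/openapi.json", FileType.JSON)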


def validator_with_defaults(validator_class):
    validate_properties = validator_class.VALIDATORS["properties"]

    def set_defaults(validator, properties, instance, schema):
        for property, subschema in properties.items():
            if "default" in subschema:
                instance.setdefault(property, subschema["default"])

        for error in validate_properties(validator, properties, instance, schema):
            yield error

    return validators.extend(validator_class, {"properties": set_defaults})
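
# Sketch of how the extended validator is meant to be used (the schema below is
# illustrative, not one of kubernator's): the returned class validates as usual
# but also writes each subschema's "default" into the instance being validated.
#
#     from jsonschema import Draft7Validator
#
#     DefaultingValidator = validator_with_defaults(Draft7Validator)
#     schema = {"properties": {"namespace": {"type": "string", "default": "default"}}}
#     instance = {}
#     DefaultingValidator(schema).validate(instance)
#     assert instance == {"namespace": "default"}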


class _PropertyList(MutableSequence):

    def __init__(self, seq, read_parent, name):
        self.__read_seq = seq
        self.__read_parent = read_parent
        self.__write_parent = None
        self.__write_seq = None
        self.__name = name

    def __iter__(self):
        return self.__read_seq.__iter__()

    def __mul__(self, __n):
        return self.__read_seq.__mul__(__n)

    def __rmul__(self, __n):
        return self.__read_seq.__rmul__(__n)

    def __imul__(self, __n):
        return self.__read_seq.__imul__(__n)

    def __contains__(self, __o):
        return self.__read_seq.__contains__(__o)

    def __reversed__(self):
        return self.__read_seq.__reversed__()

    def __gt__(self, __x):
        return self.__read_seq.__gt__(__x)

    def __ge__(self, __x):
        return self.__read_seq.__ge__(__x)

    def __lt__(self, __x):
        return self.__read_seq.__lt__(__x)

    def __le__(self, __x):
        return self.__read_seq.__le__(__x)

    def __len__(self):
        return self.__read_seq.__len__()

    def count(self, __value):
        return self.__read_seq.count(__value)

    def copy(self):
        while True:
            try:
                return self.__write_seq.copy()
            except AttributeError:
                self.__clone()

    def __getitem__(self, __i):
        return self.__read_seq.__getitem__(__i)

    def append(self, __object):
        while True:
            try:
                return self.__write_seq.append(__object)
            except AttributeError:
                self.__clone()

    def extend(self, __iterable):
        while True:
            try:
                return self.__write_seq.extend(__iterable)
            except AttributeError:
                self.__clone()

    def pop(self, __index=None):
        while True:
            try:
                return self.__write_seq.pop(__index)
            except AttributeError:
                self.__clone()

    def insert(self, __index, __object):
        while True:
            try:
                return self.__write_seq.insert(__index, __object)
            except AttributeError:
                self.__clone()

    def remove(self, __value):
        while True:
            try:
                return self.__write_seq.remove(__value)
            except AttributeError:
                self.__clone()

    def sort(self, *, key=None, reverse=False):
        while True:
            try:
                return self.__write_seq.sort(key=key, reverse=reverse)
            except AttributeError:
                self.__clone()

    def __setitem__(self, __i, __o):
        while True:
            try:
                return self.__write_seq.__setitem__(__i, __o)
            except AttributeError:
                self.__clone()

    def __delitem__(self, __i):
        while True:
            try:
                return self.__write_seq.__delitem__(__i)
            except AttributeError:
                self.__clone()

    def __add__(self, __x):
        while True:
            try:
                return self.__write_seq.__add__(__x)
            except AttributeError:
                self.__clone()

    def __iadd__(self, __x):
        while True:
            try:
                return self.__write_seq.__iadd__(__x)
            except AttributeError:
                self.__clone()

    def clear(self):
        while True:
            try:
                return self.__write_seq.clear()
            except AttributeError:
                self.__clone()

    def reverse(self):
        while True:
            try:
                return self.__write_seq.reverse()
            except AttributeError:
                self.__clone()

    def __clone(self):
        if self.__read_parent == self.__write_parent:
            self.__write_seq = self.__read_seq
        else:
            self.__write_seq = self.__read_seq.copy()
            self.__read_seq = self.__write_seq

            setattr(self.__write_parent, self.__name, self.__write_seq)
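
# _PropertyList is the copy-on-write view PropertyDict hands out for list
# values: reads go through the (possibly inherited) read sequence, and the
# first mutation triggers __clone(), which copies the list when it came from a
# different parent layer and re-attaches the copy to the writing PropertyDict
# via setattr(), so a child layer never mutates its parent's list in place.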


class PropertyDict:
    def __init__(self, _dict=None, _parent=None):
        self.__dict__["_PropertyDict__dict"] = _dict or {}
        self.__dict__["_PropertyDict__parent"] = _parent

    def __getattr__(self, item):
        v = self.__getattr(item)
        if isinstance(v, _PropertyList):
            v._PropertyList__write_parent = self
        return v

    def __getattr(self, item):
        try:
            v = self.__dict[item]
            if isinstance(v, list):
                v = _PropertyList(v, self, item)
            return v
        except KeyError:
            parent = self.__parent
            if parent is not None:
                return parent.__getattr(item)
            raise AttributeError("no attribute %r" % item) from None

    def __setattr__(self, key, value):
        if key.startswith("_PropertyDict__"):
            raise AttributeError("prohibited attribute %r" % key)
        if isinstance(value, dict):
            parent_dict = None
            if self.__parent is not None:
                try:
                    parent_dict = self.__parent.__getattr__(key)
                    if not isinstance(parent_dict, PropertyDict):
                        raise ValueError("cannot override a scalar with a synthetic object for attribute %s", key)
                except AttributeError:
                    pass
            value = PropertyDict(value, _parent=parent_dict)
        self.__dict[key] = value

    def __delattr__(self, item):
        del self.__dict[item]

    def __len__(self):
        return len(self.__dir__())

    def __getitem__(self, item):
        return self.__dict.__getitem__(item)

    def __setitem__(self, key, value):
        self.__dict.__setitem__(key, value)

    def __delitem__(self, key):
        self.__dict.__delitem__(key)

    def __contains__(self, item):
        try:
            self.__dict[item]
            return True
        except KeyError:
            parent = self.__parent
            if parent is not None:
                return parent.__contains__(item)
            return False

    def __dir__(self) -> Iterable[str]:
        result: set[str] = set()
        result.update(self.__dict.keys())
        if self.__parent is not None:
            result.update(self.__parent.__dir__())
        return result

    def __repr__(self):
        return "PropertyDict[%r]" % self.__dict
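
# Minimal sketch of the layered-attribute behaviour (the attribute names are
# illustrative): a child PropertyDict falls back to its parent for reads, while
# writes only land in the child, which is how per-scope context values can
# shadow their parents without modifying them.
#
#     parent = PropertyDict({"region": "us-east-1", "labels": ["a"]})
#     child = PropertyDict({}, _parent=parent)
#     assert child.region == "us-east-1"   # read falls through to the parent
#     child.region = "eu-west-1"           # write lands in the child only
#     assert parent.region == "us-east-1"
#     child.labels.append("b")             # list is copied on write via _PropertyList
#     assert list(parent.labels) == ["a"]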


def config_parent(config: PropertyDict):
    return config._PropertyDict__parent


def config_as_dict(config: PropertyDict):
    return {k: config[k] for k in dir(config)}


def config_get(config: PropertyDict, key: str, default=None):
    try:
        return config[key]
    except KeyError:
        return default


class Globs(MutableSet[Union[str, re.Pattern]]):
    def __init__(self, source: Optional[list[Union[str, re.Pattern]]] = None,
                 immutable=False):
        self._immutable = immutable
        if source:
            self._list = [self.__wrap__(v) for v in source]
        else:
            self._list = []

    def __wrap__(self, item: Union[str, re.Pattern]):
        if isinstance(item, re.Pattern):
            return item
        return re.compile(fnmatch.translate(item))

    def __contains__(self, item: Union[str, re.Pattern]):
        return self._list.__contains__(self.__wrap__(item))

    def __iter__(self):
        return self._list.__iter__()

    def __len__(self):
        return self._list.__len__()

    def add(self, value: Union[str, re.Pattern]):
        if self._immutable:
            raise RuntimeError("immutable")

        _list = self._list
        value = self.__wrap__(value)
        if value not in _list:
            _list.append(value)

    def extend(self, values: Iterable[Union[str, re.Pattern]]):
        for v in values:
            self.add(v)

    def discard(self, value: Union[str, re.Pattern]):
        if self._immutable:
            raise RuntimeError("immutable")

        _list = self._list
        value = self.__wrap__(value)
        if value in _list:
            _list.remove(value)

    def add_first(self, value: Union[str, re.Pattern]):
        if self._immutable:
            raise RuntimeError("immutable")

        _list = self._list
        value = self.__wrap__(value)
        if value not in _list:
            _list.insert(0, value)

    def extend_first(self, values: Reversible[Union[str, re.Pattern]]):
        for v in reversed(values):
            self.add_first(v)

    def __str__(self):
        return self._list.__str__()

    def __repr__(self):
        return f"Globs[{self._list}]"
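
# Globs keeps an ordered, de-duplicated set of compiled glob patterns: strings
# are translated with fnmatch, pre-compiled re.Pattern objects pass through,
# and an immutable instance rejects any mutation. A hypothetical exclusion set:
#
#     excludes = Globs(["*.tmp"])
#     excludes.add("*.bak")
#     assert any(p.match("notes.bak") for p in excludes)
#     Globs(["*"], immutable=True).add("*.yaml")   # raises RuntimeError("immutable")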


class TemplateEngine:
    VARIABLE_START_STRING = "{${"
    VARIABLE_END_STRING = "}$}"

    def __init__(self, logger):
        self.template_failures = 0
        self.templates = {}

        class CollectingUndefined(ChainableUndefined):
            __slots__ = ()

            def __str__(self):
                self.template_failures += 1
                return super().__str__()

        logging_undefined = make_logging_undefined(
            logger=logger,
            base=CollectingUndefined
        )

        @pass_context
        def variable_finalizer(ctx, value):
            normalized_value = str(value)
            if self.VARIABLE_START_STRING in normalized_value and self.VARIABLE_END_STRING in normalized_value:
                value_template_content = sys.intern(normalized_value)
                env: Environment = ctx.environment
                value_template = self.templates.get(value_template_content)
                if not value_template:
                    value_template = env.from_string(value_template_content, env.globals)
                    self.templates[value_template_content] = value_template
                return value_template.render(ctx.parent)

            return normalized_value

        self.env = Environment(variable_start_string=self.VARIABLE_START_STRING,
                               variable_end_string=self.VARIABLE_END_STRING,
                               autoescape=False,
                               finalize=variable_finalizer,
                               undefined=logging_undefined)

    def from_string(self, template):
        return self.env.from_string(template)

    def failures(self):
        return self.template_failures


class Template:
    def __init__(self, name: str, template: JinjaTemplate, defaults: dict = None, path=None, source=None):
        self.name = name
        self.source = source
        self.path = path
        self.template = template
        self.defaults = defaults

    def render(self, context: dict, values: dict):
        variables = {"ktor": context,
                     "values": (self.defaults or {}) | values}
        return self.template.render(variables)
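
# Rendering sketch (the logger and variable names are illustrative):
# TemplateEngine swaps Jinja's delimiters for "{${ ... }$}" so ordinary {{ }}
# in manifests passes through untouched, and its finalizer re-renders any value
# that itself contains the custom delimiters, letting variables refer to other
# variables. Template.render() layers the supplied values over the defaults and
# exposes the context object as "ktor".
#
#     import logging
#
#     engine = TemplateEngine(logging.getLogger("example"))
#     t = Template("ns", engine.from_string("namespace: {${ values.ns }$}"),
#                  defaults={"ns": "default"})
#     print(t.render(context={}, values={}))                     # namespace: default
#     print(t.render(context={}, values={"ns": "kube-system"}))  # namespace: kube-system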


class StringIO:
    def __init__(self, trimmed=True):
        self.write = self.write_trimmed if trimmed else self.write_untrimmed
        self._buf = io_StringIO()

    def write_untrimmed(self, line):
        self._buf.write(line)

    def write_trimmed(self, line):
        self._buf.write(f"{line}\n")

    def getvalue(self):
        return self._buf.getvalue()


class StripNL:
    def __init__(self, func):
        self._func = func

    def __call__(self, line: str):
        return self._func(line.rstrip("\r\n"))


def log_level_to_verbosity_count(level: int):
    return int(-level / 10 + 6)
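
# Worked example: the formula maps stdlib log levels onto a git-style -v count,
# e.g. logging.DEBUG (10) -> int(-10/10 + 6) = 5 ("-vvvvv"), logging.INFO (20) -> 4,
# logging.WARNING (30) -> 3; Repository.init() below uses it to build the
# verbosity flag passed to "git clone".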


def clone_url_str(url):
    return urllib.parse.urlunsplit(url[:3] + ("", ""))  # no query or fragment


def prepend_os_path(path):
    path = str(path)
    paths = os.environ["PATH"].split(os.pathsep)
    if path not in paths:
        paths.insert(0, path)
        os.environ["PATH"] = os.pathsep.join(paths)
        return True
    return False


_GOLANG_MACHINE = platform.machine().lower()
if _GOLANG_MACHINE == "x86_64":
    _GOLANG_MACHINE = "amd64"

_GOLANG_OS = platform.system().lower()


def get_golang_machine():
    return _GOLANG_MACHINE


def get_golang_os():
    return _GOLANG_OS


def sha256_file_digest(path):
    h = sha256()
    with open(path, "rb") as f:
        h.update(f.read(65535))
    return h.hexdigest()


class Repository:
    logger = logging.getLogger("kubernator.repository")
    git_logger = logger.getChild("git")

    def __init__(self, repo, cred_aug=None):
        repo = str(repo)  # in case this is a Path
        url = urllib.parse.urlsplit(repo)

        if not url.scheme and not url.netloc and Path(url.path).exists():
            url = url._replace(scheme="file")  # In case it's a local repository

        self.url = url
        self.url_str = urllib.parse.urlunsplit(url[:4] + ("",))
        self._cred_aug = cred_aug
        self._hash_obj = (url.hostname if url.username or url.password else url.netloc,
                          url.path,
                          url.query)

        self.clone_url = None  # Actual URL components used in cloning operations
        self.clone_url_str = None  # Actual URL string used in cloning operations
        self.ref = None
        self.local_dir = None

    def __eq__(self, o: object) -> bool:
        if isinstance(o, Repository):
            return self._hash_obj == o._hash_obj

    def __hash__(self) -> int:
        return hash(self._hash_obj)

    def init(self, logger, context):
        run = context.app.run
        run_capturing_out = context.app.run_capturing_out

        url = self.url
        if self._cred_aug:
            url = self._cred_aug(url)

        self.clone_url = url
        self.clone_url_str = clone_url_str(url)

        query = urllib.parse.parse_qs(self.url.query)
        ref = query.get("ref")
        if ref:
            self.ref = ref[0]

        config_dir = get_cache_dir("git")

        git_cache = config_dir / sha256(self.clone_url_str.encode("UTF-8")).hexdigest()

        if git_cache.exists() and git_cache.is_dir() and (git_cache / ".git").exists():
            try:
                run(["git", "status"], None, None, cwd=git_cache).wait()
            except CalledProcessError:
                rmtree(git_cache)

        self.local_dir = git_cache

        stdout_logger = StripNL(self.git_logger.debug)
        stderr_logger = StripNL(self.git_logger.info)
        if git_cache.exists():
            if not self.ref:
                ref = run_capturing_out(["git", "symbolic-ref", "refs/remotes/origin/HEAD", "--short"],
                                        stderr_logger, cwd=git_cache).strip()[7:]  # Remove prefix "origin/"
            else:
                ref = self.ref
            self.logger.info("Using %s%s cached in %s", self.url_str,
                             f"?ref={ref}" if not self.ref else "",
                             self.local_dir)
            run(["git", "config", "remote.origin.fetch", f"+refs/heads/{ref}:refs/remotes/origin/{ref}"],
                stdout_logger, stderr_logger, cwd=git_cache).wait()
            run(["git", "fetch", "-pPt", "--force"], stdout_logger, stderr_logger, cwd=git_cache).wait()
            run(["git", "checkout", ref], stdout_logger, stderr_logger, cwd=git_cache).wait()
            run(["git", "clean", "-f"], stdout_logger, stderr_logger, cwd=git_cache).wait()
            run(["git", "reset", "--hard", ref, "--"], stdout_logger, stderr_logger, cwd=git_cache).wait()
            run(["git", "pull"], stdout_logger, stderr_logger, cwd=git_cache).wait()
        else:
            self.logger.info("Initializing %s -> %s", self.url_str, self.local_dir)
            args = (["git", "clone", "--depth", "1",
                     "-" + ("v" * log_level_to_verbosity_count(logger.getEffectiveLevel()))] +
                    (["-b", self.ref] if self.ref else []) +
                    ["--", self.clone_url_str, str(self.local_dir)])
            safe_args = [c if c != self.clone_url_str else self.url_str for c in args]
            run(args, stdout_logger, stderr_logger, safe_args=safe_args).wait()

    def cleanup(self):
        if False and self.local_dir:
            self.logger.info("Cleaning up %s -> %s", self.url_str, self.local_dir)
            rmtree(self.local_dir)


class KubernatorPlugin:
    _name = None

    def set_context(self, context):
        raise NotImplementedError

    def register(self, **kwargs):
        pass

    def handle_init(self):
        pass

    def handle_start(self):
        pass

    def handle_before_dir(self, cwd: Path):
        pass

    def handle_before_script(self, cwd: Path):
        pass

    def handle_after_script(self, cwd: Path):
        pass

    def handle_after_dir(self, cwd: Path):
        pass

    def handle_apply(self):
        pass

    def handle_verify(self):
        pass

    def handle_shutdown(self):
        pass

    def handle_summary(self):
        pass
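
# Plugin sketch (the plugin below is hypothetical, not one shipped with
# kubernator): concrete plugins subclass KubernatorPlugin and override only the
# lifecycle hooks they care about; set_context() is the one method they must
# implement, since the base class raises NotImplementedError there.
#
#     class ExamplePlugin(KubernatorPlugin):
#         _name = "example"
#
#         def set_context(self, context):
#             self.context = context
#
#         def handle_apply(self):
#             # act on the accumulated context when resources are applied
#             pass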


def install_python_k8s_client(run, package_major, logger, logger_stdout, logger_stderr, disable_patching):
    cache_dir = get_cache_dir("python")
    package_major_dir = cache_dir / str(package_major)
    package_major_dir_str = str(package_major_dir)
    patch_indicator = package_major_dir / ".patched"

    if disable_patching and package_major_dir.exists() and patch_indicator.exists():
        logger.info("Patching is disabled, existing Kubernetes Client %s (%s) was patched - "
                    "deleting current client",
                    str(package_major), package_major_dir)
        rmtree(package_major_dir)

    if not package_major_dir.exists():
        package_major_dir.mkdir(parents=True, exist_ok=True)
        run([sys.executable, "-m", "pip", "install", "--no-deps", "--no-input",
             "--root-user-action=ignore", "--break-system-packages", "--disable-pip-version-check",
             "--target", package_major_dir_str, f"kubernetes>={package_major!s}dev0,<{int(package_major) + 1!s}"],
            logger_stdout, logger_stderr).wait()

    if not patch_indicator.exists() and not disable_patching:
        for patch_text, target_file, skip_if_found, min_version, max_version, name in (
                URLLIB_HEADERS_PATCH, CUSTOM_OBJECT_PATCH_23, CUSTOM_OBJECT_PATCH_25):
            patch_target = package_major_dir / target_file
            logger.info("Applying patch %s to %s...", name, patch_target)
            if min_version and int(package_major) < min_version:
                logger.info("Skipping patch %s on %s due to package major version %s below minimum %d!",
                            name, patch_target, package_major, min_version)
                continue
            if max_version and int(package_major) > max_version:
                logger.info("Skipping patch %s on %s due to package major version %s above maximum %d!",
                            name, patch_target, package_major, max_version)
                continue

            with open(patch_target, "rt") as f:
                target_file_original = f.read()
            if skip_if_found in target_file_original:
                logger.info("Skipping patch %s on %s, as it already appears to be patched!", name,
                            patch_target)
                continue

            dmp = diff_match_patch()
            patches = dmp.patch_fromText(patch_text)
            target_file_patched, results = dmp.patch_apply(patches, target_file_original)
            failed_patch = False
            for idx, result in enumerate(results):
                if not result:
                    failed_patch = True
                    msg = ("Failed to apply a patch to Kubernetes Client API %s, hunk #%d, patch: \n%s" % (
                        patch_target, idx, patches[idx]))
                    logger.fatal(msg)
            if failed_patch:
                raise RuntimeError(f"Failed to apply some Kubernetes Client API {patch_target} patches")

            with open(patch_target, "wt") as f:
                f.write(target_file_patched)

        patch_indicator.touch(exist_ok=False)

    return package_major_dir
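
# install_python_k8s_client() keeps one "kubernetes" client tree per major
# version under get_cache_dir("python")/<major>, installing it with
# "pip install --no-deps --target ..." through the supplied run() callable and
# constrained to ">=<major>dev0,<major+1>"; unless patching is disabled, it then
# applies the bundled diff-match-patch patches and drops a ".patched" marker so
# the tree is not patched twice.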