
karellen / kubernator / build 19201075326

09 Nov 2025 12:59AM UTC, coverage: 75.318% (+0.3%) from 75.065%

Triggered by a push (github / web-flow):
Merge pull request #91 from karellen/helm_version_check
Add Helm ability to check latest versions of the charts used

650 of 1022 branches covered (63.6%); branch coverage is included in the aggregate %.
25 of 25 new or added lines in 1 file covered (100.0%).
5 existing lines in 3 files are now uncovered.
2545 of 3220 relevant lines covered (79.04%), 4.74 hits per line.

Source File: /src/main/python/kubernator/api.py (73.0% covered)
# -*- coding: utf-8 -*-
#
#   Copyright 2020 Express Systems USA, Inc
#   Copyright 2021 Karellen, Inc.
#
#   Licensed under the Apache License, Version 2.0 (the "License");
#   you may not use this file except in compliance with the License.
#   You may obtain a copy of the License at
#
#       http://www.apache.org/licenses/LICENSE-2.0
#
#   Unless required by applicable law or agreed to in writing, software
#   distributed under the License is distributed on an "AS IS" BASIS,
#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#   See the License for the specific language governing permissions and
#   limitations under the License.
#

import fnmatch
import json
import logging
import os
import platform
import re
import sys
import textwrap
import traceback
import urllib.parse
from collections.abc import Callable
from collections.abc import Iterable, MutableSet, Reversible
from enum import Enum
from hashlib import sha256
from io import StringIO as io_StringIO
from pathlib import Path
from shutil import rmtree
from subprocess import CalledProcessError
from types import GeneratorType
from typing import Optional, Union, MutableSequence

import requests
import yaml
from diff_match_patch import diff_match_patch
from gevent import sleep
from jinja2 import (Environment,
                    ChainableUndefined,
                    make_logging_undefined,
                    Template as JinjaTemplate,
                    pass_context)
from jsonschema import validators
from platformdirs import user_cache_dir
from yaml import MarkedYAMLError

from kubernator._json_path import jp  # noqa: F401
from kubernator._k8s_client_patches import (URLLIB_HEADERS_PATCH,
                                            CUSTOM_OBJECT_PATCH_23,
                                            CUSTOM_OBJECT_PATCH_25)

_CACHE_HEADER_TRANSLATION = {"etag": "if-none-match",
                             "last-modified": "if-modified-since"}
_CACHE_HEADERS = ("etag", "last-modified")


def to_json(obj: Union[dict, list]):
    return json.dumps(obj)


def to_yaml_str(s: str):
    return yaml.safe_dump(s)


def to_json_yaml_str(obj: Union[dict, list]):
    """
    Takes `obj`, dumps as json representation, converts json representation to YAML string literal.
    """
    return to_yaml_str(to_json(obj))


def to_yaml_str_block(s: str, indent: int = 4, pretty_indent: int = 2):
    """
    Takes a multiline string, dedents it then indents it `indent` spaces for in-yaml alignment and
    `pretty-indent` spaces for in-block alignment.
    """
    return (f"|+{pretty_indent}\n" +
            textwrap.indent(textwrap.dedent(s), " " * (indent + pretty_indent)))


def to_json_yaml_str_block(obj: Union[str, dict, list], indent: int = 4, pretty_indent=2):
    """
    Takes an `obj`, serializes it as pretty JSON with `pretty_indent` in-json indentation and then
    passes it to `to_yaml_str_block`.
    """
    return to_yaml_str_block(json.dumps(obj, indent=pretty_indent), indent=indent, pretty_indent=pretty_indent)


def to_yaml(obj: Union[dict, list], level_indent: int, indent: int):
    s = yaml.safe_dump(obj, indent=indent)
    return "\n" + textwrap.indent(s, " " * level_indent)
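
# Illustrative examples (not part of the original file), assuming default arguments:
#
#   to_json_yaml_str({"a": 1})       # -> YAML string literal of '{"a": 1}'
#   to_yaml_str_block("foo\nbar")    # -> "|+2\n      foo\n      bar"
#
# i.e. a "|+<pretty_indent>" block-scalar header followed by the text indented by
# indent + pretty_indent (4 + 2 by default) spaces. These helpers are registered
# as Jinja2 filters by TemplateEngine below.
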
class TemplateEngine:
    VARIABLE_START_STRING = "{${"
    VARIABLE_END_STRING = "}$}"

    def __init__(self, logger):
        self.template_failures = 0
        self.templates = {}

        class CollectingUndefined(ChainableUndefined):
            __slots__ = ()

            def __str__(self):
                self.template_failures += 1
                return super().__str__()

        logging_undefined = make_logging_undefined(
            logger=logger,
            base=CollectingUndefined
        )

        @pass_context
        def variable_finalizer(ctx, value):
            normalized_value = str(value)
            if self.VARIABLE_START_STRING in normalized_value and self.VARIABLE_END_STRING in normalized_value:
                value_template_content = sys.intern(normalized_value)
                env: Environment = ctx.environment
                value_template = self.templates.get(value_template_content)
                if not value_template:
                    value_template = env.from_string(value_template_content, env.globals)
                    self.templates[value_template_content] = value_template
                return value_template.render(ctx.parent)

            return normalized_value

        self.env = Environment(variable_start_string=self.VARIABLE_START_STRING,
                               variable_end_string=self.VARIABLE_END_STRING,
                               autoescape=False,
                               finalize=variable_finalizer,
                               undefined=logging_undefined,
                               )

        self.env.filters["to_json"] = to_json
        self.env.filters["to_yaml_str"] = to_yaml_str
        self.env.filters["to_yaml"] = to_yaml
        self.env.filters["to_yaml_str_block"] = to_yaml_str_block
        self.env.filters["to_json_yaml_str_block"] = to_json_yaml_str_block
        self.env.filters["to_json_yaml_str"] = to_json_yaml_str

    def from_string(self, template):
        return self.env.from_string(template)

    def failures(self):
        return self.template_failures
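
# Usage sketch (illustrative, not from the original file): TemplateEngine uses the
# non-standard "{${ ... }$}" delimiters so templates can pass through other tooling
# untouched, e.g.
#
#   engine = TemplateEngine(logging.getLogger("tmpl"))
#   engine.from_string("name: {${ values.name }$}").render({"values": {"name": "x"}})
#   # -> "name: x"
#
# Values that themselves contain the delimiters are re-rendered recursively by
# variable_finalizer; undefined variables are logged via make_logging_undefined.
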
def calling_frame_source(depth=2):
    f = traceback.extract_stack(limit=depth + 1)[0]
    return f"file {f.filename}, line {f.lineno} in {f.name}"


def re_filter(name: str, patterns: Iterable[re.Pattern]):
    for pattern in patterns:
        if pattern.match(name):
            return True


def to_patterns(*patterns):
    return [re.compile(fnmatch.translate(p)) for p in patterns]


def scan_dir(logger, path: Path, path_filter: Callable[[os.DirEntry], bool], excludes, includes):
    logger.debug("Scanning %s, excluding %s, including %s", path, excludes, includes)
    with os.scandir(path) as it:  # type: Iterable[os.DirEntry]
        files = {f: f for f in
                 sorted(d.name for d in it if path_filter(d) and not re_filter(d.name, excludes))}

    for include in includes:
        logger.trace("Considering include %s in %s", include, path)
        for f in list(files.keys()):
            if include.match(f):
                del files[f]
                logger.debug("Selecting %s in %s as it matches %s", f, path, include)
                yield path / f
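
# Illustrative example (not part of the original file): to_patterns() compiles
# shell-style globs into regexes via fnmatch.translate(), and re_filter()/scan_dir()
# match entry names against them:
#
#   patterns = to_patterns("*.yaml", "*.yml")
#   re_filter("app.yaml", patterns)   # -> True
#   re_filter("app.json", patterns)   # -> None (falsy)
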
def parse_yaml_docs(document: str, source=None):
    try:
        return list(d for d in yaml.safe_load_all(document) if d)
    except MarkedYAMLError:
        raise


class FileType(Enum):
    TEXT = (lambda x: x,)
    BINARY = (lambda x: x,)
    JSON = (json.loads,)
    YAML = (parse_yaml_docs,)

    def __init__(self, func):
        self.func = func
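
# Note (descriptive, added for clarity): each FileType member wraps the parser used
# by _load_file() below, e.g. FileType.JSON.func is json.loads and FileType.YAML.func
# is parse_yaml_docs, which returns the non-empty documents of a multi-document
# YAML stream as a list.
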
def _load_file(logger, path: Path, file_type: FileType, source=None,
               template_engine: Optional[TemplateEngine] = None,
               template_context: Optional[dict] = None) -> Iterable[dict]:
    with open(path, "rb" if file_type == FileType.BINARY else "rt") as f:
        try:
            if template_engine and not file_type == FileType.BINARY:
                raw_data = template_engine.from_string(f.read()).render(template_context)
            else:
                raw_data = f.read()
            data = file_type.func(raw_data)
            if isinstance(data, GeneratorType):
                data = list(data)
            return data
        except Exception as e:
            logger.error("Failed parsing %s using %s", source or path, file_type, exc_info=e)
            raise


def _download_remote_file(url, file_name, cache: dict):
    retry_delay = 0
    while True:
        if retry_delay:
            sleep(retry_delay)

        with requests.get(url, headers=cache, stream=True) as r:
            if r.status_code == 429:
                if not retry_delay:
                    retry_delay = 0.2
                else:
                    retry_delay *= 2.0
                if retry_delay > 2.5:
                    retry_delay = 2.5
                continue

            r.raise_for_status()
            if r.status_code != 304:
                with open(file_name, "wb") as out:
                    for chunk in r.iter_content(chunk_size=65535):
                        out.write(chunk)
                return dict(r.headers)
            else:
                return None
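
# Note (descriptive, added for clarity): _download_remote_file() issues a conditional
# GET using the cached ETag/Last-Modified values (sent as If-None-Match/
# If-Modified-Since) and returns None on HTTP 304. HTTP 429 responses are retried
# with a doubling delay: 0.2s, 0.4s, 0.8s, 1.6s, then capped at 2.5s.
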
def get_app_cache_dir():
    return Path(user_cache_dir("kubernator", "karellen"))


def get_cache_dir(category: str, sub_category: str = None):
    cache_dir = get_app_cache_dir() / category
    if sub_category:
        cache_dir = cache_dir / sub_category
    if not cache_dir.exists():
        cache_dir.mkdir(parents=True)

    return cache_dir


def download_remote_file(logger, url: str, category: str = "k8s", sub_category: str = None,
                         downloader=_download_remote_file):
    config_dir = get_cache_dir(category, sub_category)

    file_name = config_dir / sha256(url.encode("UTF-8")).hexdigest()
    cache_file_name = file_name.with_suffix(".cache")
    logger.trace("Cache file for %s is %s.cache", url, file_name)

    cache = {}
    if cache_file_name.exists():
        logger.trace("Loading cache file from %s", cache_file_name)
        try:
            with open(cache_file_name, "rb") as cache_f:
                cache = json.load(cache_f)
        except (IOError, ValueError) as e:
            logger.trace("Failed loading cache file from %s (cleaning up)", cache_file_name, exc_info=e)
            cache_file_name.unlink(missing_ok=True)

    logger.trace("Downloading %s into %s%s", url, file_name, " (caching)" if cache else "")
    headers = downloader(url, file_name, cache)
    up_to_date = False
    if not headers:
        logger.trace("File %s(%s) is up-to-date", url, file_name.name)
        up_to_date = True
    else:
        cache = {_CACHE_HEADER_TRANSLATION.get(k.lower(), k): v
                 for k, v in headers.items()
                 if k.lower() in _CACHE_HEADERS}

        logger.trace("Update cache file in %s: %r", cache_file_name, cache)
        with open(cache_file_name, "wt") as cache_f:
            json.dump(cache, cache_f)

    return file_name, up_to_date


def load_remote_file(logger, url, file_type: FileType, category: str = "k8s", sub_category: str = None,
                     downloader=_download_remote_file):
    file_name, _ = download_remote_file(logger, url, category, sub_category, downloader=downloader)
    logger.debug("Loading %s from %s using %s", url, file_name, file_type.name)
    return _load_file(logger, file_name, file_type, url)


def load_file(logger, path: Path, file_type: FileType, source=None,
              template_engine: Optional[TemplateEngine] = None,
              template_context: Optional[dict] = None) -> Iterable[dict]:
    logger.debug("Loading %s using %s", source or path, file_type.name)
    return _load_file(logger, path, file_type,
                      source, template_engine, template_context)
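
# Usage sketch (illustrative; the URL is hypothetical, not from the original file):
#
#   docs = load_remote_file(logger, "https://example.org/manifest.yaml", FileType.YAML)
#
# The payload is cached under get_cache_dir("k8s") keyed by the sha256 of the URL
# and is re-downloaded unless the server responds with 304 Not Modified.
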
def validator_with_defaults(validator_class):
    validate_properties = validator_class.VALIDATORS["properties"]

    def set_defaults(validator, properties, instance, schema):
        for property, subschema in properties.items():
            if "default" in subschema:
                instance.setdefault(property, subschema["default"])

        for error in validate_properties(validator, properties, instance, schema):
            yield error

    return validators.extend(validator_class, {"properties": set_defaults})
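
# Usage sketch (illustrative, not part of the original file): validator_with_defaults()
# extends a jsonschema validator class so that schema defaults are filled into the
# instance during validation, e.g.
#
#   from jsonschema import Draft7Validator
#   DefaultingValidator = validator_with_defaults(Draft7Validator)
#   obj = {}
#   DefaultingValidator({"properties": {"x": {"default": 1}}}).validate(obj)
#   # obj is now {"x": 1}
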
class _PropertyList(MutableSequence):

    def __init__(self, seq, read_parent, name):
        self.__read_seq = seq
        self.__read_parent = read_parent
        self.__write_parent = None
        self.__write_seq = None
        self.__name = name

    def __iter__(self):
        return self.__read_seq.__iter__()

    def __mul__(self, __n):
        return self.__read_seq.__mul__(__n)

    def __rmul__(self, __n):
        return self.__read_seq.__rmul__(__n)

    def __imul__(self, __n):
        return self.__read_seq.__imul__(__n)

    def __contains__(self, __o):
        return self.__read_seq.__contains__(__o)

    def __reversed__(self):
        return self.__read_seq.__reversed__()

    def __gt__(self, __x):
        return self.__read_seq.__gt__(__x)

    def __ge__(self, __x):
        return self.__read_seq.__ge__(__x)

    def __lt__(self, __x):
        return self.__read_seq.__lt__(__x)

    def __le__(self, __x):
        return self.__read_seq.__le__(__x)

    def __len__(self):
        return self.__read_seq.__len__()

    def count(self, __value):
        return self.__read_seq.count(__value)

    def copy(self):
        while True:
            try:
                return self.__write_seq.copy()
            except AttributeError:
                self.__clone()

    def __getitem__(self, __i):
        return self.__read_seq.__getitem__(__i)

    def append(self, __object):
        while True:
            try:
                return self.__write_seq.append(__object)
            except AttributeError:
                self.__clone()

    def extend(self, __iterable):
        while True:
            try:
                return self.__write_seq.extend(__iterable)
            except AttributeError:
                self.__clone()

    def pop(self, __index=None):
        while True:
            try:
                return self.__write_seq.pop(__index)
            except AttributeError:
                self.__clone()

    def insert(self, __index, __object):
        while True:
            try:
                return self.__write_seq.insert(__index, __object)
            except AttributeError:
                self.__clone()

    def remove(self, __value):
        while True:
            try:
                return self.__write_seq.remove(__value)
            except AttributeError:
                self.__clone()

    def sort(self, *, key=None, reverse=False):
        while True:
            try:
                return self.__write_seq.sort(key=key, reverse=reverse)
            except AttributeError:
                self.__clone()

    def __setitem__(self, __i, __o):
        while True:
            try:
                return self.__write_seq.__setitem__(__i, __o)
            except AttributeError:
                self.__clone()

    def __delitem__(self, __i):
        while True:
            try:
                return self.__write_seq.__delitem__(__i)
            except AttributeError:
                self.__clone()

    def __add__(self, __x):
        while True:
            try:
                return self.__write_seq.__add__(__x)
            except AttributeError:
                self.__clone()

    def __iadd__(self, __x):
        while True:
            try:
                return self.__write_seq.__iadd__(__x)
            except AttributeError:
                self.__clone()

    def clear(self):
        while True:
            try:
                return self.__write_seq.clear()
            except AttributeError:
                self.__clone()

    def reverse(self):
        while True:
            try:
                return self.__write_seq.reverse()
            except AttributeError:
                self.__clone()

    def __clone(self):
        if self.__read_parent == self.__write_parent:
            self.__write_seq = self.__read_seq
        else:
            self.__write_seq = self.__read_seq.copy()
            self.__read_seq = self.__write_seq

            setattr(self.__write_parent, self.__name, self.__write_seq)
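
# Note (descriptive, added for clarity): _PropertyList is a copy-on-write view used
# by PropertyDict below. Reads go to the (possibly inherited) backing list; the first
# mutation triggers __clone(), which copies the list when it was inherited from a
# parent PropertyDict and re-attaches the copy to the writing PropertyDict via setattr().
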
class PropertyDict:
    def __init__(self, _dict=None, _parent=None):
        self.__dict__["_PropertyDict__dict"] = _dict or {}
        self.__dict__["_PropertyDict__parent"] = _parent

    def __getattr__(self, item):
        v = self.__getattr(item)
        if isinstance(v, _PropertyList):
            v._PropertyList__write_parent = self
        return v

    def __getattr(self, item):
        try:
            v = self.__dict[item]
            if isinstance(v, list):
                v = _PropertyList(v, self, item)
            return v
        except KeyError:
            parent = self.__parent
            if parent is not None:
                return parent.__getattr(item)
            raise AttributeError("no attribute %r" % item) from None

    def __setattr__(self, key, value):
        if key.startswith("_PropertyDict__"):
            raise AttributeError("prohibited attribute %r" % key)
        if isinstance(value, dict):
            parent_dict = None
            if self.__parent is not None:
                try:
                    parent_dict = self.__parent.__getattr__(key)
                    if not isinstance(parent_dict, PropertyDict):
                        raise ValueError("cannot override a scalar with a synthetic object for attribute %s", key)
                except AttributeError:
                    pass
            value = PropertyDict(value, _parent=parent_dict)
        self.__dict[key] = value

    def __delattr__(self, item):
        del self.__dict[item]

    def __len__(self):
        return len(self.__dir__())

    def __getitem__(self, item):
        return self.__dict.__getitem__(item)

    def __setitem__(self, key, value):
        self.__dict.__setitem__(key, value)

    def __delitem__(self, key):
        self.__dict.__delitem__(key)

    def __contains__(self, item):
        try:
            self.__dict[item]
            return True
        except KeyError:
            parent = self.__parent
            if parent is not None:
                return parent.__contains__(item)
            return False

    def __dir__(self) -> Iterable[str]:
        result: set[str] = set()
        result.update(self.__dict.keys())
        if self.__parent is not None:
            result.update(self.__parent.__dir__())
        return result

    def __repr__(self):
        return "PropertyDict[%r]" % self.__dict
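
# Usage sketch (illustrative, not part of the original file): PropertyDict provides
# attribute access with parent-chain fallback, so a child config inherits values it
# does not override:
#
#   parent = PropertyDict({"a": 1, "tags": ["x"]})
#   child = PropertyDict({"b": 2}, _parent=parent)
#   child.a                  # -> 1 (inherited)
#   child.tags.append("y")   # copy-on-write: parent.tags stays ["x"]
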
def config_parent(config: PropertyDict):
    return config._PropertyDict__parent


def config_as_dict(config: PropertyDict):
    return {k: config[k] for k in dir(config)}


def config_get(config: PropertyDict, key: str, default=None):
    try:
        return config[key]
    except KeyError:
        return default


class Globs(MutableSet[Union[str, re.Pattern]]):
    def __init__(self, source: Optional[list[Union[str, re.Pattern]]] = None,
                 immutable=False):
        self._immutable = immutable
        if source:
            self._list = [self.__wrap__(v) for v in source]
        else:
            self._list = []

    def __wrap__(self, item: Union[str, re.Pattern]):
        if isinstance(item, re.Pattern):
            return item
        return re.compile(fnmatch.translate(item))

    def __contains__(self, item: Union[str, re.Pattern]):
        return self._list.__contains__(self.__wrap__(item))

    def __iter__(self):
        return self._list.__iter__()

    def __len__(self):
        return self._list.__len__()

    def add(self, value: Union[str, re.Pattern]):
        if self._immutable:
            raise RuntimeError("immutable")

        _list = self._list
        value = self.__wrap__(value)
        if value not in _list:
            _list.append(value)

    def extend(self, values: Iterable[Union[str, re.Pattern]]):
        for v in values:
            self.add(v)

    def discard(self, value: Union[str, re.Pattern]):
        if self._immutable:
            raise RuntimeError("immutable")

        _list = self._list
        value = self.__wrap__(value)
        if value in _list:
            _list.remove(value)

    def add_first(self, value: Union[str, re.Pattern]):
        if self._immutable:
            raise RuntimeError("immutable")

        _list = self._list
        value = self.__wrap__(value)
        if value not in _list:
            _list.insert(0, value)

    def extend_first(self, values: Reversible[Union[str, re.Pattern]]):
        for v in reversed(values):
            self.add_first(v)

    def __str__(self):
        return self._list.__str__()

    def __repr__(self):
        return f"Globs[{self._list}]"
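
# Usage sketch (illustrative, not part of the original file): Globs keeps shell-style
# patterns as compiled regexes in insertion order:
#
#   g = Globs(["*.yaml"])
#   g.add("*.yml")
#   any(p.match("app.yml") for p in g)   # -> True
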
class Template:
    def __init__(self, name: str, template: JinjaTemplate, defaults: dict = None, path=None, source=None):
        self.name = name
        self.source = source
        self.path = path
        self.template = template
        self.defaults = defaults

    def render(self, context: dict, values: dict):
        variables = {"ktor": context,
                     "values": (self.defaults or {}) | values}
        return self.template.render(variables)


class StringIO:
    def __init__(self, trimmed=True):
        self.write = self.write_trimmed if trimmed else self.write_untrimmed
        self._buf = io_StringIO()

    def write_untrimmed(self, line):
        self._buf.write(line)

    def write_trimmed(self, line):
        self._buf.write(f"{line}\n")

    def getvalue(self):
        return self._buf.getvalue()


class StripNL:
    def __init__(self, func):
        self._func = func

    def __call__(self, line: str):
        return self._func(line.rstrip("\r\n"))


def log_level_to_verbosity_count(level: int):
    return int(-level / 10 + 6)
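
# Note (descriptive, added for clarity): log_level_to_verbosity_count() maps stdlib
# logging levels to a verbosity count, e.g. logging.DEBUG (10) -> 5 and
# logging.INFO (20) -> 4; Repository.init() uses it to build the "-v..." flag
# passed to "git clone".
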
def clone_url_str(url):
    return urllib.parse.urlunsplit(url[:3] + ("", ""))  # no query or fragment


def prepend_os_path(path):
    path = str(path)
    paths = os.environ["PATH"].split(os.pathsep)
    if path not in paths:
        paths.insert(0, path)
        os.environ["PATH"] = os.pathsep.join(paths)
        return True
    return False


_GOLANG_MACHINE = platform.machine().lower()
if _GOLANG_MACHINE == "x86_64":
    _GOLANG_MACHINE = "amd64"

_GOLANG_OS = platform.system().lower()


def get_golang_machine():
    return _GOLANG_MACHINE


def get_golang_os():
    return _GOLANG_OS


def sha256_file_digest(path):
    h = sha256()
    with open(path, "rb") as f:
        h.update(f.read(65535))
    return h.hexdigest()


class Repository:
    logger = logging.getLogger("kubernator.repository")
    git_logger = logger.getChild("git")

    def __init__(self, repo, cred_aug=None):
        repo = str(repo)  # in case this is a Path
        url = urllib.parse.urlsplit(repo)

        if not url.scheme and not url.netloc and Path(url.path).exists():
            url = url._replace(scheme="file")  # In case it's a local repository

        self.url = url
        self.url_str = urllib.parse.urlunsplit(url[:4] + ("",))
        self._cred_aug = cred_aug
        self._hash_obj = (url.hostname if url.username or url.password else url.netloc,
                          url.path,
                          url.query)

        self.clone_url = None  # Actual URL components used in cloning operations
        self.clone_url_str = None  # Actual URL string used in cloning operations
        self.ref = None
        self.local_dir = None

    def __eq__(self, o: object) -> bool:
        if isinstance(o, Repository):
            return self._hash_obj == o._hash_obj

    def __hash__(self) -> int:
        return hash(self._hash_obj)

    def init(self, logger, context):
        run = context.app.run
        run_capturing_out = context.app.run_capturing_out

        url = self.url
        if self._cred_aug:
            url = self._cred_aug(url)

        self.clone_url = url
        self.clone_url_str = clone_url_str(url)

        query = urllib.parse.parse_qs(self.url.query)
        ref = query.get("ref")
        if ref:
            self.ref = ref[0]

        config_dir = get_cache_dir("git")

        git_cache = config_dir / sha256(self.clone_url_str.encode("UTF-8")).hexdigest()

        if git_cache.exists() and git_cache.is_dir() and (git_cache / ".git").exists():
            try:
                run(["git", "status"], None, None, cwd=git_cache).wait()
            except CalledProcessError:
                rmtree(git_cache)

        self.local_dir = git_cache

        stdout_logger = StripNL(self.git_logger.debug)
        stderr_logger = StripNL(self.git_logger.info)
        if git_cache.exists():
            if not self.ref:
                ref = run_capturing_out(["git", "symbolic-ref", "refs/remotes/origin/HEAD", "--short"],
                                        stderr_logger, cwd=git_cache).strip()[7:]  # Remove prefix "origin/"
            else:
                ref = self.ref
            self.logger.info("Using %s%s cached in %s", self.url_str,
                             f"?ref={ref}" if not self.ref else "",
                             self.local_dir)
            run(["git", "config", "remote.origin.fetch", f"+refs/heads/{ref}:refs/remotes/origin/{ref}"],
                stdout_logger, stderr_logger, cwd=git_cache).wait()
            run(["git", "fetch", "-pPt", "--force"], stdout_logger, stderr_logger, cwd=git_cache).wait()
            run(["git", "checkout", ref], stdout_logger, stderr_logger, cwd=git_cache).wait()
            run(["git", "clean", "-f"], stdout_logger, stderr_logger, cwd=git_cache).wait()
            run(["git", "reset", "--hard", ref, "--"], stdout_logger, stderr_logger, cwd=git_cache).wait()
            run(["git", "pull"], stdout_logger, stderr_logger, cwd=git_cache).wait()
        else:
            self.logger.info("Initializing %s -> %s", self.url_str, self.local_dir)
            args = (["git", "clone", "--depth", "1",
                     "-" + ("v" * log_level_to_verbosity_count(logger.getEffectiveLevel()))] +
                    (["-b", self.ref] if self.ref else []) +
                    ["--", self.clone_url_str, str(self.local_dir)])
            safe_args = [c if c != self.clone_url_str else self.url_str for c in args]
            run(args, stdout_logger, stderr_logger, safe_args=safe_args).wait()

    def cleanup(self):
        if False and self.local_dir:
            self.logger.info("Cleaning up %s -> %s", self.url_str, self.local_dir)
            rmtree(self.local_dir)
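
# Note (descriptive, added for clarity): Repository caches clones under
# get_cache_dir("git"), keyed by the sha256 of the credential-augmented clone URL.
# A "?ref=<branch-or-tag>" query component selects the ref to fetch and check out;
# without it, an existing cache falls back to the remote HEAD branch.
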
class KubernatorPlugin:
    _name = None

    def set_context(self, context):
        raise NotImplementedError

    def register(self, **kwargs):
        pass

    def handle_init(self):
        pass

    def handle_start(self):
        pass

    def handle_before_dir(self, cwd: Path):
        pass

    def handle_before_script(self, cwd: Path):
        pass

    def handle_after_script(self, cwd: Path):
        pass

    def handle_after_dir(self, cwd: Path):
        pass

    def handle_apply(self):
        pass

    def handle_verify(self):
        pass

    def handle_shutdown(self):
        pass

    def handle_summary(self):
        pass
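
# Note (descriptive, added for clarity): install_python_k8s_client() pip-installs the
# requested major version of the "kubernetes" package into a per-major cache
# directory, applies the bundled diff-match-patch patches unless disable_patching is
# set, and falls back once to the previous major version if the requested one is
# unavailable.
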
def install_python_k8s_client(run, package_major, logger, logger_stdout, logger_stderr, disable_patching,
                              fallback=False):
    cache_dir = get_cache_dir("python")
    package_major_dir = cache_dir / str(package_major)
    package_major_dir_str = str(package_major_dir)
    patch_indicator = package_major_dir / ".patched"

    if disable_patching and package_major_dir.exists() and patch_indicator.exists():
        logger.info("Patching is disabled, existing Kubernetes Client %s (%s) was patched - "
                    "deleting current client",
                    str(package_major), package_major_dir)
        rmtree(package_major_dir)

    if not package_major_dir.exists() or not len(os.listdir(package_major_dir)):
        package_major_dir.mkdir(parents=True, exist_ok=True)
        try:
            run([sys.executable, "-m", "pip", "install", "--no-deps", "--no-input",
                 "--root-user-action=ignore", "--break-system-packages", "--disable-pip-version-check",
                 "--target", package_major_dir_str, f"kubernetes>={package_major!s}dev0,<{int(package_major) + 1!s}"],
                logger_stdout, logger_stderr)
        except CalledProcessError as e:
            if not fallback and "No matching distribution found for" in e.stderr:
                logger.warning("Kubernetes Client %s (%s) failed to install because the version wasn't found. "
                               "Falling back to a client of the previous version - %s",
                               str(package_major), package_major_dir, int(package_major) - 1)
                return install_python_k8s_client(run,
                                                 int(package_major) - 1,
                                                 logger,
                                                 logger_stdout,
                                                 logger_stderr,
                                                 disable_patching,
                                                 fallback=True)
            else:
                raise

    if not patch_indicator.exists() and not disable_patching:
        if not fallback and not len(os.listdir(package_major_dir)):
            # Directory is empty
            logger.warning("Kubernetes Client %s (%s) directory is empty - the client was not installed. "
                           "Falling back to a client of the previous version - %s",
                           str(package_major), package_major_dir, int(package_major) - 1)

            return install_python_k8s_client(run,
                                             int(package_major) - 1,
                                             logger,
                                             logger_stdout,
                                             logger_stderr,
                                             disable_patching,
                                             fallback=True)

        for patch_text, target_file, skip_if_found, min_version, max_version, name in (
                URLLIB_HEADERS_PATCH, CUSTOM_OBJECT_PATCH_23, CUSTOM_OBJECT_PATCH_25):
            patch_target = package_major_dir / target_file
            logger.info("Applying patch %s to %s...", name, patch_target)
            if min_version and int(package_major) < min_version:
                logger.info("Skipping patch %s on %s due to package major version %s below minimum %d!",
                            name, patch_target, package_major, min_version)
                continue
            if max_version and int(package_major) > max_version:
                logger.info("Skipping patch %s on %s due to package major version %s above maximum %d!",
                            name, patch_target, package_major, max_version)
                continue

            with open(patch_target, "rt") as f:
                target_file_original = f.read()
            if skip_if_found in target_file_original:
                logger.info("Skipping patch %s on %s, as it already appears to be patched!", name,
                            patch_target)
                continue

            dmp = diff_match_patch()
            patches = dmp.patch_fromText(patch_text)
            target_file_patched, results = dmp.patch_apply(patches, target_file_original)
            failed_patch = False
            for idx, result in enumerate(results):
                if not result:
                    failed_patch = True
                    msg = ("Failed to apply a patch to Kubernetes Client API %s, hunk #%d, patch: \n%s" % (
                        patch_target, idx, patches[idx]))
                    logger.fatal(msg)
            if failed_patch:
                raise RuntimeError(f"Failed to apply some Kubernetes Client API {patch_target} patches")

            with open(patch_target, "wt") as f:
                f.write(target_file_patched)

        patch_indicator.touch(exist_ok=False)

    return package_major_dir