• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

flathub / flatpak-external-data-checker / 18530269075

15 Oct 2025 01:19PM UTC coverage: 90.635% (-0.2%) from 90.837%
18530269075

Pull #488

github

web-flow
Merge 508221da2 into c2f4d73d9
Pull Request #488: Drop distutils

46 of 56 new or added lines in 2 files covered. (82.14%)

17 existing lines in 1 file now uncovered.

2226 of 2456 relevant lines covered (90.64%)

0.91 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

88.18
/src/lib/utils.py
1
# Copyright © 2018–2020 Endless Mobile, Inc.
2
#
3
# Authors:
4
#       Joaquim Rocha <jrocha@endlessm.com>
5
#       Will Thompson <wjt@endlessm.com>
6
#
7
# This program is free software; you can redistribute it and/or modify
8
# it under the terms of the GNU General Public License as published by
9
# the Free Software Foundation; either version 2 of the License, or
10
# (at your option) any later version.
11
#
12
# This program is distributed in the hope that it will be useful,
13
# but WITHOUT ANY WARRANTY; without even the implied warranty of
14
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15
# GNU General Public License for more details.
16
#
17
# You should have received a copy of the GNU General Public License along
18
# with this program; if not, write to the Free Software Foundation, Inc.,
19
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20

21
import apt_inst
1✔
22
import apt_pkg
1✔
23
import datetime as dt
1✔
24
import zoneinfo
1✔
25
import json
1✔
26
import logging
1✔
27
import os
1✔
28
import re
1✔
29
import subprocess
1✔
30
import tempfile
1✔
31
import urllib.request
1✔
32
import urllib.parse
1✔
33
import typing as t
1✔
34
from distutils.version import StrictVersion, LooseVersion
1✔
35
import asyncio
1✔
36
import shlex
1✔
37
from pathlib import Path
1✔
38
import operator
1✔
39

40
from collections import OrderedDict
1✔
41
from ruamel.yaml import YAML
1✔
42
from elftools.elf.elffile import ELFFile
1✔
43
import aiohttp
1✔
44
import editorconfig
1✔
45
import magic
1✔
46

47
from . import externaldata, TIMEOUT_CONNECT, HTTP_CHUNK_SIZE, OPERATORS
1✔
48
from .errors import CheckerRemoteError, CheckerQueryError, CheckerFetchError
1✔
49
from .checksums import MultiHash
1✔
50

51
import gi
1✔
52

53
gi.require_version("Json", "1.0")
1✔
54
from gi.repository import GLib, Json  # noqa: E402
1✔
55

56
log = logging.getLogger(__name__)
1✔
57

58

59
def _extract_timestamp(info):
    """Extract a timestamp from HTTP response headers.

    Prefers the Last-Modified header, falling back to Date; the selected
    value (or None, if neither is present) is handed to parse_date_header."""
    header_value = info.get("Last-Modified") or info.get("Date")
    return parse_date_header(header_value)
62

63

64
def parse_date_header(date_str):
    """Parse a stringified date, from a Last-Modified or Date header.

    In addition to standard(ish) formats, non-standard formats where the
    timezone is a named zone rather than an offset are detected and handled:
    the named zone is stripped, the remainder is parsed naively, then the
    result is converted to naive UTC.

    Returns a datetime (aware only when the input carries a numeric offset
    parsed via %z). Falls back to datetime.now() when 'date_str' is empty
    or None, and raises CheckerRemoteError when a non-empty string cannot
    be parsed by any known format.
    """
    if not date_str:
        # No usable header value at all; "now" is the best we can do.
        return dt.datetime.now()

    # Handle non-standard headers ending in a named IANA zone, e.g.
    # "Wed, 21 Oct 2015 07:28:00 US/Pacific". UTC/GMT are left to the
    # standard %Z formats below.
    for tz in zoneinfo.available_timezones():
        if tz in ("UTC", "GMT") or not date_str.endswith(tz):
            continue

        date_str_notz = date_str[: -(len(tz) + 1)]
        for date_fmt in [
            "%a, %d %b %Y %H:%M:%S",
            "%a, %d-%b-%Y %H:%M:%S",
        ]:
            try:
                dt_obj = dt.datetime.strptime(date_str_notz, date_fmt)
            except ValueError:
                continue
            # Attach the named zone, convert to UTC, then drop tzinfo so the
            # result is naive UTC like the %Z-parsed values below.
            local_dt = dt_obj.replace(tzinfo=zoneinfo.ZoneInfo(tz))
            utc_dt = local_dt.astimezone(zoneinfo.ZoneInfo("UTC"))
            return utc_dt.replace(tzinfo=None)

    for date_fmt in [
        "%a, %d %b %Y %H:%M:%S %Z",
        "%a, %d %b %Y %H:%M:%S %z",
        "%a, %d-%b-%Y %H:%M:%S %Z",
        "%a, %d-%b-%Y %H:%M:%S %z",
    ]:
        try:
            return dt.datetime.strptime(date_str, date_fmt)
        except ValueError:
            continue

    # Previously this raise was unreachable (placed after continue/return
    # inside the loops), so unparseable dates silently returned None.
    raise CheckerRemoteError(f"Cannot parse date/time: {date_str}")
103

104

105
def _check_newline(fp):
1✔
106
    original_position = fp.tell()
1✔
107
    fp.seek(0, os.SEEK_END)
1✔
108
    fp.seek(fp.tell() - 1, os.SEEK_SET)
1✔
109
    last_char = fp.read()
1✔
110
    fp.seek(original_position, os.SEEK_SET)
1✔
111
    if last_char == "\n":
1✔
112
        return True
1✔
113
    else:
114
        return False
1✔
115

116

117
def strip_query(url):
    """Sanitize the query string of 'url', if any.

    Parameters whose names begin with an underscore are assumed to be
    tracking identifiers and are dropped; the rest are re-encoded in order."""
    parsed = urllib.parse.urlparse(url)
    if not parsed.query:
        return url
    kept_params = [
        (name, value)
        for name, value in urllib.parse.parse_qsl(parsed.query)
        if not name.startswith("_")
    ]
    new_query = urllib.parse.urlencode(kept_params)
    result = urllib.parse.urlunparse(parsed._replace(query=new_query))
    log.debug("Normalised %s to %s", url, result)
    return result
130

131

132
async def get_timestamp_from_url(url: str, session: aiohttp.ClientSession):
    """Issue a HEAD request for 'url' (following redirects) and return the
    timestamp derived from its Last-Modified or Date header.

    NOTE(review): if neither header is present, _extract_timestamp forwards
    None to parse_date_header — confirm that case is tolerated."""
    async with session.head(url, allow_redirects=True) as response:
        return _extract_timestamp(response.headers)
135

136

137
async def get_extra_data_info_from_url(
    url: str,
    session: aiohttp.ClientSession,
    follow_redirects: bool = True,
    dest_io: t.Optional[t.IO] = None,
    content_type_deny: t.Optional[t.Iterable[re.Pattern]] = None,
):
    """Download 'url' in chunks, returning an ExternalFile describing it.

    While streaming, accumulates a MultiHash checksum and the total size,
    and optionally tees the bytes into 'dest_io'. The content type is
    sniffed (via libmagic) from the first chunk; if it matches any pattern
    in 'content_type_deny', CheckerFetchError is raised.

    Note: redirects are always followed by the GET itself; 'follow_redirects'
    only controls whether the final (redirected) URL or the original URL is
    recorded in the returned ExternalFile.

    Raises CheckerFetchError on a denied content type; aiohttp errors
    propagate to the caller.
    """
    async with session.get(
        url,
        # Ask for the raw bytes: don't let aiohttp advertise compression,
        # since the checksum/size must match the file as stored.
        skip_auto_headers=[aiohttp.hdrs.ACCEPT_ENCODING],
    ) as response:
        real_url = str(response.url)
        info = response.headers

        def content_type_rejected(content_type: t.Optional[str]) -> bool:
            # True only when a deny-list was given and the sniffed type
            # matches one of its patterns.
            return (
                content_type is not None
                and content_type_deny is not None
                and any(r.match(content_type) for r in content_type_deny)
            )

        checksum = MultiHash()
        first_chunk = True
        size = 0
        async for chunk in response.content.iter_chunked(HTTP_CHUNK_SIZE):
            if first_chunk:
                first_chunk = False
                # determine content type from magic number since http header may be
                # wrong
                actual_content_type = magic.from_buffer(chunk, mime=True)
                if content_type_rejected(actual_content_type):
                    if real_url != url:
                        suffix = f" (redirected from '{url}')"
                    else:
                        suffix = ""
                    raise CheckerFetchError(
                        f"Wrong content type '{actual_content_type}' received "
                        f"from '{real_url}'{suffix}"
                    )

            checksum.update(chunk)
            size += len(chunk)
            if dest_io is not None:
                dest_io.write(chunk)

        if dest_io is not None:
            dest_io.flush()

    external_file = externaldata.ExternalFile(
        # Tracking query parameters are stripped from the recorded URL.
        url=strip_query(real_url if follow_redirects else url),
        checksum=checksum.hexdigest(),
        size=size,
        version=None,
        timestamp=_extract_timestamp(info),
    )

    return external_file
194

195

196
class VersionComparisonError(CheckerQueryError):
    """Raised when two version strings cannot be meaningfully ordered."""

    def __init__(self, left, right):
        # Keep both operands around for callers that want to inspect them.
        self.left, self.right = left, right
        message = f"Can't compare {self.left} and {self.right}"
        super().__init__(message)
201

202

203
# Vendored replacements for distutils' StrictVersion / LooseVersion sort keys.
# distutils was deprecated by PEP 632 and removed in Python 3.12, so the
# module-level "from distutils.version import ..." import is now unused and
# can be dropped. The semantics below deliberately mirror the old classes.
_STRICT_VERSION_RE = re.compile(r"^(\d+)\.(\d+)(?:\.(\d+))?(?:([ab])(\d+))?$")
_LOOSE_COMPONENT_RE = re.compile(r"(\d+|[a-z]+|\.)", re.IGNORECASE)


def _strict_version_key(version_str: str):
    """Return an ordering key equivalent to distutils' StrictVersion.

    Accepts only "N.N[.N][{a|b}N]"; raises ValueError otherwise. A final
    release sorts after any pre-release of the same version ("1.0a1" < "1.0"),
    which the (0, letter, num) vs (1,) pre-release markers encode.
    """
    match = _STRICT_VERSION_RE.match(version_str)
    if not match:
        raise ValueError(f"invalid version number {version_str!r}")
    major, minor, patch, pre_type, pre_num = match.groups()
    release = (int(major), int(minor), int(patch or 0))
    if pre_type:
        return (release, (0, pre_type, int(pre_num)))
    return (release, (1,))


def _loose_version_key(version_str: str):
    """Return an ordering key equivalent to distutils' LooseVersion.

    The string is split into numeric and alphabetic runs ('.' separators are
    dropped, other separators are kept); numeric runs compare as integers.
    As with LooseVersion, comparing an int component against a str component
    raises TypeError.
    """
    key = []
    for part in _LOOSE_COMPONENT_RE.split(version_str):
        if not part or part == ".":
            continue
        try:
            key.append(int(part))
        except ValueError:
            key.append(part)
    return key


class FallbackVersion(t.NamedTuple):
    """A version string with best-effort ordering.

    Comparisons first try strict "N.N[.N][{a|b}N]" semantics; if either
    string doesn't conform, both are compared loosely (mixed numeric and
    alphabetic components). Raises VersionComparisonError when the two
    strings cannot be ordered even loosely (int vs str component).
    """

    # The raw version string being wrapped.
    s: str

    def __compare(self, oper, other) -> bool:
        try:
            return oper(_strict_version_key(self.s), _strict_version_key(other.s))
        except ValueError:
            try:
                return oper(_loose_version_key(self.s), _loose_version_key(other.s))
            except TypeError as err:
                raise VersionComparisonError(self.s, other.s) from err

    def __lt__(self, other):
        return self.__compare(operator.lt, other)

    def __le__(self, other):
        return self.__compare(operator.le, other)

    def __gt__(self, other):
        return self.__compare(operator.gt, other)

    def __ge__(self, other):
        return self.__compare(operator.ge, other)

    def __eq__(self, other):
        return self.__compare(operator.eq, other)

    def __ne__(self, other):
        return self.__compare(operator.ne, other)
232

233

234
class _SupportsComparison(t.Protocol):
    """Structural type for objects orderable with "<" (enough for sorting
    and for the OPERATORS-based constraint checks)."""

    def __lt__(self, other: t.Any) -> bool: ...
236

237

238
# Any object that carries a version; mapped to a comparable value by the
# to_version() callable passed to filter_versioned_items().
_VersionedObj = t.TypeVar("_VersionedObj")
# A version-like value that supports ordering (see _SupportsComparison).
_ComparableObj = t.TypeVar("_ComparableObj", bound=_SupportsComparison)
240

241

242
def filter_versioned_items(
    items: t.Iterable[_VersionedObj],
    constraints: t.Iterable[t.Tuple[str, _ComparableObj]],
    to_version: t.Callable[[_VersionedObj], _ComparableObj],
    sort=False,
) -> t.List[_VersionedObj]:
    """Return the items whose version satisfies every constraint.

    Each constraint is an (operator-name, limit) pair looked up in OPERATORS.
    A comparison that raises VersionComparisonError is logged and treated as
    a non-match. With sort=True the surviving items are returned sorted by
    their version."""
    constraint_list = list(constraints)
    selected = []
    for candidate in items:
        candidate_version = to_version(candidate)
        satisfied = True
        for oper_name, limit in constraint_list:
            comparator = OPERATORS[oper_name]
            try:
                outcome = comparator(candidate_version, limit)
            except VersionComparisonError as err:
                log.debug(err)
                outcome = False
            # Evaluate every constraint (no early exit), as before.
            satisfied = satisfied and outcome
        if satisfied:
            selected.append(candidate)

    if sort:
        return sorted(selected, key=to_version)

    return selected
268

269

270
def filter_versions(
    versions: t.Iterable[str],
    constraints: t.Iterable[t.Tuple[str, str]],
    sort=False,
) -> t.List[str]:
    """Filter plain version strings, comparing with FallbackVersion
    semantics; thin wrapper around filter_versioned_items()."""
    wrapped_constraints = [
        (oper_str, FallbackVersion(limit)) for oper_str, limit in constraints
    ]
    return filter_versioned_items(
        versions,
        wrapped_constraints,
        to_version=FallbackVersion,
        sort=sort,
    )
281

282

283
def wrap_in_bwrap(cmdline, bwrap_args=None):
    """Prefix 'cmdline' with a bubblewrap invocation.

    The sandbox has no network or other namespaces shared, a fresh /dev,
    and the host's static system directories mounted read-only. Extra
    bwrap arguments, if given, are inserted before the "--" separator."""
    sandbox_cmd = ["bwrap", "--unshare-all", "--dev", "/dev"]
    for system_dir in ("/usr", "/lib", "/lib64", "/bin", "/proc"):
        sandbox_cmd += ["--ro-bind", system_dir, system_dir]
    if bwrap_args is not None:
        sandbox_cmd += bwrap_args
    sandbox_cmd.append("--")
    return sandbox_cmd + cmdline
290

291

292
def check_bwrap():
    """Return True if bubblewrap can successfully run a trivial command.

    Used to decide whether subprocesses can be sandboxed; failures are
    logged at debug level and reported as False."""
    try:
        subprocess.run(
            wrap_in_bwrap(["/bin/true"]),
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            text=True,
            check=True,
        )
        return True
    except FileNotFoundError as err:
        # bwrap binary not installed at all
        log.debug("bwrap unavailable: %s", err)
    except subprocess.CalledProcessError as err:
        # bwrap present but can't set up the sandbox (e.g. in a container)
        log.debug("bwrap unavailable: %s", err.output.strip())
    return False
308

309

310
class Command:
    """A subprocess invocation, optionally wrapped in a bubblewrap sandbox.

    The sandbox is used when explicitly requested, or by default whenever
    check_bwrap() reports that bwrap works on this host."""

    class SandboxPath(t.NamedTuple):
        """A host path to be bind-mounted into the sandbox."""

        # Absolute host path; mounted at the same location inside the sandbox.
        path: str
        # Mount read-only when True.
        readonly: bool = False
        # Use bwrap's "-try" variant (skip silently if the path is missing).
        optional: bool = False

        @property
        def bwrap_args(self) -> t.List[str]:
            """The bwrap command-line arguments binding this path."""
            prefix = "ro-" if self.readonly else ""
            suffix = "-try" if self.optional else ""
            return [f"--{prefix}bind{suffix}", self.path, self.path]

    # Full argv actually executed (includes the bwrap prefix when sandboxed).
    argv: t.List[str]
    # Working directory for the subprocess.
    cwd: str
    # Whether the command runs inside bubblewrap.
    sandbox: bool

    def __init__(
        self,
        argv: t.List[str],
        cwd: t.Optional[str] = None,
        stdin: t.Optional[int] = subprocess.PIPE,
        stdout: t.Optional[int] = subprocess.PIPE,
        stderr: t.Optional[int] = None,
        timeout: t.Optional[float] = None,
        sandbox: t.Optional[bool] = None,
        allow_network: bool = False,
        allow_paths: t.Optional[t.List[t.Union[str, SandboxPath]]] = None,
    ):
        self.cwd = cwd or os.getcwd()
        self.stdin = stdin
        self.stdout = stdout
        self.stderr = stderr
        self.timeout = timeout
        # If sandbox not explicitly enabled or disabled, try to use it if available,
        # and proceed unsandboxed if sandbox is unavailable
        if sandbox is None:
            self.sandbox = check_bwrap()
        else:
            self.sandbox = sandbox
        if self.sandbox:
            bwrap_args = ["--die-with-parent"]
            if allow_network:
                bwrap_args.append("--share-net")
            if allow_paths:
                for path in allow_paths:
                    # Plain strings become read-write, required binds.
                    if isinstance(path, str):
                        mount = self.SandboxPath(path)
                    else:
                        mount = path
                    bwrap_args.extend(mount.bwrap_args)
            self.argv = wrap_in_bwrap(argv, bwrap_args)
        else:
            self.argv = argv
        # Keep the unwrapped argv for __str__ display purposes.
        self._orig_argv = argv

    async def run(self, input_data: t.Optional[bytes] = None) -> t.Tuple[bytes, bytes]:
        """Run the command asynchronously and return (stdout, stderr).

        Raises subprocess.TimeoutExpired if self.timeout elapses (the
        process is killed first) and subprocess.CalledProcessError on a
        non-zero exit status."""
        proc = await asyncio.create_subprocess_exec(
            *self.argv,
            cwd=self.cwd,
            stdin=self.stdin,
            stdout=self.stdout,
            stderr=self.stderr,
        )
        try:
            stdout, stderr = await asyncio.wait_for(
                proc.communicate(input=input_data), self.timeout
            )
        except asyncio.TimeoutError as err:
            # Best-effort kill; the process may already have exited.
            try:
                proc.kill()
            except OSError as kill_err:
                log.warning("Failed to terminate timed out process: %s", kill_err)
            # wait_for only times out when a timeout was actually set.
            assert self.timeout is not None
            raise subprocess.TimeoutExpired(
                cmd=self.argv,
                timeout=self.timeout,
            ) from err
        if proc.returncode != 0:
            assert proc.returncode is not None
            raise subprocess.CalledProcessError(
                returncode=proc.returncode,
                cmd=self.argv,
                output=stdout,
                stderr=stderr,
            )
        return stdout, stderr

    def run_sync(self, input_data: t.Optional[bytes] = None) -> t.Tuple[bytes, bytes]:
        """Blocking counterpart of run(); returns (stdout, stderr).

        Raises subprocess.CalledProcessError on non-zero exit and
        subprocess.TimeoutExpired if self.timeout elapses."""
        proc = subprocess.run(
            self.argv,
            cwd=self.cwd,
            input=input_data,
            stdout=self.stdout,
            stderr=self.stderr,
            timeout=self.timeout,
            check=False,
        )
        proc.check_returncode()
        return proc.stdout, proc.stderr

    def __str__(self):
        # Show the original command line, without any bwrap wrapping.
        return " ".join(shlex.quote(a) for a in self._orig_argv)
412

413

414
async def git_ls_remote(url: str) -> t.Dict[str, str]:
    """List refs of the Git remote at 'url', as a {ref name: commit} dict.

    Runs "git ls-remote" sandboxed, with network access and just enough of
    /etc exposed for TLS certificates and DNS resolution. Raises
    CheckerQueryError if the command fails or times out."""
    ls_remote = Command(
        ["git", "ls-remote", "--exit-code", url],
        timeout=TIMEOUT_CONNECT,
        allow_network=True,
        allow_paths=[
            Command.SandboxPath("/etc/ssl", True, True),
            Command.SandboxPath("/etc/pki", True, True),
            Command.SandboxPath("/etc/resolv.conf", True, False),
        ],
    )
    try:
        stdout_raw, _ = await ls_remote.run()
    except (subprocess.CalledProcessError, subprocess.TimeoutExpired) as err:
        raise CheckerQueryError("Listing Git remote failed") from err

    refs = {}
    # Each output line is "<commit-hash>\t<ref-name>".
    for line in stdout_raw.decode().splitlines():
        commit, ref = line.split()
        refs[ref] = commit
    return refs
432

433

434
async def extract_appimage_version(appimg_io: t.IO):
    """
    Unpacks the AppImage's embedded squashfs payload — located immediately
    after the ELF section header table — with unsquashfs (run sandboxed via
    Command) into a temporary directory, and scrapes the version number out
    of the X-AppImage-Version key of the first .desktop file it finds.

    Returns None if no .desktop file is present. 'appimg_io' must be a real
    file on disk (it needs a .name for unsquashfs to open).
    """
    assert appimg_io.name

    with tempfile.TemporaryDirectory() as tmpdir:
        header = ELFFile(appimg_io).header
        # The squashfs image starts right after the section header table:
        # table offset + (number of entries * entry size).
        offset = header["e_shoff"] + header["e_shnum"] * header["e_shentsize"]

        unsquashfs_cmd = Command(
            ["unsquashfs", "-no-progress", "-offset", str(offset), appimg_io.name],
            cwd=tmpdir,
            allow_paths=[tmpdir, appimg_io.name],
            stdout=None,
        )
        log.info("Running %s", unsquashfs_cmd)
        await unsquashfs_cmd.run()

        # unsquashfs extracts into "squashfs-root"; the top-level .desktop
        # file carries the AppImage version.
        for desktop in (Path(tmpdir) / "squashfs-root").glob("*.desktop"):
            kf = GLib.KeyFile()
            kf.load_from_file(str(desktop), GLib.KeyFileFlags.NONE)
            return kf.get_string(GLib.KEY_FILE_DESKTOP_GROUP, "X-AppImage-Version")
459

460

461
def extract_deb_version(deb_io: t.IO):
    """Return the Version field from a Debian package's control file.

    'deb_io' must be backed by a real file on disk (apt_inst opens it by
    name)."""
    assert deb_io.name
    deb = apt_inst.DebFile(deb_io.name)
    control_data = deb.control.extractdata("control")
    return apt_pkg.TagSection(control_data).get("Version")
465

466

467
# Matches SSH-style GitHub remotes like "git@github.com:org/repo.git",
# capturing "org/repo"; the trailing ".git" is optional and excluded.
_GITHUB_URL_PATTERN = re.compile(
    r"""
        ^git@github.com:
        (?P<org_repo>[^/]+/[^/]+?)
        (?:\.git)?$
    """,
    re.VERBOSE,
)
475

476

477
def parse_github_url(url):
    """
    Parses the organization/repo part out of a git remote URL.

    Handles both https:// and git@github.com: style remotes. A trailing
    ".git" suffix is stripped in both cases (previously only the SSH branch
    stripped it, so "https://github.com/org/repo.git" yielded
    "org/repo.git").

    Raises ValueError if 'url' is neither form.
    """
    if url.startswith("https:"):
        o = urllib.parse.urlparse(url)
        # Drop the leading "/" and, for consistency with the SSH branch,
        # any ".git" suffix.
        return o.path[1:].removesuffix(".git")

    m = _GITHUB_URL_PATTERN.match(url)
    if m:
        return m.group("org_repo")
    raise ValueError(f"{url!r} doesn't look like a Git URL")
490

491

492
def read_json_manifest(manifest_path: Path):
    """Read manifest from 'manifest_path', which may contain C-style
    comments or multi-line strings (accepted by json-glib and hence
    flatpak-builder, but not Python's json module).

    Raises FileNotFoundError when the file does not exist; key order is
    preserved via OrderedDict."""

    # Round-trip through json-glib to get rid of comments, multi-line
    # strings, and any other invalid JSON
    parser = Json.Parser()
    try:
        parser.load_from_file(str(manifest_path))
    except GLib.Error as err:
        # Translate the GLib "no such file" error into the Python idiom.
        if err.matches(GLib.file_error_quark(), GLib.FileError.NOENT):
            raise FileNotFoundError(err.message) from err  # pylint: disable=no-member
        raise

    cleaned = Json.to_string(parser.get_root(), False)
    return json.loads(cleaned, object_pairs_hook=OrderedDict)
510

511

512
# Single shared YAML processor; note that dump_manifest() mutates its
# width and explicit_start settings before writing.
_yaml = YAML()
# ruamel preserves some formatting (such as comments and blank lines) but
# not the indentation of the source file. These settings match the style
# recommended at <https://github.com/flathub/flathub/wiki/YAML-Style-Guide>.
_yaml.indent(mapping=2, sequence=4, offset=2)
517

518

519
def read_yaml_manifest(manifest_path: Path):
    """Read a YAML manifest from 'manifest_path'.

    Returns a (data, has_yaml_header) tuple; has_yaml_header records
    whether the file starts with a "---" document-start marker, so that
    dump_manifest() can reproduce it."""
    with manifest_path.open("r") as f:
        # Peek at the first three characters, then rewind so the whole
        # stream is parsed. (Previously used a pointless walrus assignment
        # to "_".)
        has_yaml_header = f.read(3) == "---"
        f.seek(0)
        data = _yaml.load(f)
    return data, has_yaml_header
526

527

528
def read_manifest(manifest_path: t.Union[Path, str]):
    """Reads a JSON or YAML manifest from 'manifest_path', dispatching on
    the file extension (.yaml/.yml means YAML; anything else, JSON)."""
    path = Path(manifest_path)
    is_yaml = path.suffix in (".yaml", ".yml")
    return read_yaml_manifest(path) if is_yaml else read_json_manifest(path)
535

536

537
def dump_manifest(
    contents: t.Dict, manifest_path: t.Union[Path, str], has_yaml_header: bool = False
):
    """Writes back 'contents' to 'manifest_path'.

    For YAML, we make a best-effort attempt to preserve
    formatting; for JSON, we use the canonical 4-space indentation,
    but add a trailing newline if originally present.

    Style preferences (indentation, max line length, final newline) are
    taken from editorconfig when available; 'manifest_path' must be
    absolute for that lookup. Note that this mutates the shared module
    _yaml instance (width, explicit_start)."""
    manifest_path = Path(manifest_path)

    assert manifest_path.is_absolute()
    conf = editorconfig.get_properties(manifest_path)

    # Determine indentation preference
    indent: t.Union[str, int]
    if conf.get("indent_style") == "space":
        indent = int(conf.get("indent_size", 4))
    elif conf.get("indent_style") == "tab":
        indent = "\t"
    else:
        indent = 4

    # Determine max line length preference
    if max_line_length := conf.get("max_line_length"):
        try:
            # See https://sourceforge.net/p/ruamel-yaml/tickets/322/
            _yaml.width = int(max_line_length)  # type: ignore
        except ValueError:
            # e.g. max_line_length = "off"; keep ruamel's default width
            log.warning("Ignoring invalid max_line_length %r", max_line_length)

    # Determine trailing newline preference; when editorconfig doesn't say,
    # mirror whatever the existing file does. newline may be None if the
    # editorconfig value is neither "true" nor "false".
    newline: t.Optional[bool]
    if "insert_final_newline" in conf:
        newline = {"true": True, "false": False}.get(conf["insert_final_newline"])
    else:
        with manifest_path.open("r") as fp:
            newline = _check_newline(fp)

    with manifest_path.open("w", encoding="utf-8") as fp:
        if manifest_path.suffix in (".yaml", ".yml"):
            # Reproduce the "---" document-start marker only if the source had one.
            _yaml.explicit_start = has_yaml_header  # type: ignore[assignment]
            _yaml.dump(contents, fp)
        else:
            json.dump(obj=contents, fp=fp, indent=indent)
            if newline:
                fp.write("\n")
583

584

585
def init_logging(level=logging.DEBUG):
    """Configure application-wide logging at 'level'."""
    log_format = "%(levelname)-7s %(name)s: %(message)s"
    logging.basicConfig(level=level, format=log_format)
    if level == logging.DEBUG:
        # PyGithub's request logging is very chatty at DEBUG; keep it at INFO.
        github_logger = logging.getLogger("github.Requester")
        github_logger.setLevel(logging.INFO)
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc