my8100 / logparser / eaf03ac4-f2fc-4471-812d-1be371503e62

01 Jan 2025 05:49AM UTC · coverage: 87.404% (+6.2%) from 81.215%
Build: push · CI: circleci · Committer: my8100

Add SIGINT and SIGBREAK (Windows) to SIGTERM_PATTERN

To match Scrapy's code: https://github.com/scrapy/scrapy/blob/master/scrapy/utils/ossignal.py
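
The change itself is a small pattern update. As a rough illustration of the idea only (the names and the sample log line below are hypothetical, not the constants actually used in logparser), extending a SIGTERM-only regex so it also recognizes SIGINT and SIGBREAK in Scrapy's shutdown log lines might look like this:

    import re

    # Hypothetical sketch: logparser's real SIGTERM_PATTERN may differ in detail.
    # The point of the commit is to recognize SIGINT and SIGBREAK (Windows) in
    # addition to SIGTERM, mirroring scrapy/utils/ossignal.py.
    SHUTDOWN_SIGNAL_PATTERN = re.compile(r'Received SIG(TERM|INT|BREAK)')

    sample = "[scrapy.crawler] INFO: Received SIGINT, shutting down gracefully. Send again to force "
    print(bool(SHUTDOWN_SIGNAL_PATTERN.search(sample)))  # True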

1 of 1 new or added line in 1 file covered. (100.0%)

1 existing line in 1 file now uncovered.

798 of 913 relevant lines covered (87.4%)

4.37 hits per line
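
The headline figures can be reproduced from the raw counts in this summary; a quick check of the arithmetic:

    # Reproduce the figures quoted above from the reported counts.
    covered, relevant = 798, 913
    print(round(covered / relevant * 100, 3))  # 87.404 -> "coverage: 87.404%"
    print(round(87.404 - 81.215, 1))           # 6.2    -> "(+6.2%)"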

Source File: /logparser/utils.py (file coverage: 80.0%)
Hits column: 5✔ = covered (hit 5 times) · × = relevant line not covered · UNCOV = existing line newly uncovered in this build (now 0 hits) · blank = not a relevant line

Line Hits  | Source
   1       | # coding: utf-8
   2 5✔    | import json
   3 5✔    | import logging
   4 5✔    | import platform
   5 5✔    | import sys
   6       |
   7 5✔    | try:
   8 5✔    |     from scrapy import __version__ as scrapy_version
   9 5✔    | except ImportError:
  10 5✔    |     scrapy_version = '0.0.0'
  11 5✔    | from six.moves.urllib.parse import urlencode
  12 5✔    | from six.moves.urllib.request import urlopen
  13       |
  14 5✔    | from .__version__ import __version__
  15 5✔    | from .settings import (SCRAPYD_SERVER, SCRAPYD_LOGS_DIR, PARSE_ROUND_INTERVAL,
  16       |                        ENABLE_TELNET, OVERRIDE_TELNET_CONSOLE_HOST, LOG_ENCODING, LOG_EXTENSIONS,
  17       |                        LOG_HEAD_LINES, LOG_TAIL_LINES, LOG_CATEGORIES_LIMIT, JOBS_TO_KEEP, CHUNK_SIZE,
  18       |                        DELETE_EXISTING_JSON_FILES_AT_STARTUP, KEEP_DATA_IN_MEMORY, VERBOSE)
  19       |
  20       |
  21 5✔    | custom_settings = dict(
  22       |     scrapyd_server=SCRAPYD_SERVER,
  23       |     scrapyd_logs_dir=SCRAPYD_LOGS_DIR,
  24       |     parse_round_interval=PARSE_ROUND_INTERVAL,
  25       |     enable_telnet=ENABLE_TELNET,
  26       |     override_telnet_console_host=OVERRIDE_TELNET_CONSOLE_HOST,
  27       |     log_encoding=LOG_ENCODING,
  28       |     log_extensions=LOG_EXTENSIONS,
  29       |     log_head_lines=LOG_HEAD_LINES,
  30       |     log_tail_lines=LOG_TAIL_LINES,
  31       |     log_categories_limit=LOG_CATEGORIES_LIMIT,
  32       |     jobs_to_keep=JOBS_TO_KEEP,
  33       |     chunk_size=CHUNK_SIZE,
  34       |     delete_existing_json_files_at_startup=DELETE_EXISTING_JSON_FILES_AT_STARTUP,
  35       |     keep_data_in_memory=KEEP_DATA_IN_MEMORY,
  36       |     verbose=VERBOSE,
  37       |     # main_pid=0,
  38       |     # debug=False,
  39       |     # exit_timeout=0
  40       | )
  41       |
  42       |
  43 5✔    | def get_logger(name, level=logging.DEBUG):
  44 5✔    |     logger = logging.getLogger(name)
  45 5✔    |     handler = logging.StreamHandler()
  46 5✔    |     formatter = logging.Formatter(fmt="[%(asctime)s] %(levelname)-8s in %(name)s: %(message)s")
  47 5✔    |     handler.setFormatter(formatter)
  48 5✔    |     logger.addHandler(handler)
  49 5✔    |     logger.setLevel(level)
  50 5✔    |     return logger
  51       |
  52       |
  53 5✔    | def check_update(timeout=5):
  54 5✔    |     logger = get_logger(__name__)
  55 5✔    |     js = {}
  56 5✔    |     try:
  57 5✔    |         data = dict(custom_settings)
  58 5✔    |         data['os'] = platform.platform()
  59 5✔    |         data['py'] = '.'.join([str(n) for n in sys.version_info[:3]])
  60 5✔    |         data['logparser'] = __version__
  61 5✔    |         data['scrapy_version'] = scrapy_version
  62 5✔    |         if sys.version_info.major >= 3:
  63 5✔    |             data = urlencode(data).encode('utf-8', 'replace')
  64       |         else:
  65 UNCOV |             data = urlencode(data)
  66 5✔    |         req = urlopen('https://my8100.herokuapp.com/check_update', data=data, timeout=timeout)
  67 ×     |         text = req.read().decode('utf-8', 'replace')
  68 ×     |         js = json.loads(text)
  69       |         # print(js)
  70       |     # except Exception as err:
  71       |         # print(err)
  72 5✔    |     except:
  73 5✔    |         pass
  74       |     else:
  75 ×     |         if js.get('latest_version') == __version__:
  76 ×     |             logger.info("Running the latest version: %s", __version__)
  77       |         else:
  78 ×     |             if js.get('info', ''):
  79 ×     |                 logger.warning(js['info'])
  80 ×     |             if js.get('force_update', ''):
  81 ×     |                 sys.exit("Please update and then restart logparser. ")
  82 5✔    |     return js  # For test only
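
For context on what the tests exercise, here is a minimal usage sketch of the two helpers shown above (it assumes the logparser package is importable; it is not part of the report). On a Python 3 test run the Python 2 branch on line 65 can never execute, and the uncovered lines 67-68 and 75-81 are reached only when the update-check request succeeds:

    # Minimal usage sketch; assumes logparser is installed.
    from logparser.utils import check_update, get_logger

    logger = get_logger('demo')            # DEBUG level by default
    logger.info('stream handler attached')

    # check_update() swallows all exceptions (bare `except` on line 72) and
    # returns {} when the remote endpoint cannot be reached.
    js = check_update(timeout=5)
    print(js)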