• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

spesmilo / electrum / 4911558783926272

11 Apr 2025 05:06PM UTC coverage: 60.306% (+0.03%) from 60.278%
4911558783926272

Pull #9729

CirrusCI

ecdsa
recursive config file

move plugin variables into sub dictionaries of user config
Pull Request #9729: recursive config file

26 of 35 new or added lines in 2 files covered. (74.29%)

92 existing lines in 6 files now uncovered.

21586 of 35794 relevant lines covered (60.31%)

3.01 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

61.69
/electrum/util.py
1
# Electrum - lightweight Bitcoin client
2
# Copyright (C) 2011 Thomas Voegtlin
3
#
4
# Permission is hereby granted, free of charge, to any person
5
# obtaining a copy of this software and associated documentation files
6
# (the "Software"), to deal in the Software without restriction,
7
# including without limitation the rights to use, copy, modify, merge,
8
# publish, distribute, sublicense, and/or sell copies of the Software,
9
# and to permit persons to whom the Software is furnished to do so,
10
# subject to the following conditions:
11
#
12
# The above copyright notice and this permission notice shall be
13
# included in all copies or substantial portions of the Software.
14
#
15
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
16
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
17
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
18
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
19
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
20
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
21
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
22
# SOFTWARE.
23
import binascii
5✔
24
import concurrent.futures
5✔
25
import logging
5✔
26
import os, sys, re
5✔
27
from collections import defaultdict, OrderedDict
5✔
28
from concurrent.futures.process import ProcessPoolExecutor
5✔
29
from typing import (NamedTuple, Union, TYPE_CHECKING, Tuple, Optional, Callable, Any,
5✔
30
                    Sequence, Dict, Generic, TypeVar, List, Iterable, Set, Awaitable)
31
from datetime import datetime, timezone
5✔
32
import decimal
5✔
33
from decimal import Decimal
5✔
34
import urllib
5✔
35
import threading
5✔
36
import hmac
5✔
37
import hashlib
5✔
38
import stat
5✔
39
import locale
5✔
40
import asyncio
5✔
41
import urllib.request, urllib.parse, urllib.error
5✔
42
import builtins
5✔
43
import json
5✔
44
import time
5✔
45
import ssl
5✔
46
import ipaddress
5✔
47
from ipaddress import IPv4Address, IPv6Address
5✔
48
import random
5✔
49
import secrets
5✔
50
import functools
5✔
51
from functools import partial
5✔
52
from abc import abstractmethod, ABC
5✔
53
import socket
5✔
54
import enum
5✔
55
from contextlib import nullcontext
5✔
56
import traceback
5✔
57

58
import attr
5✔
59
import aiohttp
5✔
60
from aiohttp_socks import ProxyConnector, ProxyType
5✔
61
import aiorpcx
5✔
62
import certifi
5✔
63
import dns.resolver
5✔
64

65
from .i18n import _
5✔
66
from .logging import get_logger, Logger
5✔
67

68
if TYPE_CHECKING:
5✔
69
    from .network import Network, ProxySettings
×
70
    from .interface import Interface
×
71
    from .simple_config import SimpleConfig
×
72
    from .paymentrequest import PaymentRequest
×
73

74

75
_logger = get_logger(__name__)
5✔
76

77

78
def inv_dict(d):
    """Return a new dict mapping each value of *d* back to its key."""
    return dict(zip(d.values(), d.keys()))
80

81

82
def all_subclasses(cls) -> Set:
    """Return all (transitive) subclasses of cls."""
    direct = set(cls.__subclasses__())
    indirect = set()
    for subclass in direct:
        indirect |= all_subclasses(subclass)
    return direct | indirect
88

89

90
ca_path = certifi.where()  # path to the CA certificate bundle shipped via certifi (used for TLS)


# base unit name -> number of decimal places relative to satoshis
base_units = {'BTC':8, 'mBTC':5, 'bits':2, 'sat':0}
base_units_inverse = inv_dict(base_units)  # decimal places -> base unit name
base_units_list = ['BTC', 'mBTC', 'bits', 'sat']  # list(dict) does not guarantee order

DECIMAL_POINT_DEFAULT = 5  # mBTC
98

99

100
# Raised when a base unit name or decimal-point value is not one of the known base units.
class UnknownBaseUnit(Exception): pass
101

102

103
def decimal_point_to_base_unit_name(dp: int) -> str:
    """Map a decimal-point count to its base unit name, e.g. 8 -> "BTC".

    Raises UnknownBaseUnit for unknown precisions.
    """
    name = base_units_inverse.get(dp)
    if name is None:
        raise UnknownBaseUnit(dp)
    return name
109

110

111
def base_unit_name_to_decimal_point(unit_name: str) -> int:
    """Returns the max number of digits allowed after the decimal point.

    e.g. "BTC" -> 8. Raises UnknownBaseUnit for unknown unit names.
    """
    dp = base_units.get(unit_name)
    if dp is None:  # note: 0 ('sat') is a valid value, so test for None explicitly
        raise UnknownBaseUnit(unit_name)
    return dp
118

119
def parse_max_spend(amt: Any) -> Optional[int]:
    """Checks if given amount is "spend-max"-like.
    Returns None or the positive integer weight for "max". Never raises.

    When creating invoices and on-chain txs, the user can specify to send "max".
    This is done by setting the amount to '!'. Splitting max between multiple
    tx outputs is also possible, and custom weights (positive ints) can also be used.
    For example, to send 40% of all coins to address1, and 60% to address2:
    ```
    address1, 2!
    address2, 3!
    ```
    """
    if not isinstance(amt, str) or not amt.endswith('!'):
        return None
    weight_text = amt[:-1]
    if not weight_text:  # a bare '!' means weight 1
        return 1
    try:
        weight = int(weight_text)
    except ValueError:
        return None
    return weight if weight > 0 else None
144

145
class NotEnoughFunds(Exception):
    """Raised when available funds are insufficient for the requested operation."""
    def __str__(self):
        return _("Insufficient funds")
148

149

150
class UneconomicFee(Exception):
    """Raised when a transaction's fee would exceed the funds gained from it."""
    def __str__(self):
        return _("The fee for the transaction is higher than the funds gained from it.")
153

154

155
class NoDynamicFeeEstimates(Exception):
    """Raised when dynamic fee estimates are requested but not available."""
    def __str__(self):
        return _('Dynamic fee estimates not available')
158

159

160
class BelowDustLimit(Exception):
    """Raised when an amount is below the dust limit."""
    pass
162

163

164
class InvalidPassword(Exception):
    """Raised when a password check fails; an optional custom message may be given."""

    def __init__(self, message: Optional[str] = None):
        # message: optional custom text; if None, a generic translated text is used
        self.message = message

    def __str__(self):
        if self.message is not None:
            return str(self.message)
        return _("Incorrect password")
173

174

175
class AddTransactionException(Exception):
    """Base class for errors raised when a transaction cannot be added."""
    pass
177

178

179
class UnrelatedTransactionException(AddTransactionException):
    """Raised when a transaction does not concern the wallet it was added to."""
    def __str__(self):
        return _("Transaction is unrelated to this wallet.")
182

183

184
class FileImportFailed(Exception):
    """Raised when importing data from a file fails; detail goes into `message`."""
    def __init__(self, message=''):
        self.message = str(message)

    def __str__(self):
        return _("Failed to import from file.") + "\n" + self.message
190

191

192
class FileExportFailed(Exception):
    """Raised when exporting data to a file fails; detail goes into `message`."""
    def __init__(self, message=''):
        self.message = str(message)

    def __str__(self):
        return _("Failed to export to file.") + "\n" + self.message
198

199

200
class WalletFileException(Exception):
    """Error related to the wallet file."""
    def __init__(self, message='', *, should_report_crash: bool = False):
        Exception.__init__(self, message)
        # if True, callers may treat this as a bug and offer a crash report
        self.should_report_crash = should_report_crash
204

205

206
# Generic exception for bitcoin-related errors.
class BitcoinException(Exception): pass
207

208

209
class UserFacingException(Exception):
    """Exception that contains information intended to be shown to the user.

    The str() of the exception may be displayed verbatim in a UI.
    """
211

212

213
# Invoice-related error; inherits user-facing semantics from UserFacingException.
class InvoiceError(UserFacingException): pass
214

215

216
class NetworkOfflineException(UserFacingException):
    """Can be raised if we are running in offline mode (--offline flag)
    and the user requests an operation that requires the network.
    """
    def __str__(self):
        # translated text, shown directly to the user
        return _("You are offline.")
222

223

224
# Throw this exception to unwind the stack like when an error occurs.
# However unlike other exceptions the user won't be informed.
class UserCancelled(Exception):
    '''Raised to unwind the stack when the user aborts; suppressed from the user.'''
    pass
229

230

231
def to_decimal(x: Union[str, float, int, Decimal]) -> Decimal:
    """Convert x to Decimal, going through str to avoid float representation noise.

    e.g. Decimal(41754.681) -> Decimal('41754.680999999996856786310672760009765625'),
    but  Decimal("41754.681") -> Decimal('41754.681').
    Decimals are passed through unchanged.
    """
    return x if isinstance(x, Decimal) else Decimal(str(x))
240

241

242
# note: this is not a NamedTuple as then its json encoding cannot be customized
243
class Satoshis(object):
    """Wrapper for an amount in satoshis; `value` is an int or Decimal
    (Decimal is used when the amount carries msat precision).
    """
    __slots__ = ('value',)

    def __new__(cls, value):
        self = super(Satoshis, cls).__new__(cls)
        # note: 'value' sometimes has msat precision
        assert isinstance(value, (int, Decimal)), f"unexpected type for {value=!r}"
        self.value = value
        return self

    def __repr__(self):
        return f'Satoshis({self.value})'

    def __str__(self):
        # note: precision is truncated to satoshis here
        return format_satoshis(self.value)

    def __eq__(self, other):
        # fix: comparing against a non-Satoshis object used to raise
        # AttributeError; returning NotImplemented lets Python fall back
        # to its default handling (result: unequal).
        if not isinstance(other, Satoshis):
            return NotImplemented
        return self.value == other.value

    def __ne__(self, other):
        return not (self == other)

    def __add__(self, other):
        return Satoshis(self.value + other.value)
268

269

270
# note: this is not a NamedTuple as then its json encoding cannot be customized
271
class Fiat(object):
    """A fiat amount `value` (a Decimal, None, or NaN for missing data) in currency `ccy`."""
    __slots__ = ('value', 'ccy')

    def __new__(cls, value: Optional[Decimal], ccy: str):
        instance = super().__new__(cls)
        instance.ccy = ccy
        if value is not None and not isinstance(value, Decimal):
            raise TypeError(f"value should be Decimal or None, not {type(value)}")
        instance.value = value
        return instance

    def __repr__(self):
        return f'Fiat({self})'

    def __str__(self):
        if self.value is None or self.value.is_nan():
            return _('No Data')
        return "{:.2f}".format(self.value)

    def to_ui_string(self):
        """Like str(), but with the currency code appended."""
        if self.value is None or self.value.is_nan():
            return _('No Data')
        return "{:.2f}".format(self.value) + ' ' + self.ccy

    def __eq__(self, other):
        if not isinstance(other, Fiat) or self.ccy != other.ccy:
            return False
        # NaN != NaN in general, but two "missing data" amounts compare equal here
        if (isinstance(self.value, Decimal) and isinstance(other.value, Decimal)
                and self.value.is_nan() and other.value.is_nan()):
            return True
        return self.value == other.value

    def __ne__(self, other):
        return not (self == other)

    def __add__(self, other):
        assert self.ccy == other.ccy
        return Fiat(self.value + other.value, self.ccy)
313

314

315
class MyEncoder(json.JSONEncoder):
    """JSON encoder that knows how to serialize electrum's own types
    (Transaction, TxOutput, Satoshis, Fiat, ...) plus a few stdlib types
    json does not handle (Decimal, datetime, set, bytes).
    """
    def default(self, obj):
        # note: this does not get called for namedtuples :(  https://bugs.python.org/issue30343
        # local import to avoid a circular dependency at module load time
        from .transaction import Transaction, TxOutput
        if isinstance(obj, Transaction):
            return obj.serialize()
        if isinstance(obj, TxOutput):
            return obj.to_legacy_tuple()
        if isinstance(obj, Satoshis):
            return str(obj)
        if isinstance(obj, Fiat):
            return str(obj)
        if isinstance(obj, Decimal):
            return str(obj)
        if isinstance(obj, datetime):
            # ISO format with space separator; trailing [:-3] trims the last 3 chars
            return obj.isoformat(' ')[:-3]
        if isinstance(obj, set):
            return list(obj)
        if isinstance(obj, bytes): # for nametuples in lnchannel
            return obj.hex()
        # generic escape hatch: any object exposing a callable to_json()
        if hasattr(obj, 'to_json') and callable(obj.to_json):
            return obj.to_json()
        return super(MyEncoder, self).default(obj)
338

339

340
class ThreadJob(Logger):
    """A job that is run periodically from a thread's main loop.  run() is
    called from that thread's context.
    """

    def __init__(self):
        Logger.__init__(self)

    def run(self):
        """Called periodically from the thread"""
        # default implementation does nothing; subclasses override this
        pass
351

352
class DebugMem(ThreadJob):
    '''A handy class for debugging GC memory leaks'''
    def __init__(self, classes, interval=30):
        ThreadJob.__init__(self)
        self.next_time = 0          # timestamp after which the next scan may run
        self.classes = classes      # classes whose live instances get counted
        self.interval = interval    # seconds between scans

    def mem_stats(self):
        """Log the number of live instances of each tracked class."""
        import gc
        self.logger.info("Start memscan")
        gc.collect()
        objmap = defaultdict(list)
        for obj in gc.get_objects():
            for class_ in self.classes:
                if isinstance(obj, class_):
                    objmap[class_].append(obj)
        for class_, objs in objmap.items():
            self.logger.info(f"{class_.__name__}: {len(objs)}")
        self.logger.info("Finish memscan")

    def run(self):
        # rate-limited: only scan once per self.interval seconds
        if time.time() > self.next_time:
            self.mem_stats()
            self.next_time = time.time() + self.interval
377

378
class DaemonThread(threading.Thread, Logger):
    """ daemon thread that terminates cleanly """

    LOGGING_SHORTCUT = 'd'

    def __init__(self):
        threading.Thread.__init__(self)
        Logger.__init__(self)
        self.parent_thread = threading.current_thread()  # used by is_running() to detect parent death
        self.running = False
        self.running_lock = threading.Lock()
        self.job_lock = threading.Lock()
        self.jobs = []  # objects with a run() method, executed by run_jobs()
        self.stopped_event = threading.Event()        # set when fully stopped
        self.stopped_event_async = asyncio.Event()    # set when fully stopped
        self.wake_up_event = threading.Event()  # for perf optimisation of polling in run()

    def add_jobs(self, jobs):
        """Register additional jobs to be executed by run_jobs()."""
        with self.job_lock:
            self.jobs.extend(jobs)

    def run_jobs(self):
        # Don't let a throwing job disrupt the thread, future runs of
        # itself, or other jobs.  This is useful protection against
        # malformed or malicious server responses
        with self.job_lock:
            for job in self.jobs:
                try:
                    job.run()
                except Exception:  # fix: the bound exception variable was unused
                    self.logger.exception('')

    def remove_jobs(self, jobs):
        """Unregister previously added jobs (raises ValueError for unknown jobs)."""
        with self.job_lock:
            for job in jobs:
                self.jobs.remove(job)

    def start(self):
        with self.running_lock:
            self.running = True
        return threading.Thread.start(self)

    def is_running(self):
        # also considered not-running once the parent thread has died
        with self.running_lock:
            return self.running and self.parent_thread.is_alive()

    def stop(self):
        with self.running_lock:
            self.running = False
            # pulse the event so a poll loop blocked on it wakes and re-checks is_running()
            self.wake_up_event.set()
            self.wake_up_event.clear()

    def on_stop(self):
        """To be called once the thread's main loop has exited; signals full stop."""
        if 'ANDROID_DATA' in os.environ:
            import jnius
            jnius.detach()
            self.logger.info("jnius detach")
        self.logger.info("stopped")
        self.stopped_event.set()
        loop = get_asyncio_loop()
        # the asyncio event must be set from the event loop's own thread
        loop.call_soon_threadsafe(self.stopped_event_async.set)
439

440

441
def print_stderr(*args):
    """Write the stringified args to stderr, space-separated, newline-terminated."""
    line = " ".join(str(item) for item in args)
    sys.stderr.write(line + "\n")
    sys.stderr.flush()
445

446
def print_msg(*args):
    """Write the stringified args to stdout, space-separated, newline-terminated."""
    line = " ".join(str(item) for item in args)
    sys.stdout.write(line + "\n")
    sys.stdout.flush()
451

452
def json_encode(obj):
    """Serialize obj to a pretty-printed JSON string using MyEncoder,
    falling back to repr() when it is not JSON-serializable."""
    try:
        return json.dumps(obj, sort_keys = True, indent = 4, cls=MyEncoder)
    except TypeError:
        return repr(obj)
458

459
def json_decode(x):
    """Parse x as JSON (floats become Decimal); return x unchanged on any failure."""
    try:
        result = json.loads(x, parse_float=Decimal)
    except Exception:
        result = x
    return result
464

465
def json_normalize(x):
    """Normalize x into plain JSON-encodable types via an encode/decode round-trip."""
    # note: The return value of commands, when going through the JSON-RPC interface,
    #       is json-encoded. The encoder used there cannot handle some types, e.g. electrum.util.Satoshis.
    # note: We should not simply do "json_encode(x)" here, as then later x would get doubly json-encoded.
    # see #5868
    return json_decode(json_encode(x))
471

472

473
# taken from Django Source Code
474
def constant_time_compare(val1, val2):
    """Return True if the two strings are equal, False otherwise.

    Uses hmac.compare_digest so the comparison takes constant time,
    avoiding timing side channels.
    """
    return hmac.compare_digest(to_bytes(val1, 'utf8'), to_bytes(val2, 'utf8'))
477

478

479
_profiler_logger = _logger.getChild('profiler')  # dedicated child logger for @profiler output
480
def profiler(func=None, *, min_threshold: Union[int, float, None] = None):
    """Function decorator that logs execution time.

    min_threshold: if set, only log if time taken is higher than threshold
    NOTE: does not work with async methods.
    """
    if func is None:  # to make "@profiler(...)" work. (in addition to bare "@profiler")
        return partial(profiler, min_threshold=min_threshold)
    @functools.wraps(func)  # fix: preserve __name__/__doc__/etc. of the wrapped function
    def do_profile(*args, **kw_args):
        name = func.__qualname__
        t0 = time.time()
        o = func(*args, **kw_args)
        t = time.time() - t0
        if min_threshold is None or t > min_threshold:
            _profiler_logger.debug(f"{name} {t:,.4f} sec")
        return o
    return do_profile
497

498

499
class AsyncHangDetector:
    """Context manager that logs every `n` seconds if encapsulated context still has not exited."""

    def __init__(
        self,
        *,
        period_sec: int = 15,  # how often to log while the context remains open
        message: str,  # text to log on each period
        logger: logging.Logger = None,  # defaults to this module's logger
    ):
        self.period_sec = period_sec
        self.message = message
        self.logger = logger or _logger

    async def _monitor(self):
        # note: this assumes that the event loop itself is not blocked
        t0 = time.monotonic()
        while True:
            await asyncio.sleep(self.period_sec)
            t1 = time.monotonic()
            self.logger.info(f"{self.message} (after {t1 - t0:.2f} sec)")

    async def __aenter__(self):
        # start the watchdog task alongside the wrapped context
        self.mtask = asyncio.create_task(self._monitor())

    async def __aexit__(self, exc_type, exc, tb):
        # context exited (normally or with an exception): stop the watchdog
        self.mtask.cancel()
526

527

528
def android_ext_dir():
    """Return the primary external storage path (Android only)."""
    from android.storage import primary_external_storage_path
    return primary_external_storage_path()
531

532
def android_backup_dir():
    """Return (creating it if needed) the app's backup directory on
    Android external storage (Android only)."""
    pkgname = get_android_package_name()
    d = os.path.join(android_ext_dir(), pkgname)
    # fix: makedirs(..., exist_ok=True) avoids the check-then-create race
    # of `if not exists: mkdir` and is idempotent across concurrent callers
    os.makedirs(d, exist_ok=True)
    return d
538

539
def android_data_dir():
    """Return the app-private data directory (Android only, via pyjnius)."""
    import jnius
    PythonActivity = jnius.autoclass('org.kivy.android.PythonActivity')
    return PythonActivity.mActivity.getFilesDir().getPath() + '/data'
543

544
def ensure_sparse_file(filename):
    """Mark *filename* as a sparse file where the OS requires it.

    On modern Linux, no need to do anything.
    On Windows, need to explicitly mark the file.
    """
    if os.name == "nt":
        try:
            os.system('fsutil sparse setflag "{}" 1'.format(filename))
        except Exception as e:
            # fix: log the actual filename instead of the "(unknown)" placeholder
            _logger.info(f'error marking file {filename!r} as sparse: {e}')
552

553

554
def get_headers_dir(config):
    """Return the directory used for storing headers (currently the config path itself)."""
    return config.path
556

557

558
def assert_datadir_available(config_path):
    """Raise FileNotFoundError if the Electrum datadir no longer exists."""
    if not os.path.exists(config_path):
        raise FileNotFoundError(
            'Electrum datadir does not exist. Was it deleted while running?' + '\n' +
            'Should be at {}'.format(config_path))
566

567

568
def assert_file_in_datadir_available(path, config_path):
    """Raise FileNotFoundError if *path* is missing; blames the datadir first
    if the whole datadir is gone."""
    if not os.path.exists(path):
        # distinguish "entire datadir gone" from "just this file missing"
        assert_datadir_available(config_path)
        raise FileNotFoundError(
            'Cannot find file but datadir is there.' + '\n' +
            'Should be at {}'.format(path))
576

577

578
def standardize_path(path):
    # note: os.path.realpath() is not used, as on Windows it can return non-working paths (see #8495).
    #       This means that we don't resolve symlinks!
    expanded = os.path.expanduser(path)
    absolute = os.path.abspath(expanded)
    return os.path.normcase(absolute)
586

587

588
def get_new_wallet_name(wallet_folder: str) -> str:
    """Returns a file basename for a new wallet to be used.
    Can raise OSError.
    """
    # fix: snapshot the directory listing once instead of calling
    # os.listdir() on every loop iteration (was O(n^2) in #wallets)
    existing = set(os.listdir(wallet_folder))
    i = 1
    while "wallet_%d" % i in existing:
        i += 1
    return "wallet_%d" % i
600

601

602
def is_android_debug_apk() -> bool:
    """Return True iff running on Android AND the APK is a debug build."""
    is_android = 'ANDROID_DATA' in os.environ
    if not is_android:
        return False
    from jnius import autoclass
    pkgname = get_android_package_name()
    # read the DEBUG flag from the app's generated BuildConfig class
    build_config = autoclass(f"{pkgname}.BuildConfig")
    return bool(build_config.DEBUG)
610

611

612
def get_android_package_name() -> str:
    """Return the Android package name of the running app (Android only)."""
    is_android = 'ANDROID_DATA' in os.environ
    assert is_android
    from jnius import autoclass
    from android.config import ACTIVITY_CLASS_NAME
    activity = autoclass(ACTIVITY_CLASS_NAME).mActivity
    pkgname = str(activity.getPackageName())
    return pkgname
620

621

622
def assert_bytes(*args):
    """
    porting helper, assert args type

    Asserts every arg is bytes/bytearray; on failure prints the actual
    types before re-raising (to aid debugging).
    """
    try:
        for x in args:
            assert isinstance(x, (bytes, bytearray))
    except Exception:
        print('assert bytes failed', list(map(type, args)))
        raise
632

633

634
def assert_str(*args):
    """
    porting helper, assert args type

    Asserts every arg is a str.
    """
    for x in args:
        assert isinstance(x, str)
640

641

642
def to_string(x, enc) -> str:
    """Decode bytes/bytearray using enc; pass str through; TypeError otherwise."""
    if isinstance(x, str):
        return x
    if isinstance(x, (bytes, bytearray)):
        return x.decode(enc)
    raise TypeError("Not a string or bytes like object")
649

650

651
def to_bytes(something, encoding='utf8') -> bytes:
    """
    cast string to bytes() like object, but for python2 support it's bytearray copy
    """
    if isinstance(something, str):
        return something.encode(encoding)
    if isinstance(something, bytes):
        return something
    if isinstance(something, bytearray):
        return bytes(something)
    raise TypeError("Not a string or bytes like object")
663

664

665
bfh = bytes.fromhex  # alias: "bytes from hex"
666

667

668
def xor_bytes(a: bytes, b: bytes) -> bytes:
    """XOR a and b bytewise, truncated to the shorter of the two."""
    # zip() stops at the shorter input, matching min(len(a), len(b))
    return bytes(x ^ y for x, y in zip(a, b))
672

673

674
def user_dir():
    """Return the default electrum data directory for this platform,
    or None if no home directory can be determined.
    ELECTRUMDIR env var overrides everything."""
    if "ELECTRUMDIR" in os.environ:
        return os.environ["ELECTRUMDIR"]
    elif 'ANDROID_DATA' in os.environ:
        return android_data_dir()
    elif os.name == 'posix':
        return os.path.join(os.environ["HOME"], ".electrum")
    elif "APPDATA" in os.environ:
        return os.path.join(os.environ["APPDATA"], "Electrum")
    elif "LOCALAPPDATA" in os.environ:
        return os.path.join(os.environ["LOCALAPPDATA"], "Electrum")
    else:
        #raise Exception("No home directory found in environment variables.")
        return
688

689

690
def resource_path(*parts):
    """Return an absolute path for a resource inside the electrum package dir."""
    return os.path.join(pkg_dir, *parts)
692

693

694
# absolute path to python package folder of electrum ("lib")
695
pkg_dir = os.path.split(os.path.realpath(__file__))[0]  # resolved once at import time
696

697

698
def is_valid_email(s):
    """Loose syntactic check that s looks like an email address."""
    # note: re.match only anchors at the start; trailing text is accepted
    return bool(re.match(r"[^@]+@[^@]+\.[^@]+", s))
701

702

703
def is_hash256_str(text: Any) -> bool:
    """True iff text is a 64-character hex string (e.g. a sha256 hash hex)."""
    return isinstance(text, str) and len(text) == 64 and is_hex_str(text)
707

708

709
def is_hex_str(text: Any) -> bool:
    """True iff text is a valid hex string with no whitespace and even length."""
    if not isinstance(text, str):
        return False
    try:
        raw = bytes.fromhex(text)
    except Exception:
        return False
    # bytes.fromhex tolerates internal whitespace; require exactly 2 chars per byte
    return len(text) == 2 * len(raw)
719

720

721
def is_integer(val: Any) -> bool:
    # note: isinstance(True, int) is True, so bools are also accepted here
    return isinstance(val, int)
723

724

725
def is_non_negative_integer(val: Any) -> bool:
    """True iff val is an integer and >= 0."""
    return is_integer(val) and val >= 0
729

730

731
def is_int_or_float(val: Any) -> bool:
    # note: bools pass as well, since isinstance(True, int) is True
    return isinstance(val, (int, float))
733

734

735
def is_non_negative_int_or_float(val: Any) -> bool:
    """True iff val is an int or float and >= 0."""
    return is_int_or_float(val) and val >= 0
739

740

741
def chunks(items, size: int):
    """Break up items, an iterable, into chunks of length size.

    Note: items must support len() and slicing (i.e. be a sequence).
    """
    if size < 1:
        raise ValueError(f"size must be positive, not {repr(size)}")
    start = 0
    total = len(items)
    while start < total:
        yield items[start: start + size]
        start += size
747

748

749
def format_satoshis_plain(
        x: Union[int, float, Decimal, str],  # amount in satoshis,
        *,
        decimal_point: int = 8,  # how much to shift decimal point to left (default: sat->BTC)
) -> str:
    """Display a satoshi amount scaled.  Always uses a '.' as a decimal
    point and has no thousands separator"""
    if parse_max_spend(x):
        return f'max({x})'
    assert isinstance(x, (int, float, Decimal)), f"{x!r} should be a number"
    divisor = pow(10, decimal_point)
    scaled = Decimal(x) / divisor
    return "{:.8f}".format(scaled).rstrip('0').rstrip('.')
761

762

763
# Check that Decimal precision is sufficient.
# We need at the very least ~20, as we deal with msat amounts, and
# log10(21_000_000 * 10**8 * 1000) ~= 18.3
# decimal.DefaultContext.prec == 28 by default, but it is mutable.
# We enforce that we have at least that available.
assert decimal.getcontext().prec >= 28, f"PyDecimal precision too low: {decimal.getcontext().prec}"

# separators used by format_satoshis; hard-coded instead of locale-dependent
# DECIMAL_POINT = locale.localeconv()['decimal_point']  # type: str
DECIMAL_POINT = "."
THOUSANDS_SEP = " "
# format_satoshis relies on single-character separators
assert len(DECIMAL_POINT) == 1, f"DECIMAL_POINT has unexpected len. {DECIMAL_POINT!r}"
assert len(THOUSANDS_SEP) == 1, f"THOUSANDS_SEP has unexpected len. {THOUSANDS_SEP!r}"
775

776

777
def format_satoshis(
        x: Union[int, float, Decimal, str, None],  # amount in satoshis
        *,
        num_zeros: int = 0,
        decimal_point: int = 8,  # how much to shift decimal point to left (default: sat->BTC)
        precision: int = 0,  # extra digits after satoshi precision
        is_diff: bool = False,  # if True, enforce a leading sign (+/-)
        whitespaces: bool = False,  # if True, add whitespaces, to align numbers in a column
        add_thousands_sep: bool = False,  # if True, add whitespaces, for better readability of the numbers
) -> str:
    """Format a satoshi amount as a human-readable decimal string.

    Returns 'unknown' for None, and 'max(...)' for spend-max-like strings.
    """
    if x is None:
        return 'unknown'
    if parse_max_spend(x):
        return f'max({x})'
    assert isinstance(x, (int, float, Decimal)), f"{x!r} should be a number"
    # lose redundant precision
    x = Decimal(x).quantize(Decimal(10) ** (-precision))
    # format string
    overall_precision = decimal_point + precision  # max digits after final decimal point
    decimal_format = "." + str(overall_precision) if overall_precision > 0 else ""
    if is_diff:
        decimal_format = '+' + decimal_format
    # initial result
    scale_factor = pow(10, decimal_point)
    result = ("{:" + decimal_format + "f}").format(x / scale_factor)
    if "." not in result: result += "."
    result = result.rstrip('0')
    # add extra decimal places (zeros)
    integer_part, fract_part = result.split(".")
    if len(fract_part) < num_zeros:
        fract_part += "0" * (num_zeros - len(fract_part))
    # add whitespaces as thousands' separator for better readability of numbers
    if add_thousands_sep:
        # strip the sign before int() re-grouping, then restore it
        sign = integer_part[0] if integer_part[0] in ("+", "-") else ""
        if sign == "-":
            integer_part = integer_part[1:]
        integer_part = "{:,}".format(int(integer_part)).replace(',', THOUSANDS_SEP)
        integer_part = sign + integer_part
        # group fractional digits in threes as well
        fract_part = THOUSANDS_SEP.join(fract_part[i:i+3] for i in range(0, len(fract_part), 3))
    result = integer_part + DECIMAL_POINT + fract_part
    # add leading/trailing whitespaces so that numbers can be aligned in a column
    if whitespaces:
        target_fract_len = overall_precision
        target_integer_len = 14 - decimal_point  # should be enough for up to unsigned 999999 BTC
        if add_thousands_sep:
            # account for the separator characters inserted above
            target_fract_len += max(0, (target_fract_len - 1) // 3)
            target_integer_len += max(0, (target_integer_len - 1) // 3)
        # add trailing whitespaces
        result += " " * (target_fract_len - len(fract_part))
        # add leading whitespaces
        target_total_len = target_integer_len + 1 + target_fract_len
        result = " " * (target_total_len - len(result)) + result
    return result
830

831

832
FEERATE_PRECISION = 1  # num fractional decimal places for sat/byte fee rates
_feerate_quanta = Decimal(10) ** (-FEERATE_PRECISION)  # quantum used by quantize_feerate()
# unit-name strings shown in the UI
UI_UNIT_NAME_FEERATE_SAT_PER_VBYTE = "sat/vbyte"
UI_UNIT_NAME_FEERATE_SAT_PER_VB = "sat/vB"
UI_UNIT_NAME_TXSIZE_VBYTES = "vbytes"
UI_UNIT_NAME_MEMPOOL_MB = "vMB"
838

839

840
def format_fee_satoshis(fee, *, num_zeros=0, precision=None):
    """Format a fee rate; precision defaults to FEERATE_PRECISION."""
    if precision is None:
        precision = FEERATE_PRECISION
    capped_zeros = min(num_zeros, FEERATE_PRECISION)  # no more zeroes than available prec
    return format_satoshis(fee, num_zeros=capped_zeros, decimal_point=0, precision=precision)
845

846

847
def quantize_feerate(fee) -> Union[None, Decimal, int]:
    """Strip sat/byte fee rate of excess precision."""
    if fee is None:
        return None
    quantized = Decimal(fee).quantize(_feerate_quanta, rounding=decimal.ROUND_HALF_DOWN)
    return quantized
852

853

854
def timestamp_to_datetime(timestamp: Union[int, float, None], *, utc: bool = False) -> Optional[datetime]:
    """Convert a POSIX timestamp to a datetime.

    Returns a naive local-time datetime by default, a UTC-aware one if
    utc=True, and None if the timestamp is None.
    """
    if timestamp is None:
        return None
    return datetime.fromtimestamp(timestamp, tz=timezone.utc if utc else None)
5✔
861

862

863
def format_time(timestamp: Union[int, float, None]) -> str:
    """Render a POSIX timestamp as a local-time string, or "Unknown"."""
    dt = timestamp_to_datetime(timestamp)
    if not dt:
        return _("Unknown")
    return dt.isoformat(' ', timespec="minutes")
×
866

867

868
def age(
    from_date: Union[int, float, None],  # POSIX timestamp
    *,
    since_date: datetime = None,
    target_tz=None,
    include_seconds: bool = False,
) -> str:
    """Takes a timestamp and returns a string with the approximation of the age"""
    if from_date is None:
        return _("Unknown")

    from_date = datetime.fromtimestamp(from_date)
    if since_date is None:
        since_date = datetime.now(target_tz)

    delta = from_date - since_date
    is_in_past = from_date < since_date
    total_seconds = int(round(abs(delta.days * 86400 + delta.seconds)))
    total_minutes = int(round(total_seconds / 60))

    # Pick a past/future phrasing per bucket; thresholds are in minutes.
    if total_minutes == 0:
        if include_seconds:
            tmpl = _("{} seconds ago") if is_in_past else _("in {} seconds")
            return tmpl.format(total_seconds)
        return _("less than a minute ago") if is_in_past else _("in less than a minute")
    if total_minutes < 45:
        tmpl = _("about {} minutes ago") if is_in_past else _("in about {} minutes")
        return tmpl.format(total_minutes)
    if total_minutes < 90:
        return _("about 1 hour ago") if is_in_past else _("in about 1 hour")
    if total_minutes < 1440:
        tmpl = _("about {} hours ago") if is_in_past else _("in about {} hours")
        return tmpl.format(round(total_minutes / 60.0))
    if total_minutes < 2880:
        return _("about 1 day ago") if is_in_past else _("in about 1 day")
    if total_minutes < 43220:  # NOTE(review): historical threshold; 30 days would be 43200 — kept as-is
        tmpl = _("about {} days ago") if is_in_past else _("in about {} days")
        return tmpl.format(round(total_minutes / 1440))
    if total_minutes < 86400:
        return _("about 1 month ago") if is_in_past else _("in about 1 month")
    if total_minutes < 525600:
        tmpl = _("about {} months ago") if is_in_past else _("in about {} months")
        return tmpl.format(round(total_minutes / 43200))
    if total_minutes < 1051200:
        return _("about 1 year ago") if is_in_past else _("in about 1 year")
    tmpl = _("over {} years ago") if is_in_past else _("in over {} years")
    return tmpl.format(round(total_minutes / 525600))
944

945
# Hardcoded block explorer catalogs, keyed by display name.
# Each value is a pair: (base URL, {resource kind -> URL path prefix}),
# where resource kind is 'tx' or 'addr' (consumed by block_explorer_URL below).
mainnet_block_explorers = {
    '3xpl.com': ('https://3xpl.com/bitcoin/',
                        {'tx': 'transaction/', 'addr': 'address/'}),
    'Bitflyer.jp': ('https://chainflyer.bitflyer.jp/',
                        {'tx': 'Transaction/', 'addr': 'Address/'}),
    'Blockchain.info': ('https://blockchain.com/btc/',
                        {'tx': 'tx/', 'addr': 'address/'}),
    'Blockstream.info': ('https://blockstream.info/',
                        {'tx': 'tx/', 'addr': 'address/'}),
    'Bitaps.com': ('https://btc.bitaps.com/',
                        {'tx': '', 'addr': ''}),
    'BTC.com': ('https://btc.com/',
                        {'tx': '', 'addr': ''}),
    'Chain.so': ('https://www.chain.so/',
                        {'tx': 'tx/BTC/', 'addr': 'address/BTC/'}),
    'Insight.is': ('https://insight.bitpay.com/',
                        {'tx': 'tx/', 'addr': 'address/'}),
    'BlockCypher.com': ('https://live.blockcypher.com/btc/',
                        {'tx': 'tx/', 'addr': 'address/'}),
    'Blockchair.com': ('https://blockchair.com/bitcoin/',
                        {'tx': 'transaction/', 'addr': 'address/'}),
    'blockonomics.co': ('https://www.blockonomics.co/',
                        {'tx': 'api/tx?txid=', 'addr': '#/search?q='}),
    'mempool.space': ('https://mempool.space/',
                        {'tx': 'tx/', 'addr': 'address/'}),
    'mempool.emzy.de': ('https://mempool.emzy.de/',
                        {'tx': 'tx/', 'addr': 'address/'}),
    'OXT.me': ('https://oxt.me/',
                        {'tx': 'transaction/', 'addr': 'address/'}),
    'mynode.local': ('http://mynode.local:3002/',
                        {'tx': 'tx/', 'addr': 'address/'}),
    # 'system default' hands the URI to the OS via the blockchain: scheme
    'system default': ('blockchain:/',
                        {'tx': 'tx/', 'addr': 'address/'}),
}

testnet_block_explorers = {
    'Bitaps.com': ('https://tbtc.bitaps.com/',
                       {'tx': '', 'addr': ''}),
    'BlockCypher.com': ('https://live.blockcypher.com/btc-testnet/',
                       {'tx': 'tx/', 'addr': 'address/'}),
    'Blockchain.info': ('https://www.blockchain.com/btc-testnet/',
                       {'tx': 'tx/', 'addr': 'address/'}),
    'Blockstream.info': ('https://blockstream.info/testnet/',
                        {'tx': 'tx/', 'addr': 'address/'}),
    'mempool.space': ('https://mempool.space/testnet/',
                        {'tx': 'tx/', 'addr': 'address/'}),
    'smartbit.com.au': ('https://testnet.smartbit.com.au/',
                       {'tx': 'tx/', 'addr': 'address/'}),
    'system default': ('blockchain://000000000933ea01ad0ee984209779baaec3ced90fa3f408719526f8d77f4943/',
                       {'tx': 'tx/', 'addr': 'address/'}),
}

testnet4_block_explorers = {
    'mempool.space': ('https://mempool.space/testnet4/',
                        {'tx': 'tx/', 'addr': 'address/'}),
    'wakiyamap.dev': ('https://testnet4-explorer.wakiyamap.dev/',
                       {'tx': 'tx/', 'addr': 'address/'}),
}

signet_block_explorers = {
    'bc-2.jp': ('https://explorer.bc-2.jp/',
                        {'tx': 'tx/', 'addr': 'address/'}),
    'mempool.space': ('https://mempool.space/signet/',
                        {'tx': 'tx/', 'addr': 'address/'}),
    'bitcoinexplorer.org': ('https://signet.bitcoinexplorer.org/',
                       {'tx': 'tx/', 'addr': 'address/'}),
    'wakiyamap.dev': ('https://signet-explorer.wakiyamap.dev/',
                       {'tx': 'tx/', 'addr': 'address/'}),
    'ex.signet.bublina.eu.org': ('https://ex.signet.bublina.eu.org/',
                       {'tx': 'tx/', 'addr': 'address/'}),
    'system default': ('blockchain:/',
                       {'tx': 'tx/', 'addr': 'address/'}),
}

# Path mapping assumed when the user configures a custom explorer as a bare URL.
_block_explorer_default_api_loc = {'tx': 'tx/', 'addr': 'address/'}
1020

1021

1022
def block_explorer_info():
    """Return the hardcoded explorer catalog for the active network."""
    from . import constants
    catalogs = {
        "testnet": testnet_block_explorers,
        "testnet4": testnet4_block_explorers,
        "signet": signet_block_explorers,
    }
    return catalogs.get(constants.net.NET_NAME, mainnet_block_explorers)
×
1031

1032

1033
def block_explorer(config: 'SimpleConfig') -> Optional[str]:
    """Returns name of selected block explorer,
    or None if a custom one (not among hardcoded ones) is configured.
    """
    if config.BLOCK_EXPLORER_CUSTOM is not None:
        return None
    be_key = config.BLOCK_EXPLORER
    if block_explorer_info().get(be_key) is None:
        # configured name is not among the hardcoded ones: use the default
        be_key = config.cv.BLOCK_EXPLORER.get_default_value()
    assert isinstance(be_key, str), f"{be_key!r} should be str"
    return be_key
×
1045

1046

1047
def block_explorer_tuple(config: 'SimpleConfig') -> Optional[Tuple[str, dict]]:
    """Return (base URL, kind->path dict) for the configured explorer, or None."""
    custom_be = config.BLOCK_EXPLORER_CUSTOM
    if not custom_be:
        # using one of the hardcoded block explorers
        return block_explorer_info().get(block_explorer(config))
    # custom explorer: either a bare base-URL string, or an explicit pair
    if isinstance(custom_be, str):
        return custom_be, _block_explorer_default_api_loc
    if isinstance(custom_be, (tuple, list)) and len(custom_be) == 2:
        return tuple(custom_be)
    _logger.warning(f"not using {config.cv.BLOCK_EXPLORER_CUSTOM.key()!r} from config. "
                    f"expected a str or a pair but got {custom_be!r}")
    return None
×
1060

1061

1062
def block_explorer_URL(config: 'SimpleConfig', kind: str, item: str) -> Optional[str]:
    """Build the explorer web URL for `item`, where `kind` is 'tx' or 'addr'.

    Returns None if no explorer is configured or it lacks that resource kind.
    """
    be_tuple = block_explorer_tuple(config)
    if not be_tuple:
        return None
    explorer_url, explorer_dict = be_tuple
    kind_str = explorer_dict.get(kind)
    if kind_str is None:
        return None
    if explorer_url[-1] != "/":
        explorer_url += "/"
    return f"{explorer_url}{kind_str}{item}"
×
1074

1075

1076

1077

1078

1079
# Python bug (http://bugs.python.org/issue1927) causes raw_input
# to be redirected improperly between stdin/stderr on Unix systems
#TODO: py3
def raw_input(prompt=None):
    # Emit the prompt on stdout ourselves (instead of letting the builtin do
    # it), then delegate to the saved original for the actual read.
    if prompt:
        sys.stdout.write(prompt)
    return builtin_raw_input()

# Keep a reference to the original builtin, then monkey-patch builtins.input
# process-wide so all console prompts use the workaround above.
builtin_raw_input = builtins.input
builtins.input = raw_input
1089

1090

1091
def parse_json(message):
    """Split one newline-terminated JSON document off the front of `message`.

    Returns (obj, remainder): obj is the decoded document, or None if no full
    line is buffered yet or the line is not valid JSON/UTF-8.
    """
    # TODO: check \r\n pattern
    newline_pos = message.find(b'\n')
    if newline_pos == -1:
        # no complete line buffered yet
        return None, message
    head = message[:newline_pos]
    remainder = message[newline_pos + 1:]
    try:
        obj = json.loads(head.decode('utf8'))
    except Exception:
        obj = None
    return obj, remainder
×
1101

1102

1103
def setup_thread_excepthook():
    """
    Workaround for `sys.excepthook` thread bug from:
    http://bugs.python.org/issue1230540

    Call once from the main thread before creating any threads.
    """

    init_original = threading.Thread.__init__

    def init(self, *args, **kwargs):
        # Patched Thread.__init__: after normal initialisation, wrap the
        # thread's run() so uncaught exceptions reach sys.excepthook
        # (by default they bypass it on worker threads).

        init_original(self, *args, **kwargs)
        run_original = self.run

        def run_with_except_hook(*args2, **kwargs2):
            try:
                run_original(*args2, **kwargs2)
            except Exception:
                # forward to the global hook (e.g. the crash reporter)
                sys.excepthook(*sys.exc_info())

        self.run = run_with_except_hook

    threading.Thread.__init__ = init
×
1127

1128

1129
def send_exception_to_crash_reporter(e: BaseException):
    """Forward `e` to the crash reporter (imported lazily to avoid import cycles)."""
    from .base_crash_reporter import send_exception_to_crash_reporter as _report
    _report(e)
×
1132

1133

1134
def versiontuple(v):
    """Parse a dotted version string like "4.5.2" into a tuple of ints."""
    return tuple(int(part) for part in v.split("."))
5✔
1136

1137

1138
def read_json_file(path):
    """Read and JSON-decode the file at `path`.

    Raises FileImportFailed on unreadable files or invalid JSON.
    """
    try:
        with open(path, 'r', encoding='utf-8') as f:
            raw = f.read()
        return json.loads(raw)
    except json.JSONDecodeError:
        _logger.exception('')
        raise FileImportFailed(_("Invalid JSON code."))
    except BaseException as e:
        _logger.exception('')
        raise FileImportFailed(e)
5✔
1149

1150

1151
def write_json_file(path, data):
    """Serialize `data` as pretty-printed, key-sorted JSON to `path`.

    Raises FileExportFailed on I/O errors.
    """
    try:
        with open(path, 'w+', encoding='utf-8') as fp:
            json.dump(data, fp, indent=4, sort_keys=True, cls=MyEncoder)
    except (IOError, os.error) as exc:
        _logger.exception('')
        raise FileExportFailed(exc)
×
1158

1159

1160
def os_chmod(path, mode):
    """os.chmod aware of tmpfs"""
    try:
        os.chmod(path, mode)
    except OSError as e:
        # chmod can fail on tmpfs mounts (e.g. under XDG_RUNTIME_DIR);
        # swallow the error only in that known-benign case
        xdg_runtime_dir = os.environ.get("XDG_RUNTIME_DIR", None)
        if not (xdg_runtime_dir and is_subpath(path, xdg_runtime_dir)):
            raise
        _logger.info(f"Tried to chmod in tmpfs. Skipping... {e!r}")
×
1170

1171

1172
def make_dir(path, allow_symlink=True):
    """Make directory if it does not yet exist."""
    if os.path.exists(path):
        return
    if not allow_symlink and os.path.islink(path):
        raise Exception('Dangling link: ' + path)
    os.mkdir(path)
    # newly created dirs are made private to the user (0o700)
    os_chmod(path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
5✔
1179

1180

1181
def is_subpath(long_path: str, short_path: str) -> bool:
    """Returns whether long_path is a sub-path of short_path."""
    try:
        common = os.path.commonpath([long_path, short_path])
    except ValueError:
        # e.g. paths on different drives, or mixing absolute and relative
        return False
    return standardize_path(common) == standardize_path(short_path)
5✔
1190

1191

1192
def log_exceptions(func):
    """Decorator to log AND re-raise exceptions."""
    assert asyncio.iscoroutinefunction(func), 'func needs to be a coroutine'

    @functools.wraps(func)
    async def wrapper(*args, **kwargs):
        # for bound methods, args[0] is the instance; prefer its logger if any
        bound_self = args[0] if args else None
        try:
            return await func(*args, **kwargs)
        except asyncio.CancelledError:
            raise  # cancellation is normal control flow; do not log it
        except BaseException as e:
            mylogger = bound_self.logger if hasattr(bound_self, 'logger') else _logger
            try:
                mylogger.exception(f"Exception in {func.__name__}: {repr(e)}")
            except BaseException as e2:
                print(f"logging exception raised: {repr(e2)}... orig exc: {repr(e)} in {func.__name__}")
            raise
    return wrapper
5✔
1210

1211

1212
def ignore_exceptions(func):
    """Decorator to silently swallow all exceptions."""
    assert asyncio.iscoroutinefunction(func), 'func needs to be a coroutine'

    @functools.wraps(func)
    async def wrapper(*args, **kwargs):
        try:
            return await func(*args, **kwargs)
        except Exception:
            # deliberately swallowed; note BaseException (incl. CancelledError)
            # still propagates
            return None
    return wrapper
5✔
1222

1223

1224
def with_lock(func):
    """Decorator to enforce a lock on a function call."""
    def func_wrapper(self, *args, **kwargs):
        # serialize calls through the instance's own `self.lock`
        with self.lock:
            return func(self, *args, **kwargs)
    return func_wrapper
5✔
1230

1231

1232
class TxMinedInfo(NamedTuple):
    """Mining/verification status of a transaction, as known to us."""
    height: int                        # height of block that mined tx
    conf: Optional[int] = None         # number of confirmations, SPV verified. >=0, or None (None means unknown)
    timestamp: Optional[int] = None    # timestamp of block that mined tx
    txpos: Optional[int] = None        # position of tx in serialized block
    header_hash: Optional[str] = None  # hash of block that mined tx
    wanted_height: Optional[int] = None  # in case of timelock, min abs block height

    def short_id(self) -> Optional[str]:
        """Return "<height>x<txpos>" for a mined tx, else None."""
        if self.txpos is None or self.txpos < 0:
            return None
        assert self.height > 0
        return f"{self.height}x{self.txpos}"

    def is_local_like(self) -> bool:
        """Returns whether the tx is local-like (LOCAL/FUTURE)."""
        from .address_synchronizer import TX_HEIGHT_UNCONFIRMED, TX_HEIGHT_UNCONF_PARENT
        return (self.height <= 0
                and self.height not in (TX_HEIGHT_UNCONFIRMED, TX_HEIGHT_UNCONF_PARENT))
×
1254

1255

1256
class ShortID(bytes):
    """8-byte identifier: 3 bytes block height, 3 bytes tx position,
    2 bytes output index, all big-endian."""

    def __repr__(self):
        return f"<ShortID: {format_short_id(self)}>"

    def __str__(self):
        return format_short_id(self)

    @classmethod
    def from_components(cls, block_height: int, tx_pos_in_block: int, output_index: int) -> 'ShortID':
        """Pack the three components into the 8-byte representation."""
        packed = (block_height.to_bytes(3, byteorder='big')
                  + tx_pos_in_block.to_bytes(3, byteorder='big')
                  + output_index.to_bytes(2, byteorder='big'))
        return ShortID(packed)

    @classmethod
    def from_str(cls, scid: str) -> 'ShortID':
        """Parses a formatted scid str, e.g. '643920x356x0'."""
        parts = scid.split("x")
        if len(parts) != 3:
            raise ValueError(f"failed to parse ShortID: {scid!r}")
        try:
            numbers = [int(p) for p in parts]
        except ValueError:
            raise ValueError(f"failed to parse ShortID: {scid!r}") from None
        return ShortID.from_components(*numbers)

    @classmethod
    def normalize(cls, data: Union[None, str, bytes, 'ShortID']) -> Optional['ShortID']:
        """Coerce a hex string / raw bytes / ShortID / None into Optional[ShortID]."""
        if data is None or isinstance(data, ShortID):
            return data
        if isinstance(data, str):
            assert len(data) == 16
            return ShortID.fromhex(data)
        if isinstance(data, (bytes, bytearray)):
            assert len(data) == 8
            return ShortID(data)

    @property
    def block_height(self) -> int:
        return int.from_bytes(self[0:3], byteorder='big')

    @property
    def txpos(self) -> int:
        return int.from_bytes(self[3:6], byteorder='big')

    @property
    def output_index(self) -> int:
        return int.from_bytes(self[6:8], byteorder='big')
5✔
1305

1306

1307
def format_short_id(short_channel_id: Optional[bytes]):
    """Render an 8-byte short id as 'HEIGHTxPOSxOUT'; placeholder text if missing."""
    if not short_channel_id:
        return _('Not yet available')
    chunks = (short_channel_id[:3], short_channel_id[3:6], short_channel_id[6:])
    return 'x'.join(str(int.from_bytes(c, 'big')) for c in chunks)
1313

1314

1315
def make_aiohttp_proxy_connector(proxy: 'ProxySettings', ssl_context: Optional[ssl.SSLContext] = None) -> ProxyConnector:
    """Build an aiohttp connector that tunnels connections through the given SOCKS proxy."""
    socks_type = ProxyType.SOCKS5 if proxy.mode == 'socks5' else ProxyType.SOCKS4
    return ProxyConnector(
        proxy_type=socks_type,
        host=proxy.host,
        port=int(proxy.port),
        username=proxy.user,
        password=proxy.password,
        rdns=True,  # needed to prevent DNS leaks over proxy
        ssl=ssl_context,
    )
1325

1326

1327
def make_aiohttp_session(proxy: Optional['ProxySettings'], headers=None, timeout=None):
    """Create an aiohttp ClientSession using our CA bundle, optionally via `proxy`.

    `timeout` may be None (default), a number of seconds, or an aiohttp.ClientTimeout.
    """
    if headers is None:
        headers = {'User-Agent': 'Electrum'}
    if timeout is None:
        # The default timeout is high intentionally.
        # DNS on some systems can be really slow, see e.g. #5337
        timeout = aiohttp.ClientTimeout(total=45)
    elif isinstance(timeout, (int, float)):
        timeout = aiohttp.ClientTimeout(total=timeout)
    ssl_context = ssl.create_default_context(purpose=ssl.Purpose.SERVER_AUTH, cafile=ca_path)
    use_proxy = bool(proxy and proxy.enabled)
    connector = (make_aiohttp_proxy_connector(proxy, ssl_context) if use_proxy
                 else aiohttp.TCPConnector(ssl=ssl_context))
    return aiohttp.ClientSession(headers=headers, timeout=timeout, connector=connector)
×
1344

1345

1346
class OldTaskGroup(aiorpcx.TaskGroup):
    """Automatically raises exceptions on join; as in aiorpcx prior to version 0.20.
    That is, when using TaskGroup as a context manager, if any task encounters an exception,
    we would like that exception to be re-raised (propagated out). For the wait=all case,
    the OldTaskGroup class is emulating the following code-snippet:
    ```
    async with TaskGroup() as group:
        await group.spawn(task1())
        await group.spawn(task2())

        async for task in group:
            if not task.cancelled():
                task.result()
    ```
    So instead of the above, one can just write:
    ```
    async with OldTaskGroup() as group:
        await group.spawn(task1())
        await group.spawn(task2())
    ```
    # TODO see if we can migrate to asyncio.timeout, introduced in python 3.11, and use stdlib instead of aiorpcx.curio...
    """
    async def join(self):
        if self._wait is all:
            exc = False
            try:
                async for task in self:
                    if not task.cancelled():
                        # re-raise the task's exception (if any) out of join()
                        task.result()
            except BaseException:  # including asyncio.CancelledError
                exc = True
                raise
            finally:
                if exc:
                    # a task failed (or join itself was cancelled):
                    # tear down the remaining siblings before leaving
                    await self.cancel_remaining()
                await super().join()
        else:
            # wait=any/object case: propagate the completed task's exception
            await super().join()
            if self.completed:
                self.completed.result()
5✔
1386

1387
# We monkey-patch aiorpcx TimeoutAfter (used by timeout_after and ignore_after API),
1388
# to fix a timing issue present in asyncio as a whole re timing out tasks.
1389
# To see the issue we are trying to fix, consider example:
1390
#     async def outer_task():
1391
#         async with timeout_after(0.1):
1392
#             await inner_task()
1393
# When the 0.1 sec timeout expires, inner_task will get cancelled by timeout_after (=internal cancellation).
1394
# If around the same time (in terms of event loop iterations) another coroutine
1395
# cancels outer_task (=external cancellation), there will be a race.
1396
# Both cancellations work by propagating a CancelledError out to timeout_after, which then
1397
# needs to decide (in TimeoutAfter.__aexit__) whether it's due to an internal or external cancellation.
1398
# AFAICT asyncio provides no reliable way of distinguishing between the two.
1399
# This patch tries to always give priority to external cancellations.
1400
# see https://github.com/kyuupichan/aiorpcX/issues/44
1401
# see https://github.com/aio-libs/async-timeout/issues/229
1402
# see https://bugs.python.org/issue42130 and https://bugs.python.org/issue45098
1403
# TODO see if we can migrate to asyncio.timeout, introduced in python 3.11, and use stdlib instead of aiorpcx.curio...
1404
def _aiorpcx_monkeypatched_set_new_deadline(task, deadline):
    # Replacement for aiorpcx.curio._set_new_deadline (see comment block above):
    # wraps task.cancel so an external cancellation can be told apart from the
    # deadline-triggered internal one, giving priority to the external one.
    def timeout_task():
        # deadline fired: cancel via the original cancel; only record the
        # timeout if no external cancellation happened in the meantime
        task._orig_cancel()
        task._timed_out = None if getattr(task, "_externally_cancelled", False) else deadline
    def mycancel(*args, **kwargs):
        # external cancellation: remember it and clear any recorded timeout
        task._orig_cancel(*args, **kwargs)
        task._externally_cancelled = True
        task._timed_out = None
    if not hasattr(task, "_orig_cancel"):
        # only wrap cancel once per task (deadlines can be set repeatedly)
        task._orig_cancel = task.cancel
        task.cancel = mycancel
    task._deadline_handle = task._loop.call_at(deadline, timeout_task)
5✔
1416

1417

1418
def _aiorpcx_monkeypatched_set_task_deadline(task, deadline):
    # As the aiorpcx original, but also reset the external-cancellation marker
    # consumed by _aiorpcx_monkeypatched_set_new_deadline.
    ret = _aiorpcx_orig_set_task_deadline(task, deadline)
    task._externally_cancelled = None
    return ret
5✔
1422

1423

1424
def _aiorpcx_monkeypatched_unset_task_deadline(task):
    # Undo the cancel-wrapping done in _aiorpcx_monkeypatched_set_new_deadline,
    # restoring the task's original cancel method.
    if hasattr(task, "_orig_cancel"):
        task.cancel = task._orig_cancel
        del task._orig_cancel
    return _aiorpcx_orig_unset_task_deadline(task)
5✔
1429

1430

1431
# Save the aiorpcx originals, then install the patched versions above into
# aiorpcx.curio (applied at import time, process-wide).
_aiorpcx_orig_set_task_deadline    = aiorpcx.curio._set_task_deadline
_aiorpcx_orig_unset_task_deadline  = aiorpcx.curio._unset_task_deadline

aiorpcx.curio._set_new_deadline    = _aiorpcx_monkeypatched_set_new_deadline
aiorpcx.curio._set_task_deadline   = _aiorpcx_monkeypatched_set_task_deadline
aiorpcx.curio._unset_task_deadline = _aiorpcx_monkeypatched_unset_task_deadline
5✔
1437

1438

1439
async def wait_for2(fut: Awaitable, timeout: Union[int, float, None]):
    """Replacement for asyncio.wait_for,
     due to bugs: https://bugs.python.org/issue42130 and https://github.com/python/cpython/issues/86296 ,
     which are only fixed in python 3.12+.
     """
    if sys.version_info[:3] >= (3, 12):
        # stdlib wait_for is fixed there; use it directly
        return await asyncio.wait_for(fut, timeout)
    else:
        # emulate wait_for semantics: wrap in a Task, cancel it on timeout
        async with async_timeout(timeout):
            return await asyncio.ensure_future(fut, loop=get_running_loop())
2✔
1449

1450

1451
if hasattr(asyncio, 'timeout'):  # python 3.11+
    async_timeout = asyncio.timeout
else:
    # Backport shim: adapt aiorpcx's TimeoutAfter to asyncio.timeout semantics,
    # i.e. raise asyncio's exception types instead of the aiorpcx-specific ones.
    class TimeoutAfterAsynciolike(aiorpcx.curio.TimeoutAfter):
        async def __aexit__(self, exc_type, exc_value, tb):
            try:
                await super().__aexit__(exc_type, exc_value, tb)
            except (aiorpcx.TaskTimeout, aiorpcx.UncaughtTimeoutError):
                raise asyncio.TimeoutError from None
            except aiorpcx.TimeoutCancellationError:
                raise asyncio.CancelledError from None

    def async_timeout(delay: Union[int, float, None]):
        # delay=None means "no timeout", mirroring asyncio.timeout(None)
        if delay is None:
            return nullcontext()
        return TimeoutAfterAsynciolike(delay)
1✔
1467

1468

1469
class NetworkJobOnDefaultServer(Logger, ABC):
    """An abstract base class for a job that runs on the main network
    interface. Every time the main interface changes, the job is
    restarted, and some of its internals are reset.
    """
    def __init__(self, network: 'Network'):
        Logger.__init__(self)
        self.network = network
        self.interface = None  # type: Interface
        self._restart_lock = asyncio.Lock()
        # Ensure fairness between NetworkJobs. e.g. if multiple wallets
        # are open, a large wallet's Synchronizer should not starve the small wallets:
        self._network_request_semaphore = asyncio.Semaphore(100)

        self._reset()
        # every time the main interface changes, restart:
        register_callback(self._restart, ['default_server_changed'])
        # also schedule a one-off restart now, as there might already be a main interface:
        asyncio.run_coroutine_threadsafe(self._restart(), network.asyncio_loop)

    def _reset(self):
        """Initialise fields. Called every time the underlying
        server connection changes.
        """
        # fresh taskgroup per server connection; the previous one is
        # cancelled in stop()
        self.taskgroup = OldTaskGroup()
        self.reset_request_counters()

    async def _start(self, interface: 'Interface'):
        # Bind to `interface` and spawn this job's tasks inside the
        # interface's own taskgroup, so they die with the connection.
        self.logger.debug(f"starting. interface.server={repr(str(interface.server))}")
        self.interface = interface

        # capture the current taskgroup: _run_tasks checks it is still current
        taskgroup = self.taskgroup
        async def run_tasks_wrapper():
            self.logger.debug(f"starting taskgroup ({hex(id(taskgroup))}).")
            try:
                await self._run_tasks(taskgroup=taskgroup)
            except Exception as e:
                self.logger.error(f"taskgroup died ({hex(id(taskgroup))}). exc={e!r}")
                raise
            finally:
                self.logger.debug(f"taskgroup stopped ({hex(id(taskgroup))}).")
        await interface.taskgroup.spawn(run_tasks_wrapper)

    @abstractmethod
    async def _run_tasks(self, *, taskgroup: OldTaskGroup) -> None:
        """Start tasks in taskgroup. Called every time the underlying
        server connection changes.
        """
        # If self.taskgroup changed, don't start tasks. This can happen if we have
        # been restarted *just now*, i.e. after the _run_tasks coroutine object was created.
        if taskgroup != self.taskgroup:
            raise asyncio.CancelledError()

    async def stop(self, *, full_shutdown: bool = True):
        """Cancel this job's tasks; on full shutdown also stop listening
        for server changes (no further restarts will happen)."""
        self.logger.debug(f"stopping. {full_shutdown=}")
        if full_shutdown:
            unregister_callback(self._restart)
        await self.taskgroup.cancel_remaining()

    @log_exceptions
    async def _restart(self, *args):
        # Stop and start again on the (possibly new) main interface.
        # Triggered by the 'default_server_changed' callback and once at init.
        interface = self.network.interface
        if interface is None:
            return  # we should get called again soon

        # the lock serializes concurrent restart requests
        async with self._restart_lock:
            await self.stop(full_shutdown=False)
            self._reset()
            await self._start(interface)

    def reset_request_counters(self):
        # per-server-connection counters; reset together with the taskgroup
        self._requests_sent = 0
        self._requests_answered = 0

    def num_requests_sent_and_answered(self) -> Tuple[int, int]:
        return self._requests_sent, self._requests_answered

    @property
    def session(self):
        # RPC session of the currently bound interface; must exist when used
        s = self.interface.session
        assert s is not None
        return s
×
1551

1552

1553
async def detect_tor_socks_proxy() -> Optional[Tuple[str, int]]:
    """Concurrently probe the ports Tor commonly listens on.

    Returns the first (host, port) that behaves like a Tor SOCKS port, or None.
    """
    # Probable ports for Tor to listen at
    candidates = [
        ("127.0.0.1", 9050),
        ("127.0.0.1", 9051),
        ("127.0.0.1", 9150),
    ]

    proxy_addr = None

    async def test_net_addr(net_addr):
        nonlocal proxy_addr
        if await is_tor_socks_port(*net_addr):
            # set result, and cancel remaining probes
            proxy_addr = net_addr
            await group.cancel_remaining()

    async with OldTaskGroup() as group:
        for net_addr in candidates:
            await group.spawn(test_net_addr(net_addr))
    return proxy_addr
×
1574

1575

1576
@log_exceptions
async def is_tor_socks_port(host: str, port: int) -> bool:
    """Probe whether (host, port) looks like a Tor SOCKS listener."""
    # mimic "tor-resolve 0.0.0.0".
    # see https://github.com/spesmilo/electrum/issues/7317#issuecomment-1369281075
    # > this is a socks5 handshake, followed by a socks RESOLVE request as defined in
    # > [tor's socks extension spec](https://github.com/torproject/torspec/blob/7116c9cdaba248aae07a3f1d0e15d9dd102f62c5/socks-extensions.txt#L63),
    # > resolving 0.0.0.0, which being an IP, tor resolves itself without needing to ask a relay.
    writer = None
    try:
        async with async_timeout(10):
            reader, writer = await asyncio.open_connection(host, port)
            writer.write(b'\x05\x01\x00\x05\xf0\x00\x03\x070.0.0.0\x00\x00')
            await writer.drain()
            reply = await reader.read(1024)
            return reply == b'\x05\x00\x05\x00\x00\x01\x00\x00\x00\x00\x00\x00'
    except (OSError, asyncio.TimeoutError):
        return False
    finally:
        if writer:
            writer.close()
×
1598

1599

1600
# If True, get_asyncio_loop() may fall back to the loop already running in the
# caller's thread instead of requiring our own global loop to exist.
AS_LIB_USER_I_WANT_TO_MANAGE_MY_OWN_ASYNCIO_LOOP = False  # used by unit tests

# The single global event loop, set by create_and_start_event_loop() and
# cleared again when that loop finishes.
_asyncio_event_loop = None  # type: Optional[asyncio.AbstractEventLoop]
5✔
1603
def get_asyncio_loop() -> asyncio.AbstractEventLoop:
    """Returns the global asyncio event loop we use.

    If the library user opted into managing their own loop, fall back to the
    loop running in the current thread. Raises if no loop is available.
    """
    loop = _asyncio_event_loop
    if loop:
        return loop
    if AS_LIB_USER_I_WANT_TO_MANAGE_MY_OWN_ASYNCIO_LOOP:
        running = get_running_loop()
        if running:
            return running
    raise Exception("event loop not created yet")
×
1611

1612

1613
def create_and_start_event_loop() -> Tuple[asyncio.AbstractEventLoop,
                                           asyncio.Future,
                                           threading.Thread]:
    """Create the global event loop, run it in a dedicated 'EventLoop' thread,
    and return (loop, stopping_future, thread).

    Resolving the returned future (from any thread) stops the loop.
    Raises if the global loop already exists.
    """
    global _asyncio_event_loop
    if _asyncio_event_loop is not None:
        raise Exception("there is already a running event loop")

    # asyncio.get_event_loop() became deprecated in python3.10. (see https://github.com/python/cpython/issues/83710)
    # We set a custom event loop policy purely to be compatible with code that
    # relies on asyncio.get_event_loop().
    # - in python 3.8-3.9, asyncio.Event.__init__, asyncio.Lock.__init__,
    #   and similar, calls get_event_loop. see https://github.com/python/cpython/pull/23420
    class MyEventLoopPolicy(asyncio.DefaultEventLoopPolicy):
        def get_event_loop(self):
            # In case electrum is being used as a library, there might be other
            # event loops in use besides ours. To minimise interfering with those,
            # if there is a loop running in the current thread, return that:
            running_loop = get_running_loop()
            if running_loop is not None:
                return running_loop
            # Otherwise, return our global loop:
            return get_asyncio_loop()
    asyncio.set_event_loop_policy(MyEventLoopPolicy())

    loop = asyncio.new_event_loop()
    _asyncio_event_loop = loop

    def on_exception(loop, context):
        """Suppress spurious messages it appears we cannot control."""
        SUPPRESS_MESSAGE_REGEX = re.compile('SSL handshake|Fatal read error on|'
                                            'SSL error in data received')
        message = context.get('message')
        if message and SUPPRESS_MESSAGE_REGEX.match(message):
            return
        loop.default_exception_handler(context)

    def run_event_loop():
        # thread target: runs until stopping_fut is resolved
        try:
            loop.run_until_complete(stopping_fut)
        finally:
            # clean-up
            global _asyncio_event_loop
            _asyncio_event_loop = None

    loop.set_exception_handler(on_exception)
    _set_custom_task_factory(loop)
    # loop.set_debug(True)
    # resolving this future (from any thread) makes run_event_loop() return
    stopping_fut = loop.create_future()
    loop_thread = threading.Thread(
        target=run_event_loop,
        name='EventLoop',
    )
    loop_thread.start()
    # Wait until the loop actually starts.
    # On a slow PC, or with a debugger attached, this can take a few dozens of ms,
    # and if we returned without a running loop, weird things can happen...
    t0 = time.monotonic()
    while not loop.is_running():
        time.sleep(0.01)
        if time.monotonic() - t0 > 5:
            raise Exception("been waiting for 5 seconds but asyncio loop would not start!")
    return loop, stopping_fut, loop_thread
×
1675

1676

1677
# strong references to in-flight tasks, so the GC cannot destroy them mid-run
_running_asyncio_tasks = set()  # type: Set[asyncio.Future]
def _set_custom_task_factory(loop: asyncio.AbstractEventLoop):
    """Wrap task creation to track pending and running tasks.
    When tasks are created, asyncio only maintains a weak reference to them.
    Hence, the garbage collector might destroy the task mid-execution.
    To avoid this, we store a strong reference for the task until it completes.

    Without this, a lot of APIs are basically Heisenbug-generators... e.g.:
    - "asyncio.create_task"
    - "loop.create_task"
    - "asyncio.ensure_future"
    - "asyncio.run_coroutine_threadsafe"

    related:
        - https://bugs.python.org/issue44665
        - https://github.com/python/cpython/issues/88831
        - https://github.com/python/cpython/issues/91887
        - https://textual.textualize.io/blog/2023/02/11/the-heisenbug-lurking-in-your-async-code/
        - https://github.com/python/cpython/issues/91887#issuecomment-1434816045
        - "Task was destroyed but it is pending!"
    """

    platform_task_factory = loop.get_task_factory()

    def factory(loop_, coro, **kwargs):
        # delegate to a pre-existing factory if there is one, so we compose with it
        if platform_task_factory is not None:
            task = platform_task_factory(loop_, coro, **kwargs)
        else:
            task = asyncio.Task(coro, loop=loop_, **kwargs)
        # keep a strong reference until the task is done
        _running_asyncio_tasks.add(task)
        task.add_done_callback(_running_asyncio_tasks.discard)
        return task

    loop.set_task_factory(factory)
5✔
1711

1712

1713
def run_sync_function_on_asyncio_thread(func: Callable, *, block: bool) -> None:
    """Run a non-async fn on the asyncio thread. Can be called from any thread.

    If the current thread is already the asyncio thread, func is guaranteed
    to have been completed when this method returns.

    For any other thread, we only wait for completion if `block` is True.
    """
    assert not asyncio.iscoroutinefunction(func), "func must be a non-async function"
    asyncio_loop = get_asyncio_loop()
    if get_running_loop() == asyncio_loop:  # we are running on the asyncio thread
        func()
    else:  # non-asyncio thread
        # wrap in a coroutine, so it can be scheduled via run_coroutine_threadsafe
        async def wrapper():
            return func()
        fut = asyncio.run_coroutine_threadsafe(wrapper(), loop=asyncio_loop)
        if block:
            fut.result()
        else:
            # add explicit logging of exceptions, otherwise they might get lost
            # (capture the caller's stack now; the callback runs on another thread)
            tb1 = traceback.format_stack()[:-1]
            tb1_str = "".join(tb1)
            def on_done(fut_: concurrent.futures.Future):
                assert fut_.done()
                if fut_.cancelled():
                    _logger.debug(f"func cancelled. {func=}.")
                elif exc := fut_.exception():
                    # note: We explicitly log the first part of the traceback, tb1_str.
                    #       The second part gets logged by setting "exc_info".
                    _logger.error(
                        f"func errored. {func=}. {exc=}"
                        f"\n{tb1_str}", exc_info=exc)
            fut.add_done_callback(on_done)
×
1746

1747

1748
class OrderedDictWithIndex(OrderedDict):
    """An OrderedDict that keeps track of the positions of keys.

    Note: very inefficient to modify contents, except to add new items.
    """

    def __init__(self):
        super().__init__()
        self._key_to_pos = {}
        self._pos_to_key = {}

    def _recalc_index(self):
        # rebuild both lookup tables from the current key order
        self._key_to_pos = {}
        self._pos_to_key = {}
        for pos, key in enumerate(self.keys()):
            self._key_to_pos[key] = pos
            self._pos_to_key[pos] = key

    def pos_from_key(self, key):
        """Return the position of `key` in insertion order."""
        return self._key_to_pos[key]

    def value_from_pos(self, pos):
        """Return the value stored at position `pos`."""
        return self[self._pos_to_key[pos]]

    def popitem(self, *args, **kwargs):
        result = super().popitem(*args, **kwargs)
        self._recalc_index()
        return result

    def move_to_end(self, *args, **kwargs):
        result = super().move_to_end(*args, **kwargs)
        self._recalc_index()
        return result

    def clear(self):
        result = super().clear()
        self._recalc_index()
        return result

    def pop(self, *args, **kwargs):
        result = super().pop(*args, **kwargs)
        self._recalc_index()
        return result

    def update(self, *args, **kwargs):
        result = super().update(*args, **kwargs)
        self._recalc_index()
        return result

    def __delitem__(self, *args, **kwargs):
        result = super().__delitem__(*args, **kwargs)
        self._recalc_index()
        return result

    def __setitem__(self, key, *args, **kwargs):
        existed = key in self
        result = super().__setitem__(key, *args, **kwargs)
        if not existed:
            # a brand-new key is appended, so it gets the last position;
            # overwriting an existing key leaves positions unchanged
            new_pos = len(self) - 1
            self._key_to_pos[key] = new_pos
            self._pos_to_key[new_pos] = key
        return result
×
1808

1809

1810
def multisig_type(wallet_type):
    """If wallet_type is mofn multi-sig, return [m, n],
    otherwise return None."""
    if not wallet_type:
        return None
    m = re.match(r'(\d+)of(\d+)', wallet_type)
    if m is None:
        return None
    return [int(m.group(1)), int(m.group(2))]
5✔
1819

1820

1821
def is_ip_address(x: Union[str, bytes]) -> bool:
    """Return whether x parses as an IPv4 or IPv6 address."""
    if isinstance(x, bytes):
        x = x.decode("utf-8")
    try:
        ipaddress.ip_address(x)
    except ValueError:
        return False
    return True
5✔
1829

1830

1831
def is_localhost(host: str) -> bool:
    """Return whether `host` refers to the local machine.

    Accepts the names "localhost"/"localhost.", IPv4/IPv6 loopback literals,
    and bracketed IPv6 (e.g. "[::1]"). Other hostnames return False.
    """
    if str(host) in ('localhost', 'localhost.',):
        return True
    if not host:
        # bugfix: an empty string used to raise IndexError on host[0] below
        return False
    if host[0] == '[' and host[-1] == ']':  # IPv6
        host = host[1:-1]
    try:
        ip_addr = ipaddress.ip_address(host)  # type: Union[IPv4Address, IPv6Address]
        return ip_addr.is_loopback
    except ValueError:
        pass  # not an IP
    return False
5✔
1842

1843

1844
def is_private_netaddress(host: str) -> bool:
    """Return whether `host` is a private/loopback network address.

    Accepts hostnames, IPv4/IPv6 literals, and bracketed IPv6 (e.g. "[::1]").
    Non-IP hostnames (other than localhost) are considered non-private.
    """
    if is_localhost(host):
        return True
    if not host:
        # bugfix: an empty string used to raise IndexError on host[0] below
        return False
    if host[0] == '[' and host[-1] == ']':  # IPv6
        host = host[1:-1]
    try:
        ip_addr = ipaddress.ip_address(host)  # type: Union[IPv4Address, IPv6Address]
        return ip_addr.is_private
    except ValueError:
        pass  # not an IP
    return False
5✔
1855

1856

1857
def list_enabled_bits(x: int) -> Sequence[int]:
    """e.g. 77 (0b1001101) --> (0, 2, 3, 6)"""
    bits = bin(x)[2:]
    # walk the binary string from least-significant bit upwards
    return tuple(pos for pos, bit in enumerate(reversed(bits)) if bit == '1')
5✔
1862

1863

1864
def resolve_dns_srv(host: str):
    """Resolve SRV records for `host` and return a list of
    {'host': ..., 'port': ...} dicts, best candidates first.
    """
    # FIXME this method is not using the network proxy. (although the proxy might not support UDP?)
    records = dns.resolver.resolve(host, 'SRV')
    # priority: prefer lower
    # weight: tie breaker; prefer higher
    ordered = sorted(records, key=lambda rec: (rec.priority, -rec.weight))
    return [{'host': str(rec.target), 'port': rec.port} for rec in ordered]
×
1877

1878

1879
def randrange(bound: int) -> int:
    """Return a random integer k such that 1 <= k < bound, uniformly
    distributed across that range.
    This is guaranteed to be cryptographically strong.
    """
    # randbelow yields [0, bound-1); shift up by one to get [1, bound)
    return 1 + secrets.randbelow(bound - 1)
5✔
1887

1888

1889
class CallbackManager(Logger):
    """Thread-safe registry of event callbacks.

    Callbacks may be (un)registered from any thread; triggered callbacks are
    always scheduled onto the asyncio event loop (see trigger_callback).
    """
    # callbacks set by the GUI or any thread
    # guarantee: the callbacks will always get triggered from the asyncio thread.

    def __init__(self):
        Logger.__init__(self)
        self.callback_lock = threading.Lock()
        self.callbacks = defaultdict(list)      # note: needs self.callback_lock

    def register_callback(self, func, events):
        """Register `func` to be called for each event name in `events`."""
        with self.callback_lock:
            for event in events:
                self.callbacks[event].append(func)

    def unregister_callback(self, callback):
        """Remove `callback` from all events it was registered for."""
        with self.callback_lock:
            for callbacks in self.callbacks.values():
                if callback in callbacks:
                    callbacks.remove(callback)

    def trigger_callback(self, event, *args):
        """Trigger a callback with given arguments.
        Can be called from any thread. The callback itself will get scheduled
        on the event loop.
        """
        loop = get_asyncio_loop()
        assert loop.is_running(), "event loop not running"
        # copy the list under the lock, so callbacks can (un)register
        # concurrently while we iterate below
        with self.callback_lock:
            callbacks = self.callbacks[event][:]
        for callback in callbacks:
            if asyncio.iscoroutinefunction(callback):  # async cb
                fut = asyncio.run_coroutine_threadsafe(callback(*args), loop)
                # log exceptions explicitly, otherwise they might get lost
                def on_done(fut_: concurrent.futures.Future):
                    assert fut_.done()
                    if fut_.cancelled():
                        self.logger.debug(f"cb cancelled. {event=}.")
                    elif exc := fut_.exception():
                        self.logger.error(f"cb errored. {event=}. {exc=}", exc_info=exc)
                fut.add_done_callback(on_done)
            else:  # non-async cb
                run_sync_function_on_asyncio_thread(partial(callback, *args), block=False)
5✔
1930

1931

1932
# global callback manager, plus module-level convenience aliases to its methods
callback_mgr = CallbackManager()
trigger_callback = callback_mgr.trigger_callback
register_callback = callback_mgr.register_callback
unregister_callback = callback_mgr.unregister_callback
# maps "module.ClassName" -> set of method names decorated with @event_listener
_event_listeners = defaultdict(set)  # type: Dict[str, Set[str]]
5✔
1937

1938

1939
class EventListener:
    """Use as a mixin for a class that has methods to be triggered on events.
    - Methods that receive the callbacks should be named "on_event_*" and decorated with @event_listener.
    - register_callbacks() should be called exactly once per instance of EventListener, e.g. in __init__
    - unregister_callbacks() should be called at least once, e.g. when the instance is destroyed
    """

    def _list_callbacks(self):
        # walk the full MRO so listener methods declared on base classes fire too
        prefix = 'on_event_'
        for klass in self.__class__.__mro__:
            classpath = f"{klass.__module__}.{klass.__name__}"
            for method_name in _event_listeners[classpath]:
                method = getattr(self, method_name)
                assert callable(method)
                assert method_name.startswith(prefix)
                yield method_name[len(prefix):], method

    def register_callbacks(self):
        """Register all @event_listener-decorated methods of this instance."""
        for event_name, method in self._list_callbacks():
            register_callback(method, [event_name])

    def unregister_callbacks(self):
        """Unregister all @event_listener-decorated methods of this instance."""
        for _event_name, method in self._list_callbacks():
            unregister_callback(method)
5✔
1964

1965

1966
def event_listener(func):
    """To be used in subclasses of EventListener only. (how to enforce this programmatically?)"""
    classname, method_name = func.__qualname__.split('.')
    assert method_name.startswith('on_event_')
    # remember the method under its "module.ClassName" path
    _event_listeners[f"{func.__module__}.{classname}"].add(method_name)
    return func
5✔
1973

1974

1975
_NetAddrType = TypeVar("_NetAddrType")
5✔
1976
# requirements for _NetAddrType:
1977
# - reasonable __hash__() implementation (e.g. based on host/port of remote endpoint)
1978

1979
class NetworkRetryManager(Generic[_NetAddrType]):
5✔
1980
    """Truncated Exponential Backoff for network connections."""
1981

1982
    def __init__(
5✔
1983
            self, *,
1984
            max_retry_delay_normal: float,
1985
            init_retry_delay_normal: float,
1986
            max_retry_delay_urgent: float = None,
1987
            init_retry_delay_urgent: float = None,
1988
    ):
1989
        self._last_tried_addr = {}  # type: Dict[_NetAddrType, Tuple[float, int]]  # (unix ts, num_attempts)
5✔
1990

1991
        # note: these all use "seconds" as unit
1992
        if max_retry_delay_urgent is None:
5✔
1993
            max_retry_delay_urgent = max_retry_delay_normal
5✔
1994
        if init_retry_delay_urgent is None:
5✔
1995
            init_retry_delay_urgent = init_retry_delay_normal
5✔
1996
        self._max_retry_delay_normal = max_retry_delay_normal
5✔
1997
        self._init_retry_delay_normal = init_retry_delay_normal
5✔
1998
        self._max_retry_delay_urgent = max_retry_delay_urgent
5✔
1999
        self._init_retry_delay_urgent = init_retry_delay_urgent
5✔
2000

2001
    def _trying_addr_now(self, addr: _NetAddrType) -> None:
5✔
2002
        last_time, num_attempts = self._last_tried_addr.get(addr, (0, 0))
×
2003
        # we add up to 1 second of noise to the time, so that clients are less likely
2004
        # to get synchronised and bombard the remote in connection waves:
2005
        cur_time = time.time() + random.random()
×
2006
        self._last_tried_addr[addr] = cur_time, num_attempts + 1
×
2007

2008
    def _on_connection_successfully_established(self, addr: _NetAddrType) -> None:
5✔
2009
        self._last_tried_addr[addr] = time.time(), 0
×
2010

2011
    def _can_retry_addr(self, addr: _NetAddrType, *,
5✔
2012
                        now: float = None, urgent: bool = False) -> bool:
2013
        if now is None:
×
2014
            now = time.time()
×
2015
        last_time, num_attempts = self._last_tried_addr.get(addr, (0, 0))
×
2016
        if urgent:
×
2017
            max_delay = self._max_retry_delay_urgent
×
2018
            init_delay = self._init_retry_delay_urgent
×
2019
        else:
2020
            max_delay = self._max_retry_delay_normal
×
2021
            init_delay = self._init_retry_delay_normal
×
2022
        delay = self.__calc_delay(multiplier=init_delay, max_delay=max_delay, num_attempts=num_attempts)
×
2023
        next_time = last_time + delay
×
2024
        return next_time < now
×
2025

2026
    @classmethod
5✔
2027
    def __calc_delay(cls, *, multiplier: float, max_delay: float,
5✔
2028
                     num_attempts: int) -> float:
2029
        num_attempts = min(num_attempts, 100_000)
×
2030
        try:
×
2031
            res = multiplier * 2 ** num_attempts
×
2032
        except OverflowError:
×
2033
            return max_delay
×
2034
        return max(0, min(max_delay, res))
×
2035

2036
    def _clear_addr_retry_times(self) -> None:
5✔
2037
        self._last_tried_addr.clear()
5✔
2038

2039

2040
class ESocksProxy(aiorpcx.SOCKSProxy):
    """SOCKS proxy with an asyncio.open_connection()-style helper."""
    # note: proxy will not leak DNS as create_connection()
    # sets (local DNS) resolve=False by default

    async def open_connection(self, host=None, port=None, **kwargs):
        """Open a (reader, writer) stream pair tunnelled through the proxy."""
        loop = asyncio.get_running_loop()
        reader = asyncio.StreamReader(loop=loop)
        protocol = asyncio.StreamReaderProtocol(reader, loop=loop)
        transport, _ = await self.create_connection(
            lambda: protocol, host, port, **kwargs)
        writer = asyncio.StreamWriter(transport, protocol, reader, loop)
        return reader, writer

    @classmethod
    def from_network_settings(cls, network: Optional['Network']) -> Optional['ESocksProxy']:
        """Build an ESocksProxy from the network's proxy settings, or None if disabled."""
        if not network or not network.proxy or not network.proxy.enabled:
            return None
        proxy = network.proxy
        username, pw = proxy.user, proxy.password
        if not username or not pw:
            # is_proxy_tor is tri-state; None indicates it is still probing the proxy to test for TOR
            if network.is_proxy_tor:
                # random credentials per proxy object — presumably to make Tor
                # isolate circuits per auth pair; TODO confirm intent
                auth = aiorpcx.socks.SOCKSRandomAuth()
            else:
                auth = None
        else:
            auth = aiorpcx.socks.SOCKSUserAuth(username, pw)
        addr = aiorpcx.NetAddress(proxy.host, proxy.port)
        if proxy.mode == "socks4":
            ret = cls(addr, aiorpcx.socks.SOCKS4a, auth)
        elif proxy.mode == "socks5":
            ret = cls(addr, aiorpcx.socks.SOCKS5, auth)
        else:
            raise NotImplementedError  # http proxy not available with aiorpcx
        return ret
×
2075

2076

2077
class JsonRPCError(Exception):
    """Raised when a JSON-RPC server replies with an error object."""

    class Codes(enum.IntEnum):
        # application-specific error codes
        USERFACING = 1
        INTERNAL = 2

    def __init__(self, *, code: int, message: str, data: Optional[dict] = None):
        super().__init__()
        self.code = code
        self.message = message
        self.data = data
×
2089

2090

2091
class JsonRPCClient:
    """Minimal JSON-RPC 2.0 client over an aiohttp session."""

    def __init__(self, session: aiohttp.ClientSession, url: str):
        self.session = session
        self.url = url
        self._id = 0  # incrementing request id

    async def request(self, endpoint, *args):
        """Send request to server, parse and return result.
        note: parsing code is naive, the server is assumed to be well-behaved.
              Up to the caller to handle exceptions, including those arising from parsing errors.
        """
        self._id += 1
        data = ('{"jsonrpc": "2.0", "id":"%d", "method": "%s", "params": %s }'
                % (self._id, endpoint, json.dumps(args)))
        async with self.session.post(self.url, data=data) as resp:
            if resp.status == 200:
                r = await resp.json()
                result = r.get('result')
                error = r.get('error')
                if error:
                    raise JsonRPCError(code=error["code"], message=error["message"], data=error.get("data"))
                else:
                    return result
            else:
                # non-200 response: returned as an error string (does not raise)
                text = await resp.text()
                return 'Error: ' + str(text)

    def add_method(self, endpoint):
        """Dynamically expose `endpoint` as an async method on this instance."""
        async def coro(*args):
            return await self.request(endpoint, *args)
        setattr(self, endpoint, coro)
×
2123

2124

2125
T = TypeVar('T')

def random_shuffled_copy(x: Iterable[T]) -> List[T]:
    """Returns a shuffled copy of the input."""
    shuffled = list(x)  # materialise a copy; the input is left untouched
    random.shuffle(shuffled)
    return shuffled
5✔
2132

2133

2134
def test_read_write_permissions(path) -> None:
5✔
2135
    # note: There might already be a file at 'path'.
2136
    #       Make sure we do NOT overwrite/corrupt that!
2137
    temp_path = "%s.tmptest.%s" % (path, os.getpid())
5✔
2138
    echo = "fs r/w test"
5✔
2139
    try:
5✔
2140
        # test READ permissions for actual path
2141
        if os.path.exists(path):
5✔
2142
            with open(path, "rb") as f:
5✔
2143
                f.read(1)  # read 1 byte
5✔
2144
        # test R/W sanity for "similar" path
2145
        with open(temp_path, "w", encoding='utf-8') as f:
5✔
2146
            f.write(echo)
5✔
2147
        with open(temp_path, "r", encoding='utf-8') as f:
5✔
2148
            echo2 = f.read()
5✔
2149
        os.remove(temp_path)
5✔
2150
    except Exception as e:
×
2151
        raise IOError(e) from e
×
2152
    if echo != echo2:
5✔
2153
        raise IOError('echo sanity-check failed')
×
2154

2155

2156
class classproperty(property):
    """~read-only class-level @property
    from https://stackoverflow.com/a/13624858 by denis-ryzhkov
    """
    def __get__(self, instance, owner):
        # ignore the instance; always invoke the getter with the class itself
        return self.fget(owner)
5✔
2162

2163

2164
def get_running_loop() -> Optional[asyncio.AbstractEventLoop]:
    """Returns the asyncio event loop that is *running in this thread*, if any."""
    try:
        loop = asyncio.get_running_loop()
    except RuntimeError:
        # no loop is running in the current thread
        return None
    return loop
×
2170

2171

2172
def error_text_str_to_safe_str(err: str, *, max_len: Optional[int] = 500) -> str:
    """Converts an untrusted error string to a sane printable ascii str.
    Never raises.
    """
    # go through the bytes-variant so both entry points share one sanitiser
    as_bytes = err.encode("ascii", errors='backslashreplace')
    safe = error_text_bytes_to_safe_str(as_bytes, max_len=None)
    return truncate_text(safe, max_len=max_len)


def error_text_bytes_to_safe_str(err: bytes, *, max_len: Optional[int] = 500) -> str:
    """Converts an untrusted error bytes text to a sane printable ascii str.
    Never raises.

    Note that naive ascii conversion would be insufficient. Fun stuff:
    >>> b = b"my_long_prefix_blabla" + 21 * b"\x08" + b"malicious_stuff"
    >>> s = b.decode("ascii")
    >>> print(s)
    malicious_stuffblabla
    """
    # convert to ascii, to get rid of unicode stuff
    ascii_text = err.decode("ascii", errors='backslashreplace')
    # do repr to handle ascii special chars (especially when printing/logging the str)
    return truncate_text(repr(ascii_text), max_len=max_len)


def truncate_text(text: str, *, max_len: Optional[int]) -> str:
    """Shorten `text` to at most max_len chars, appending a truncation marker.
    A max_len of None disables truncation.
    """
    if max_len is not None and len(text) > max_len:
        return text[:max_len] + f"... (truncated. orig_len={len(text)})"
    return text
5✔
2204

2205

2206
def nostr_pow_worker(nonce, nostr_pubk, target_bits, hash_function, hash_len_bits, shutdown):
    """Function to generate PoW for Nostr, to be spawned in a ProcessPoolExecutor.

    Searches nonces starting at `nonce` for a digest of
    hash_function(b'electrum-' + nostr_pubk + nonce_bytes) with at least
    `target_bits` leading zero bits. Returns (digest, nonce) on success, or
    (None, None) if another worker won the race (signalled via `shutdown`).
    """
    hash_preimage = b'electrum-' + nostr_pubk
    # hoisted loop invariant: the success threshold for the digest value
    threshold = 1 << (hash_len_bits - target_bits)
    while True:
        # we cannot check is_set on each iteration as it has a lot of overhead, this way we can check
        # it with low overhead (just the additional range counter)
        for _ in range(1000000):
            digest = hash_function(hash_preimage + nonce.to_bytes(32, 'big')).digest()
            if int.from_bytes(digest, 'big') < threshold:
                shutdown.set()
                # bugfix: this used to `return hash, nonce`, returning the
                # *builtin* `hash` function instead of the computed digest
                return digest, nonce
            nonce += 1
        if shutdown.is_set():
            return None, None
×
2220

2221

2222
async def gen_nostr_ann_pow(nostr_pubk: bytes, target_bits: int) -> Tuple[int, int]:
    """Generate a PoW for a Nostr announcement. The PoW is hash[b'electrum-'+pubk+nonce]

    Returns (nonce, pow_bits), where pow_bits is the achieved amount of
    leading zero bits (see get_nostr_ann_pow_amount).
    """
    import multiprocessing  # not available on Android, so we import it here
    hash_function = hashlib.sha256
    hash_len_bits = 256
    max_nonce: int = (1 << (32 * 8)) - 1  # 32-byte nonce
    start_nonce = 0

    max_workers = max(multiprocessing.cpu_count() - 1, 1)  # use all but one CPU
    # a Manager-backed event can be shared with the worker processes
    manager = multiprocessing.Manager()
    shutdown = manager.Event()
    with ProcessPoolExecutor(max_workers=max_workers) as executor:
        tasks = []
        loop = asyncio.get_running_loop()
        # NOTE(review): the loop variable `task` is immediately shadowed by the
        # executor future; it only serves as a repetition counter here
        for task in range(0, max_workers):
            task = loop.run_in_executor(
                executor,
                nostr_pow_worker,
                start_nonce,
                nostr_pubk,
                target_bits,
                hash_function,
                hash_len_bits,
                shutdown
            )
            tasks.append(task)
            start_nonce += max_nonce // max_workers  # split the nonce range between the processes
            if start_nonce > max_nonce:  # make sure we don't go over the max_nonce
                start_nonce = random.randint(0, int(max_nonce * 0.75))

        # workers race: the first to find a PoW sets `shutdown`, so the others exit
        done, pending = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED)
        hash_res, nonce_res = done.pop().result()
        executor.shutdown(wait=False, cancel_futures=True)

    return nonce_res, get_nostr_ann_pow_amount(nostr_pubk, nonce_res)
×
2257

2258

2259
def get_nostr_ann_pow_amount(nostr_pubk: bytes, nonce: Optional[int]) -> int:
    """Return the amount of leading zero bits for a nostr announcement PoW."""
    if not nonce:
        # no nonce (None or 0) means no PoW was attached
        return 0
    preimage = b'electrum-' + nostr_pubk + nonce.to_bytes(32, 'big')
    digest_int = int.from_bytes(hashlib.sha256(preimage).digest(), 'big')
    # a sha256 digest has 256 bits; bits not used by the value are leading zeros
    return 256 - digest_int.bit_length()
×
2270

2271

2272
class OnchainHistoryItem(NamedTuple):
    """A single on-chain transaction entry of a wallet's history."""
    txid: str
    amount_sat: int
    fee_sat: int
    balance_sat: int
    tx_mined_status: TxMinedInfo
    # bugfix: `group_id` was accidentally declared twice in the class body
    # (annotations de-duplicate, so the field order is unchanged by removing it)
    group_id: Optional[str]
    label: str
    monotonic_timestamp: int

    def to_dict(self):
        """Return a JSON-ish dict representation of this history item."""
        return {
            'txid': self.txid,
            'amount_sat': self.amount_sat,
            'fee_sat': self.fee_sat,
            'height': self.tx_mined_status.height,
            'confirmations': self.tx_mined_status.conf,
            'timestamp': self.tx_mined_status.timestamp,
            'monotonic_timestamp': self.monotonic_timestamp,
            'incoming': self.amount_sat > 0,
            'bc_value': Satoshis(self.amount_sat),
            'bc_balance': Satoshis(self.balance_sat),
            'date': timestamp_to_datetime(self.tx_mined_status.timestamp),
            'txpos_in_block': self.tx_mined_status.txpos,
            'wanted_height': self.tx_mined_status.wanted_height,
            'label': self.label,
            'group_id': self.group_id,
        }
2300

2301
class LightningHistoryItem(NamedTuple):
    """A single Lightning entry of a wallet's history."""
    payment_hash: str
    preimage: str
    amount_msat: int
    fee_msat: Optional[int]
    type: str
    group_id: Optional[str]
    timestamp: int
    label: str
    direction: Optional[int]
    def to_dict(self):
        """Return a JSON-ish dict representation of this history item."""
        return {
            'type': self.type,
            'label': self.label,
            'timestamp': self.timestamp or 0,
            'date': timestamp_to_datetime(self.timestamp),
            'amount_msat': self.amount_msat,
            'fee_msat': self.fee_msat,
            'payment_hash': self.payment_hash,
            'preimage': self.preimage,
            'group_id': self.group_id,
            'ln_value': Satoshis(Decimal(self.amount_msat) / 1000),
            'direction': self.direction,
        }
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc