import base64
import codecs
import datetime
import random
import socket
import time
import string
import sys
import json
import typing
import asyncio
import ssl
import logging
import ipaddress
import contextlib
import functools
import collections
import hashlib
import pkg_resources

import certifi
import aiohttp
from prometheus_client import Counter
from lbry.schema.claim import Claim


log = logging.getLogger(__name__)


# defining these time functions here allows for easier overriding in testing
def now():
    return datetime.datetime.now()


def utcnow():
    return datetime.datetime.utcnow()


def isonow():
    """Return utc now in isoformat with timezone"""
    return utcnow().isoformat() + 'Z'


def today():
    return datetime.datetime.today()


def timedelta(**kwargs):
    return datetime.timedelta(**kwargs)


def datetime_obj(*args, **kwargs):
    return datetime.datetime(*args, **kwargs)


def get_lbry_hash_obj():
    return hashlib.sha384()


def generate_id(num=None):
    h = get_lbry_hash_obj()
    if num is not None:
        h.update(str(num).encode())
    else:
        h.update(str(random.getrandbits(512)).encode())
    return h.digest()
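

# Example (illustrative): generate_id() hashes either the given number or 512 random
# bits with SHA-384, so the result is always a 48-byte digest:
#
#   node_id = generate_id()
#   assert len(node_id) == 48
#   assert generate_id(42) == generate_id(42)  # deterministic for the same input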


def version_is_greater_than(version_a, version_b):
    """Returns True if version a is more recent than version b"""
    return pkg_resources.parse_version(version_a) > pkg_resources.parse_version(version_b)
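

# Example (illustrative): pkg_resources applies PEP 440 ordering, so pre-releases
# sort before the final release:
#
#   assert version_is_greater_than("0.102.0", "0.101.1")
#   assert not version_is_greater_than("1.0.0rc1", "1.0.0")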


def rot13(some_str):
    return codecs.encode(some_str, 'rot_13')


def deobfuscate(obfuscated):
    return base64.b64decode(rot13(obfuscated)).decode()


def obfuscate(plain):
    return rot13(base64.b64encode(plain).decode())
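

# Example round trip (illustrative): obfuscate() expects bytes and returns a rot13'd
# base64 string, deobfuscate() reverses it and returns the decoded text:
#
#   token = obfuscate(b"my seed words")
#   assert deobfuscate(token) == "my seed words"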


def check_connection(server="lbry.com", port=80, timeout=5) -> bool:
    """Attempts to open a socket to server:port and returns True if successful."""
    log.debug('Checking connection to %s:%s', server, port)
    try:
        server = socket.gethostbyname(server)
        socket.create_connection((server, port), timeout).close()
        return True
    except (socket.gaierror, socket.herror):
        log.debug("Failed to connect to %s:%s. Unable to resolve domain. Trying to bypass DNS",
                  server, port)
        try:
            server = "8.8.8.8"
            port = 53
            socket.create_connection((server, port), timeout).close()
            return True
        except OSError:
            return False
    except OSError:
        return False


def random_string(length=10, chars=string.ascii_lowercase):
    return ''.join([random.choice(chars) for _ in range(length)])


def short_hash(hash_str):
    return hash_str[:6]


def get_sd_hash(stream_info):
    if not stream_info:
        return None
    if isinstance(stream_info, Claim):
        return stream_info.stream.source.sd_hash
    result = stream_info.get('claim', {}).\
        get('value', {}).\
        get('stream', {}).\
        get('source', {}).\
        get('source')
    if not result:
        log.warning("Unable to get sd_hash")
    return result


def json_dumps_pretty(obj, **kwargs):
    return json.dumps(obj, sort_keys=True, indent=2, separators=(',', ': '), **kwargs)


try:
    # the standard contextlib.aclosing() is available in 3.10+
    from contextlib import aclosing  # pylint: disable=unused-import
except ImportError:
    @contextlib.asynccontextmanager
    async def aclosing(thing):
        try:
            yield thing
        finally:
            await thing.aclose()


def async_timed_cache(duration: int):
    def wrapper(func):
        cache: typing.Dict[typing.Tuple,
                           typing.Tuple[typing.Any, float]] = {}

        @functools.wraps(func)
        async def _inner(*args, **kwargs) -> typing.Any:
            loop = asyncio.get_running_loop()
            time_now = loop.time()
            key = (args, tuple(kwargs.items()))
            if key in cache and (time_now - cache[key][1] < duration):
                return cache[key][0]
            to_cache = await func(*args, **kwargs)
            cache[key] = to_cache, time_now
            return to_cache
        return _inner
    return wrapper
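

# Example usage (illustrative sketch; fetch_fee_estimate is a hypothetical coroutine):
# results are cached per argument tuple and reused for `duration` seconds of loop time.
#
#   @async_timed_cache(60)
#   async def fetch_fee_estimate(blocks: int) -> float:
#       ...  # expensive network call
#
#   # a second call with the same argument within 60 seconds returns the cached value
#   await fetch_fee_estimate(6)
#   await fetch_fee_estimate(6)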


def cache_concurrent(async_fn):
    """
    When the decorated function has concurrent calls made to it with the same arguments, only run it once
    """
    cache: typing.Dict = {}

    @functools.wraps(async_fn)
    async def wrapper(*args, **kwargs):
        key = (args, tuple(kwargs.items()))
        cache[key] = cache.get(key) or asyncio.create_task(async_fn(*args, **kwargs))
        try:
            return await cache[key]
        finally:
            cache.pop(key, None)

    return wrapper
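

# Example usage (illustrative sketch; download_blob is a hypothetical coroutine):
# overlapping calls with identical arguments await the same task; once it completes
# the entry is dropped, so later calls run the coroutine again.
#
#   @cache_concurrent
#   async def download_blob(blob_hash: str) -> bytes:
#       ...
#
#   # only one download is started for the duplicated hash
#   await asyncio.gather(download_blob("deadbeef"), download_blob("deadbeef"))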


@async_timed_cache(300)
async def resolve_host(url: str, port: int, proto: str) -> str:
    if proto not in ['udp', 'tcp']:
        raise Exception("invalid protocol")
    if url.lower() == 'localhost':
        return '127.0.0.1'
    try:
        if ipaddress.ip_address(url):
            return url
    except ValueError:
        pass
    loop = asyncio.get_running_loop()
    return (await loop.getaddrinfo(
        url, port,
        proto=socket.IPPROTO_TCP if proto == 'tcp' else socket.IPPROTO_UDP,
        type=socket.SOCK_STREAM if proto == 'tcp' else socket.SOCK_DGRAM,
        family=socket.AF_INET
    ))[0][4][0]
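

# Example usage (illustrative; example.com stands in for a real hostname): literal IPs
# and 'localhost' short-circuit, other names go through getaddrinfo, and results are
# cached for 300 seconds by the decorator above.
#
#   await resolve_host('127.0.0.1', 50001, 'tcp')   # -> '127.0.0.1'
#   await resolve_host('example.com', 80, 'tcp')    # -> an IPv4 address string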


class LRUCacheWithMetrics:
    __slots__ = [
        'capacity',
        'cache',
        '_track_metrics',
        'hits',
        'misses'
    ]

    def __init__(self, capacity: int, metric_name: typing.Optional[str] = None, namespace: str = "daemon_cache"):
        self.capacity = capacity
        self.cache = collections.OrderedDict()
        if metric_name is None:
            self._track_metrics = False
            self.hits = self.misses = None
        else:
            self._track_metrics = True
            try:
                self.hits = Counter(
                    f"{metric_name}_cache_hit_count", "Number of cache hits", namespace=namespace
                )
                self.misses = Counter(
                    f"{metric_name}_cache_miss_count", "Number of cache misses", namespace=namespace
                )
            except ValueError as err:
                log.debug("failed to set up prometheus %s_cache_miss_count metric: %s", metric_name, err)
                self._track_metrics = False
                self.hits = self.misses = None

    def get(self, key, default=None):
        try:
            value = self.cache.pop(key)
            if self._track_metrics:
                self.hits.inc()
        except KeyError:
            if self._track_metrics:
                self.misses.inc()
            return default
        self.cache[key] = value
        return value

    def set(self, key, value):
        try:
            self.cache.pop(key)
        except KeyError:
            if len(self.cache) >= self.capacity:
                self.cache.popitem(last=False)
        self.cache[key] = value

    def clear(self):
        self.cache.clear()

    def pop(self, key):
        return self.cache.pop(key)

    def __setitem__(self, key, value):
        return self.set(key, value)

    def __getitem__(self, item):
        return self.get(item)

    def __contains__(self, item) -> bool:
        return item in self.cache

    def __len__(self):
        return len(self.cache)

    def __delitem__(self, key):
        self.cache.pop(key)

    def __del__(self):
        self.clear()


class LRUCache:
    __slots__ = [
        'capacity',
        'cache'
    ]

    def __init__(self, capacity: int):
        self.capacity = capacity
        self.cache = collections.OrderedDict()

    def get(self, key, default=None):
        try:
            value = self.cache.pop(key)
        except KeyError:
            return default
        self.cache[key] = value
        return value

    def set(self, key, value):
        try:
            self.cache.pop(key)
        except KeyError:
            if len(self.cache) >= self.capacity:
                self.cache.popitem(last=False)
        self.cache[key] = value

    def items(self):
        return self.cache.items()

    def clear(self):
        self.cache.clear()

    def pop(self, key, default=None):
        return self.cache.pop(key, default)

    def __setitem__(self, key, value):
        return self.set(key, value)

    def __getitem__(self, item):
        return self.get(item)

    def __contains__(self, item) -> bool:
        return item in self.cache

    def __len__(self):
        return len(self.cache)

    def __delitem__(self, key):
        self.cache.pop(key)

    def __del__(self):
        self.clear()
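

# Example behavior (illustrative): both caches evict the least recently used key once
# capacity is exceeded, and get()/[] refresh recency.
#
#   cache = LRUCache(2)
#   cache['a'] = 1
#   cache['b'] = 2
#   cache['a']       # touch 'a' so it becomes the most recently used entry
#   cache['c'] = 3   # evicts 'b', the least recently used entry
#   assert 'b' not in cache and 'a' in cache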


def lru_cache_concurrent(cache_size: typing.Optional[int] = None,
                         override_lru_cache: typing.Optional[LRUCacheWithMetrics] = None):
    if not cache_size and override_lru_cache is None:
        raise ValueError("invalid cache size")
    concurrent_cache = {}
    lru_cache = override_lru_cache if override_lru_cache is not None else LRUCacheWithMetrics(cache_size)

    def wrapper(async_fn):

        @functools.wraps(async_fn)
        async def _inner(*args, **kwargs):
            key = (args, tuple(kwargs.items()))
            if key in lru_cache:
                return lru_cache.get(key)

            concurrent_cache[key] = concurrent_cache.get(key) or asyncio.create_task(async_fn(*args, **kwargs))

            try:
                result = await concurrent_cache[key]
                lru_cache.set(key, result)
                return result
            finally:
                concurrent_cache.pop(key, None)
        return _inner
    return wrapper
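

# Example usage (illustrative sketch; get_transaction is a hypothetical coroutine):
# completed results live in the LRU cache while in-flight calls with the same
# arguments share a single task.
#
#   @lru_cache_concurrent(cache_size=256)
#   async def get_transaction(txid: str) -> dict:
#       ...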


def get_ssl_context() -> ssl.SSLContext:
    return ssl.create_default_context(
        purpose=ssl.Purpose.CLIENT_AUTH, capath=certifi.where()
    )


@contextlib.asynccontextmanager
async def aiohttp_request(method, url, **kwargs) -> typing.AsyncContextManager[aiohttp.ClientResponse]:
    async with aiohttp.ClientSession() as session:
        async with session.request(method, url, **kwargs) as response:
            yield response
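

# Example usage (illustrative): a throwaway ClientSession is created per call, so this
# suits one-off requests rather than high-volume clients.
#
#   async with aiohttp_request("get", "https://example.com/status") as resp:
#       body = await resp.json()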


# the ipaddress module does not show these subnets as reserved
CARRIER_GRADE_NAT_SUBNET = ipaddress.ip_network('100.64.0.0/10')
IPV4_TO_6_RELAY_SUBNET = ipaddress.ip_network('192.88.99.0/24')


def is_valid_public_ipv4(address, allow_localhost: bool = False, allow_lan: bool = False):
    try:
        parsed_ip = ipaddress.ip_address(address)
        if parsed_ip.is_loopback and allow_localhost:
            return True
        if allow_lan and parsed_ip.is_private:
            return True
        if any((parsed_ip.version != 4, parsed_ip.is_unspecified, parsed_ip.is_link_local, parsed_ip.is_loopback,
                parsed_ip.is_multicast, parsed_ip.is_reserved, parsed_ip.is_private)):
            return False
        else:
            return not any((CARRIER_GRADE_NAT_SUBNET.supernet_of(ipaddress.ip_network(f"{address}/32")),
                            IPV4_TO_6_RELAY_SUBNET.supernet_of(ipaddress.ip_network(f"{address}/32"))))
    except (ipaddress.AddressValueError, ValueError):
        return False
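

# Examples (illustrative): loopback, LAN, carrier-grade NAT and 6to4 relay addresses
# are rejected unless explicitly allowed.
#
#   assert is_valid_public_ipv4("8.8.8.8")
#   assert not is_valid_public_ipv4("192.168.1.10")
#   assert not is_valid_public_ipv4("100.64.1.1")  # carrier-grade NAT range
#   assert is_valid_public_ipv4("127.0.0.1", allow_localhost=True)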


async def fallback_get_external_ip():  # used if spv servers can't be used for ip detection
    try:
        async with aiohttp_request("get", "https://api.lbry.com/ip") as resp:
            response = await resp.json()
            if response['success']:
                return response['data']['ip'], None
    except Exception:
        return None, None
    # keep the return shape consistent when the API responds but reports failure
    return None, None


async def _get_external_ip(default_servers) -> typing.Tuple[typing.Optional[str], typing.Optional[str]]:
    # used if upnp is disabled or non-functioning
    from lbry.wallet.udp import SPVStatusClientProtocol  # pylint: disable=C0415

    hostname_to_ip = {}
    ip_to_hostnames = collections.defaultdict(list)

    async def resolve_spv(server, port):
        try:
            server_addr = await resolve_host(server, port, 'udp')
            hostname_to_ip[server] = (server_addr, port)
            ip_to_hostnames[(server_addr, port)].append(server)
        except Exception:
            log.exception("error looking up dns for spv servers")

    # accumulate the dns results
    await asyncio.gather(*(resolve_spv(server, port) for (server, port) in default_servers))

    loop = asyncio.get_event_loop()
    pong_responses = asyncio.Queue()
    connection = SPVStatusClientProtocol(pong_responses)
    try:
        await loop.create_datagram_endpoint(lambda: connection, ('0.0.0.0', 0))
        # could raise OSError if it can't bind
        randomized_servers = list(ip_to_hostnames.keys())
        random.shuffle(randomized_servers)
        for server in randomized_servers:
            connection.ping(server)
            try:
                _, pong = await asyncio.wait_for(pong_responses.get(), 1)
                if is_valid_public_ipv4(pong.ip_address):
                    return pong.ip_address, ip_to_hostnames[server][0]
            except asyncio.TimeoutError:
                pass
        return None, None
    finally:
        connection.close()


async def get_external_ip(default_servers) -> typing.Tuple[typing.Optional[str], typing.Optional[str]]:
    ip_from_spv_servers = await _get_external_ip(default_servers)
    if not ip_from_spv_servers[1]:
        return await fallback_get_external_ip()
    return ip_from_spv_servers


def is_running_from_bundle():
    # see https://pyinstaller.readthedocs.io/en/stable/runtime-information.html
    return getattr(sys, 'frozen', False) and hasattr(sys, '_MEIPASS')


class LockWithMetrics(asyncio.Lock):
    def __init__(self, acquire_metric, held_time_metric):
        super().__init__()
        self._acquire_metric = acquire_metric
        self._lock_held_time_metric = held_time_metric
        self._lock_acquired_time = None

    async def acquire(self):
        start = time.perf_counter()
        try:
            return await super().acquire()
        finally:
            self._lock_acquired_time = time.perf_counter()
            self._acquire_metric.observe(self._lock_acquired_time - start)

    def release(self):
        try:
            return super().release()
        finally:
            self._lock_held_time_metric.observe(time.perf_counter() - self._lock_acquired_time)
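

# Example usage (illustrative sketch, assuming prometheus_client Histogram metrics,
# which provide the observe() method these hooks call):
#
#   from prometheus_client import Histogram
#   acquire_hist = Histogram("db_lock_acquire_seconds", "Time spent waiting for the lock")
#   held_hist = Histogram("db_lock_held_seconds", "Time the lock was held")
#   lock = LockWithMetrics(acquire_hist, held_hist)
#
#   async with lock:
#       ...  # wait time and hold time are observed automatically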


def get_colliding_prefix_bits(first_value: bytes, second_value: bytes):
    """
    Calculates the number of colliding prefix bits between <first_value> and <second_value>.

    This is given by the number of bits that are the same until the first different one (via XOR),
    starting from the most significant bit to the least significant bit.

    :param first_value: first value to compare; must be the same length as second_value.
    :param second_value: second value to compare; must be the same length as first_value.
    :return: number of colliding prefix bits.
    """
    assert len(first_value) == len(second_value), "length should be the same"
    size = len(first_value) * 8
    first_value, second_value = int.from_bytes(first_value, "big"), int.from_bytes(second_value, "big")
    return size - (first_value ^ second_value).bit_length()
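

# Worked example (illustrative): 0xff and 0xf0 share their four most significant bits.
#
#   assert get_colliding_prefix_bits(b'\xff', b'\xf0') == 4
#   assert get_colliding_prefix_bits(b'\xff', b'\xff') == 8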