2016-07-20 19:00:34 +02:00
|
|
|
import base64
|
2018-07-18 02:35:53 +02:00
|
|
|
import codecs
|
2016-10-18 03:00:24 +02:00
|
|
|
import datetime
|
2015-08-20 17:27:15 +02:00
|
|
|
import random
|
2016-10-18 03:00:24 +02:00
|
|
|
import socket
|
2016-12-30 19:35:17 +01:00
|
|
|
import string
|
2018-07-21 20:12:29 +02:00
|
|
|
import json
|
2019-01-22 23:44:17 +01:00
|
|
|
import typing
|
|
|
|
import asyncio
|
2019-02-28 18:40:11 +01:00
|
|
|
import ssl
|
2018-06-07 18:18:07 +02:00
|
|
|
import logging
|
2019-01-31 19:46:19 +01:00
|
|
|
import ipaddress
|
2016-11-19 23:58:40 +01:00
|
|
|
import pkg_resources
|
2019-02-28 18:40:11 +01:00
|
|
|
import contextlib
|
|
|
|
import certifi
|
|
|
|
import aiohttp
|
2019-03-31 01:14:41 +01:00
|
|
|
import functools
|
2019-05-18 00:57:07 +02:00
|
|
|
import collections
|
2019-06-21 02:55:47 +02:00
|
|
|
from lbry.schema.claim import Claim
|
|
|
|
from lbry.cryptoutils import get_lbry_hash_obj
|
2016-07-25 23:09:13 +02:00
|
|
|
|
2018-06-07 18:18:07 +02:00
|
|
|
# Module-level logger; named after this module so handlers/levels can be
# configured through the standard logging hierarchy.
log = logging.getLogger(__name__)
|
|
|
|
|
2016-10-18 03:00:24 +02:00
|
|
|
|
2016-10-05 21:16:20 +02:00
|
|
|
# defining these time functions here allows for easier overriding in testing
|
2016-09-30 06:06:07 +02:00
|
|
|
def now():
    """Return the current local time (wrapped here so tests can override it)."""
    return datetime.datetime.now()
|
|
|
|
|
2016-10-05 21:16:20 +02:00
|
|
|
|
2016-09-30 06:06:07 +02:00
|
|
|
def utcnow():
    """Return the current UTC time (wrapped here so tests can override it)."""
    return datetime.datetime.utcnow()
|
|
|
|
|
2016-10-05 21:16:20 +02:00
|
|
|
|
2016-09-30 06:06:07 +02:00
|
|
|
def isonow():
    """Return utc now in isoformat with timezone"""
    timestamp = utcnow()
    return f'{timestamp.isoformat()}Z'
|
|
|
|
|
2016-10-31 22:19:19 +01:00
|
|
|
|
2016-09-30 06:06:07 +02:00
|
|
|
def today():
    """Return today's local date/time (wrapped here so tests can override it)."""
    return datetime.datetime.today()
|
|
|
|
|
|
|
|
|
2017-01-02 20:52:24 +01:00
|
|
|
def timedelta(**kwargs):
    """Build a datetime.timedelta from keyword arguments (test-overridable)."""
    return datetime.timedelta(**kwargs)
|
|
|
|
|
|
|
|
|
|
|
|
def datetime_obj(*args, **kwargs):
    """Construct a datetime.datetime from the given arguments (test-overridable)."""
    return datetime.datetime(*args, **kwargs)
|
|
|
|
|
|
|
|
|
2015-08-20 17:27:15 +02:00
|
|
|
def generate_id(num=None):
    """Generate an id digest.

    Hashes the decimal string of ``num`` when given, otherwise 512 random
    bits, using the project hash function; returns the raw digest bytes.
    """
    hasher = get_lbry_hash_obj()
    seed = num if num is not None else random.getrandbits(512)
    hasher.update(str(seed).encode())
    return hasher.digest()
|
|
|
|
|
|
|
|
|
2016-07-25 23:09:13 +02:00
|
|
|
def version_is_greater_than(a, b):
    """Returns True if version a is more recent than version b"""
    parsed_a = pkg_resources.parse_version(a)
    parsed_b = pkg_resources.parse_version(b)
    return parsed_a > parsed_b
|
2016-07-20 19:00:34 +02:00
|
|
|
|
|
|
|
|
2018-07-18 02:35:53 +02:00
|
|
|
def rot13(some_str):
    """Return ``some_str`` with the ROT13 letter substitution applied."""
    rotated = codecs.encode(some_str, 'rot_13')
    return rotated
|
|
|
|
|
|
|
|
|
2016-07-20 19:00:34 +02:00
|
|
|
def deobfuscate(obfustacated):
    """Reverse obfuscate(): undo the ROT13 layer, then base64-decode to text."""
    b64_text = rot13(obfustacated)
    return base64.b64decode(b64_text).decode()
|
2016-07-20 19:00:34 +02:00
|
|
|
|
|
|
|
|
|
|
|
def obfuscate(plain):
    """Obfuscate ``plain`` (bytes) by base64-encoding it and applying ROT13."""
    b64_text = base64.b64encode(plain).decode()
    return rot13(b64_text)
|
2016-09-16 06:14:25 +02:00
|
|
|
|
|
|
|
|
2019-06-04 16:23:04 +02:00
|
|
|
def check_connection(server="lbry.com", port=80, timeout=5) -> bool:
    """Attempts to open a socket to server:port and returns True if successful."""
    log.debug('Checking connection to %s:%s', server, port)
    try:
        # Resolve first so DNS failures are distinguishable from TCP failures.
        server = socket.gethostbyname(server)
        socket.create_connection((server, port), timeout).close()
        return True
    except (socket.gaierror, socket.herror):
        log.debug("Failed to connect to %s:%s. Unable to resolve domain. Trying to bypass DNS",
                  server, port)
        # DNS is broken; probe a well-known public IP instead (Google DNS).
        try:
            socket.create_connection(("8.8.8.8", 53), timeout).close()
            return True
        except OSError:
            return False
    except OSError:
        return False
|
2016-10-22 00:26:36 +02:00
|
|
|
|
|
|
|
|
2019-08-21 19:28:12 +02:00
|
|
|
async def async_check_connection(server="lbry.com", port=80, timeout=1) -> bool:
    """Run the blocking check_connection() in the default executor.

    Uses asyncio.get_running_loop() — get_event_loop() is deprecated inside
    coroutines, and the rest of this module already uses get_running_loop().
    """
    return await asyncio.get_running_loop().run_in_executor(
        None, check_connection, server, port, timeout
    )
|
|
|
|
|
|
|
|
|
2016-12-30 19:35:17 +01:00
|
|
|
def random_string(length=10, chars=string.ascii_lowercase):
    """Return ``length`` characters drawn uniformly at random from ``chars``."""
    return ''.join(random.choice(chars) for _ in range(length))
|
2017-02-16 15:09:21 +01:00
|
|
|
|
|
|
|
|
|
|
|
def short_hash(hash_str):
    """Abbreviate a hash-like string to its first six characters."""
    prefix_len = 6
    return hash_str[0:prefix_len]
|
2017-03-09 16:39:17 +01:00
|
|
|
|
|
|
|
|
|
|
|
def get_sd_hash(stream_info):
    """Extract the sd hash from a resolve result dict or a Claim, if present.

    Returns None for empty input; logs a warning when the nested lookup
    yields nothing.
    """
    if not stream_info:
        return None
    if isinstance(stream_info, Claim):
        return stream_info.stream.source.sd_hash
    # Walk claim -> value -> stream -> source, tolerating missing levels.
    node = stream_info.get('claim', {})
    for field in ('value', 'stream', 'source'):
        node = node.get(field, {})
    result = node.get('source')
    if not result:
        log.warning("Unable to get sd_hash")
    return result
|
2017-03-15 21:19:11 +01:00
|
|
|
|
|
|
|
|
|
|
|
def json_dumps_pretty(obj, **kwargs):
    """Serialize ``obj`` to stable, human-readable JSON (sorted keys, 2-space indent)."""
    return json.dumps(
        obj, sort_keys=True, indent=2, separators=(',', ': '), **kwargs
    )
|
2018-02-28 20:59:12 +01:00
|
|
|
|
|
|
|
|
2019-01-22 23:44:17 +01:00
|
|
|
def cancel_task(task: typing.Optional[asyncio.Task]):
    """Cancel ``task`` if it exists and has not already finished."""
    if task is not None and not task.done():
        task.cancel()
|
|
|
|
|
|
|
|
|
|
|
|
def cancel_tasks(tasks: typing.List[typing.Optional[asyncio.Task]]):
    """Cancel every unfinished task in ``tasks``; the list itself is untouched."""
    for pending in tasks:
        cancel_task(pending)
|
|
|
|
|
|
|
|
|
|
|
|
def drain_tasks(tasks: typing.List[typing.Optional[asyncio.Task]]):
    """Pop and cancel every task in ``tasks``, emptying the list in place."""
    while tasks:
        pending = tasks.pop()
        cancel_task(pending)
|
2019-01-31 19:46:19 +01:00
|
|
|
|
|
|
|
|
2019-03-31 01:14:41 +01:00
|
|
|
def async_timed_cache(duration: int):
    """Decorator: cache an async function's results for ``duration`` seconds.

    The cache key combines positional arguments and keyword items, so calls
    with identical arguments share a cached value until it expires.
    """
    def wrapper(fn):
        # key -> (cached value, loop timestamp when it was stored)
        cache: typing.Dict[typing.Tuple,
                           typing.Tuple[typing.Any, float]] = {}

        @functools.wraps(fn)
        async def _inner(*args, **kwargs) -> typing.Any:
            loop = asyncio.get_running_loop()
            now = loop.time()
            key = (args, tuple(kwargs.items()))
            entry = cache.get(key)
            if entry is not None and now - entry[1] < duration:
                return entry[0]
            result = await fn(*args, **kwargs)
            cache[key] = (result, now)
            return result

        return _inner
    return wrapper
|
|
|
|
|
|
|
|
|
2019-03-31 03:05:46 +02:00
|
|
|
def cache_concurrent(async_fn):
    """
    When the decorated function has concurrent calls made to it with the same arguments, only run it once
    """
    # key -> in-flight Task; entries live only while a call is running.
    in_flight: typing.Dict = {}

    @functools.wraps(async_fn)
    async def wrapper(*args, **kwargs):
        key = (args, tuple(kwargs.items()))
        if key not in in_flight:
            in_flight[key] = asyncio.create_task(async_fn(*args, **kwargs))
        try:
            return await in_flight[key]
        finally:
            # First finisher removes the entry; later finishers no-op.
            in_flight.pop(key, None)

    return wrapper
|
|
|
|
|
|
|
|
|
2019-03-31 01:14:41 +01:00
|
|
|
@async_timed_cache(300)
async def resolve_host(url: str, port: int, proto: str) -> str:
    """Resolve ``url`` to an IPv4 address for the given protocol.

    ``proto`` must be 'udp' or 'tcp'. If ``url`` is already an IP address
    literal it is returned unchanged. Results are cached for five minutes.
    """
    if proto not in ('udp', 'tcp'):
        raise Exception("invalid protocol")
    try:
        if ipaddress.ip_address(url):
            return url
    except ValueError:
        # Not an IP literal; fall through to DNS resolution.
        pass
    loop = asyncio.get_running_loop()
    records = await loop.getaddrinfo(
        url, port,
        proto=socket.IPPROTO_TCP if proto == 'tcp' else socket.IPPROTO_UDP,
        type=socket.SOCK_STREAM if proto == 'tcp' else socket.SOCK_DGRAM,
        family=socket.AF_INET
    )
    # getaddrinfo record: (family, type, proto, canonname, sockaddr);
    # sockaddr[0] is the IPv4 address of the first result.
    return records[0][4][0]
|
2019-02-28 18:40:11 +01:00
|
|
|
|
|
|
|
|
2019-05-18 00:57:07 +02:00
|
|
|
class LRUCache:
    """A small least-recently-used mapping with a fixed capacity.

    Backed by an OrderedDict: the front holds the least recently used entry,
    which is evicted when a new key would exceed ``capacity``.
    """

    __slots__ = ['capacity', 'cache']

    def __init__(self, capacity):
        # Maximum number of entries before the oldest is evicted.
        self.capacity = capacity
        self.cache = collections.OrderedDict()

    def get(self, key):
        """Return the value for ``key`` and mark it most recently used.

        Raises KeyError when ``key`` is absent.
        """
        self.cache.move_to_end(key)
        return self.cache[key]

    def set(self, key, value):
        """Insert or refresh ``key``; evict the LRU entry if at capacity."""
        if key in self.cache:
            del self.cache[key]
        elif len(self.cache) >= self.capacity:
            self.cache.popitem(last=False)
        self.cache[key] = value

    def __contains__(self, item) -> bool:
        return item in self.cache
|
|
|
|
|
|
|
|
|
2019-05-24 04:40:02 +02:00
|
|
|
def lru_cache_concurrent(cache_size: typing.Optional[int] = None,
                         override_lru_cache: typing.Optional[LRUCache] = None):
    """Decorator combining an LRU result cache with concurrent-call dedup.

    Completed results live in the LRU cache; while a call is in flight,
    concurrent calls with the same arguments await the same task.
    Either ``cache_size`` or an existing ``override_lru_cache`` is required.
    """
    if not cache_size and override_lru_cache is None:
        raise ValueError("invalid cache size")
    concurrent_cache = {}
    lru_cache = override_lru_cache or LRUCache(cache_size)

    def wrapper(async_fn):

        @functools.wraps(async_fn)
        async def _inner(*args, **kwargs):
            key = (args, tuple(kwargs.items()))
            if key in lru_cache:
                return lru_cache.get(key)
            if key not in concurrent_cache:
                concurrent_cache[key] = asyncio.create_task(async_fn(*args, **kwargs))
            try:
                result = await concurrent_cache[key]
                lru_cache.set(key, result)
                return result
            finally:
                # Drop the in-flight entry once the first awaiter finishes.
                concurrent_cache.pop(key, None)

        return _inner
    return wrapper
|
|
|
|
|
|
|
|
|
2019-02-28 18:42:23 +01:00
|
|
|
def get_ssl_context() -> ssl.SSLContext:
    """Build an SSLContext that trusts the certifi CA bundle.

    certifi.where() returns the path to a single PEM *file*, so it must be
    passed as ``cafile``; the previous ``capath`` (which expects a directory
    of hashed certificates) silently loaded no CA certs at all.

    NOTE(review): Purpose.CLIENT_AUTH creates a server-side context, yet this
    context is passed to outgoing client requests (see aiohttp_request) —
    confirm whether Purpose.SERVER_AUTH was intended.
    """
    return ssl.create_default_context(
        purpose=ssl.Purpose.CLIENT_AUTH, cafile=certifi.where()
    )
|
|
|
|
|
|
|
|
|
|
|
|
@contextlib.asynccontextmanager
async def aiohttp_request(method, url, **kwargs) -> typing.AsyncContextManager[aiohttp.ClientResponse]:
    """Async context manager yielding the response of a one-off HTTP request.

    A throwaway ClientSession is created per call; both session and response
    are closed when the context exits. TLS uses get_ssl_context().
    """
    async with aiohttp.ClientSession() as session, \
            session.request(method, url, ssl=get_ssl_context(), **kwargs) as response:
        yield response
|
2019-03-11 02:55:33 +01:00
|
|
|
|
|
|
|
|
|
|
|
async def get_external_ip() -> typing.Optional[str]:  # used if upnp is disabled or non-functioning
    """Best-effort lookup of this node's public IP via the lbry.com API.

    Returns the IP string on success, or None on any failure (network error,
    unexpected payload) — callers treat None as "couldn't determine".
    """
    try:
        async with aiohttp_request("get", "https://api.lbry.com/ip") as resp:
            response = await resp.json()
        if response['success']:
            return response['data']['ip']
        return None
    except Exception:  # broad by design: this is a best-effort probe
        # Previously swallowed silently with an unused `as e` binding;
        # keep the None contract but leave a trace for debugging.
        log.debug("failed to get external ip", exc_info=True)
        return None
|