2016-08-22 00:44:16 +02:00
|
|
|
import os
|
2017-07-03 22:01:19 +02:00
|
|
|
import re
|
2016-09-21 09:49:52 +02:00
|
|
|
import sys
|
2018-11-27 21:56:11 +01:00
|
|
|
import typing
|
|
|
|
import logging
|
2019-01-21 21:55:50 +01:00
|
|
|
from argparse import ArgumentParser
|
2019-01-20 10:06:55 +01:00
|
|
|
from contextlib import contextmanager
|
2020-01-03 06:32:51 +01:00
|
|
|
|
|
|
|
import yaml
|
2017-07-03 22:01:19 +02:00
|
|
|
from appdirs import user_data_dir, user_config_dir
|
2019-06-21 02:55:47 +02:00
|
|
|
from lbry.error import InvalidCurrencyError
|
|
|
|
from lbry.dht import constants
|
2020-01-03 04:18:49 +01:00
|
|
|
from lbry.wallet.coinselection import STRATEGIES
|
2017-07-03 22:01:19 +02:00
|
|
|
|
2017-01-17 04:23:20 +01:00
|
|
|
log = logging.getLogger(__name__)

# Sentinel used to distinguish "value not provided" from legitimate values
# such as None or False (e.g. as an argparse default). Note this is a class,
# not an instance, so identity checks against it are stable.
NOT_SET = type('NOT_SET', (object,), {})  # pylint: disable=invalid-name

# Type variable for the generic Setting[T] descriptor below.
T = typing.TypeVar('T')

# Currencies accepted by fee-related settings, keyed by ticker symbol.
CURRENCIES = {
    'BTC': {'type': 'crypto'},
    'LBC': {'type': 'crypto'},
    'USD': {'type': 'fiat'},
}
|
2016-10-26 09:16:33 +02:00
|
|
|
|
|
|
|
|
2019-01-20 10:06:55 +01:00
|
|
|
class Setting(typing.Generic[T]):
    """Descriptor for a single typed configuration value.

    Reading consults the owning config's ``search_order`` (runtime, CLI
    arguments, environment, persisted file) and falls back to ``default``;
    writing validates the value and stores it into every location in
    ``modify_order``. Assigning ``NOT_SET`` deletes the stored value.
    """

    def __init__(self, doc: str, default: typing.Optional[T] = None,
                 previous_names: typing.Optional[typing.List[str]] = None,
                 metavar: typing.Optional[str] = None):
        self.doc = doc
        self.default = default
        # legacy keys this setting used to be stored under (for upgrades)
        self.previous_names = previous_names or []
        self.metavar = metavar

    def __set_name__(self, owner, name):
        # attribute name on the owning config class, set automatically
        self.name = name  # pylint: disable=attribute-defined-outside-init

    @property
    def cli_name(self):
        return f"--{self.name.replace('_', '-')}"

    @property
    def no_cli_name(self):
        return f"--no-{self.name.replace('_', '-')}"

    def __get__(self, obj: typing.Optional['BaseConfig'], owner) -> T:
        if obj is None:
            # accessed on the class: return the descriptor itself
            return self
        for location in obj.search_order:
            if self.name in location:
                return location[self.name]
        return self.default

    def __set__(self, obj: 'BaseConfig', val: typing.Union[T, NOT_SET]):
        # NOT_SET is a unique sentinel, so compare by identity rather than
        # equality (equality could be hijacked by a value's custom __eq__).
        if val is NOT_SET:
            for location in obj.modify_order:
                if self.name in location:
                    del location[self.name]
        else:
            self.validate(val)
            for location in obj.modify_order:
                location[self.name] = val

    def validate(self, value):
        """Raise (via assert) if ``value`` is not acceptable; subclasses implement."""
        raise NotImplementedError()

    def deserialize(self, value):  # pylint: disable=no-self-use
        """Convert an external (CLI/env/YAML) representation to the python value."""
        return value

    def serialize(self, value):  # pylint: disable=no-self-use
        """Convert the python value to a YAML-friendly representation."""
        return value

    def contribute_to_argparse(self, parser: ArgumentParser):
        """Register this setting as a command line option on ``parser``."""
        parser.add_argument(
            self.cli_name,
            help=self.doc,
            metavar=self.metavar,
            default=NOT_SET
        )
|
|
|
|
|
2017-01-17 18:29:09 +01:00
|
|
|
|
2019-01-20 10:06:55 +01:00
|
|
|
class String(Setting[str]):
    """Setting holding a single string value."""

    def validate(self, value):
        assert isinstance(value, str), \
            f"Setting '{self.name}' must be a string."

    # TODO: remove this after pylint starts to understand generics
    def __get__(self, obj: typing.Optional['BaseConfig'], owner) -> str:  # pylint: disable=useless-super-delegation
        return super().__get__(obj, owner)
|
|
|
|
|
2019-01-20 10:06:55 +01:00
|
|
|
|
|
|
|
class Integer(Setting[int]):
    """Setting holding an integer value."""

    def validate(self, value):
        assert isinstance(value, int), \
            f"Setting '{self.name}' must be an integer."

    def deserialize(self, value):
        # coerce string representations arriving from CLI/env/YAML
        return int(value)
|
|
|
|
|
2019-01-20 10:06:55 +01:00
|
|
|
|
|
|
|
class Float(Setting[float]):
    """Setting holding a floating point value."""

    def validate(self, value):
        # NOTE(review): ints are rejected here — callers must pass a float
        assert isinstance(value, float), \
            f"Setting '{self.name}' must be a decimal."

    def deserialize(self, value):
        # coerce string/int representations arriving from CLI/env/YAML
        return float(value)
|
|
|
|
|
2019-01-20 10:06:55 +01:00
|
|
|
|
|
|
|
class Toggle(Setting[bool]):
    """Boolean setting exposed on the CLI as a --name / --no-name flag pair."""

    def validate(self, value):
        assert isinstance(value, bool), \
            f"Setting '{self.name}' must be a true/false value."

    def contribute_to_argparse(self, parser: ArgumentParser):
        # Two complementary flags write into the same destination; both
        # default to NOT_SET so ArgumentAccess can distinguish "not given"
        # from an explicit False.
        parser.add_argument(
            self.cli_name,
            help=self.doc,
            action="store_true",
            default=NOT_SET
        )
        parser.add_argument(
            self.no_cli_name,
            help=f"Opposite of {self.cli_name}",
            dest=self.name,
            action="store_false",
            default=NOT_SET
        )
|
|
|
|
|
2019-01-20 10:06:55 +01:00
|
|
|
|
|
|
|
class Path(String):
    """String setting interpreted as a filesystem path.

    Reads expand both environment variables and a leading ``~``; the stored
    value keeps its original, unexpanded form.
    """

    def __init__(self, doc: str, *args, default: str = '', **kwargs):
        super().__init__(doc, default, *args, **kwargs)

    def __get__(self, obj, owner) -> str:
        raw = super().__get__(obj, owner)
        if not isinstance(raw, str):
            # e.g. the descriptor itself when accessed on the class
            return raw
        # expand "$VAR" first, then "~", mirroring shell behaviour
        return os.path.expanduser(os.path.expandvars(raw))
|
|
|
|
|
2017-04-26 20:15:38 +02:00
|
|
|
|
2019-01-20 10:06:55 +01:00
|
|
|
class MaxKeyFee(Setting[dict]):
    """Maximum fee allowed when downloading paid content.

    The value is either ``None`` (no limit) or a dict of the form
    ``{'amount': 50.0, 'currency': 'USD'}`` with a currency from CURRENCIES.
    """

    def validate(self, value):
        if value is not None:
            assert isinstance(value, dict) and set(value) == {'currency', 'amount'}, \
                f"Setting '{self.name}' must be a dict like \"{{'amount': 50.0, 'currency': 'USD'}}\"."
            if value["currency"] not in CURRENCIES:
                raise InvalidCurrencyError(value["currency"])

    @staticmethod
    def _parse_list(l):
        # "--max-key-fee null" arrives from argparse as the list ['null']
        if l == ['null']:
            return None
        assert len(l) == 2, (
            'Max key fee is made up of either two values: '
            '"AMOUNT CURRENCY", or "null" (to set no limit)'
        )
        try:
            amount = float(l[0])
        except ValueError:
            # the message fully explains the problem; suppress the chained
            # ValueError context (pylint raise-missing-from)
            raise AssertionError('First value in max key fee is a decimal: "AMOUNT CURRENCY"') from None
        currency = str(l[1]).upper()
        if currency not in CURRENCIES:
            raise InvalidCurrencyError(currency)
        return {'amount': amount, 'currency': currency}

    def deserialize(self, value):
        """Accept None, a dict, an "AMOUNT CURRENCY" string, or a two-item list."""
        if value is None:
            return
        if isinstance(value, dict):
            return {
                'currency': value['currency'],
                'amount': float(value['amount']),
            }
        if isinstance(value, str):
            value = value.split()
        if isinstance(value, list):
            return self._parse_list(value)
        raise AssertionError('Invalid max key fee.')

    def contribute_to_argparse(self, parser: ArgumentParser):
        parser.add_argument(
            self.cli_name,
            help=self.doc,
            nargs='+',
            metavar=('AMOUNT', 'CURRENCY'),
            default=NOT_SET
        )
        parser.add_argument(
            self.no_cli_name,
            help="Disable maximum key fee check.",  # plain string; had no placeholders
            dest=self.name,
            const=None,
            action="store_const",
            default=NOT_SET
        )
|
2019-01-20 10:06:55 +01:00
|
|
|
|
|
|
|
|
2019-06-21 08:15:59 +02:00
|
|
|
class StringChoice(String):
    """String setting restricted to a fixed set of valid values.

    Raises ValueError at class-definition time if no valid values are given
    or the default is not among them.
    """

    def __init__(self, doc: str, valid_values: typing.List[str], default: str, *args, **kwargs):
        super().__init__(doc, default, *args, **kwargs)
        if not valid_values:
            raise ValueError("No valid values provided")
        if default not in valid_values:
            raise ValueError(f"Default value must be one of: {', '.join(valid_values)}")
        self.valid_values = valid_values

    def validate(self, value):
        super().validate(value)
        if value not in self.valid_values:
            raise ValueError(f"Setting '{self.name}' value must be one of: {', '.join(self.valid_values)}")
|
2019-06-19 17:36:46 +02:00
|
|
|
|
|
|
|
|
2019-06-04 22:48:12 +02:00
|
|
|
class ListSetting(Setting[list]):
    """Base class for settings whose value is a sequence.

    On the command line, repeated flags accumulate via argparse's 'append'
    action (note: no NOT_SET default here, so an omitted flag yields None —
    ArgumentAccess filters that case out).
    """

    def validate(self, value):
        assert isinstance(value, (tuple, list)), \
            f"Setting '{self.name}' must be a tuple or list."

    def contribute_to_argparse(self, parser: ArgumentParser):
        parser.add_argument(
            self.cli_name,
            help=self.doc,
            action='append'
        )
|
|
|
|
|
|
|
|
|
|
|
|
class Servers(ListSetting):
    """List of ``(hostname, port)`` pairs, serialized as ``"host:port"`` strings."""

    def validate(self, value):
        assert isinstance(value, (tuple, list)), \
            f"Setting '{self.name}' must be a tuple or list of servers."
        for idx, server in enumerate(value):
            assert isinstance(server, (tuple, list)) and len(server) == 2, \
                f"Server defined '{server}' at index {idx} in setting " \
                f"'{self.name}' must be a tuple or list of two items."
            # fixed message grammar: was "must be have hostname as string"
            assert isinstance(server[0], str), \
                f"Server defined '{server}' at index {idx} in setting " \
                f"'{self.name}' must have the hostname as a string in first position."
            assert isinstance(server[1], int), \
                f"Server defined '{server}' at index {idx} in setting " \
                f"'{self.name}' must have the port as an int in second position."

    def deserialize(self, value):
        # Best-effort parse: entries that are not "host:port" with a numeric
        # port are silently dropped (deliberate — bad config entries should
        # not prevent startup).
        servers = []
        if isinstance(value, list):
            for server in value:
                if isinstance(server, str) and server.count(':') == 1:
                    host, port = server.split(':')
                    try:
                        servers.append((host, int(port)))
                    except ValueError:
                        pass
        return servers

    def serialize(self, value):
        if value:
            return [f"{host}:{port}" for host, port in value]
        return value
|
|
|
|
|
|
|
|
|
2019-06-04 22:48:12 +02:00
|
|
|
class Strings(ListSetting):
    """List setting where every element must be a string."""

    def validate(self, value):
        assert isinstance(value, (tuple, list)), \
            f"Setting '{self.name}' must be a tuple or list of strings."
        for idx, string in enumerate(value):
            assert isinstance(string, str), \
                f"Value of '{string}' at index {idx} in setting " \
                f"'{self.name}' must be a string."
|
|
|
|
|
|
|
|
|
|
|
|
class EnvironmentAccess:
    """Read-only mapping view over environment variables.

    A setting named ``foo`` is looked up as ``LBRY_FOO`` in the wrapped
    environment dict.
    """

    PREFIX = 'LBRY_'

    def __init__(self, environ: dict):
        self.environ = environ

    def _env_key(self, item: str) -> str:
        # map a setting name to its environment variable name
        return f'{self.PREFIX}{item.upper()}'

    def __contains__(self, item: str):
        return self._env_key(item) in self.environ

    def __getitem__(self, item: str):
        return self.environ[self._env_key(item)]
|
|
|
|
|
|
|
|
|
|
|
|
class ArgumentAccess:
    """Mapping-style access to settings supplied via parsed CLI arguments."""

    def __init__(self, config: 'BaseConfig', args: dict):
        self.configuration = config
        self.args = {}
        if args:
            self.load(args)

    def load(self, args):
        """Extract and deserialize any setting values present on ``args``."""
        for setting in self.configuration.get_settings():
            value = getattr(args, setting.name, NOT_SET)
            # NOT_SET is a unique sentinel: test identity, not equality.
            # ListSettings use argparse action='append' with no default, so an
            # omitted list flag arrives as None and must be skipped too.
            if value is not NOT_SET and not (isinstance(setting, ListSetting) and value is None):
                self.args[setting.name] = setting.deserialize(value)

    def __contains__(self, item: str):
        return item in self.args

    def __getitem__(self, item: str):
        return self.args[item]
|
2019-01-20 10:06:55 +01:00
|
|
|
|
|
|
|
|
|
|
|
class ConfigFileAccess:
    """Mapping-style access to settings persisted in a YAML config file."""

    def __init__(self, config: 'BaseConfig', path: str):
        self.configuration = config
        self.path = path
        self.data = {}
        if self.exists:
            self.load()

    @property
    def exists(self):
        return self.path and os.path.exists(self.path)

    def load(self):
        """Read and deserialize the YAML file into ``self.data``."""
        cls = type(self.configuration)
        with open(self.path, 'r') as config_file:
            raw = config_file.read()
        # safe_load: a config file must never be able to construct arbitrary
        # python objects (yaml.load without an explicit Loader is deprecated
        # and unsafe). save() already emits the safe subset via safe_dump.
        serialized = yaml.safe_load(raw) or {}
        for key, value in serialized.items():
            attr = getattr(cls, key, None)
            if attr is None:
                # the key may be a legacy name of a since-renamed setting
                for setting in self.configuration.settings:
                    if key in setting.previous_names:
                        attr = setting
                        break
            if attr is not None:
                self.data[key] = attr.deserialize(value)

    def save(self):
        """Serialize ``self.data`` and write it back to the YAML file."""
        cls = type(self.configuration)
        serialized = {}
        for key, value in self.data.items():
            attr = getattr(cls, key)
            serialized[key] = attr.serialize(value)
        with open(self.path, 'w') as config_file:
            config_file.write(yaml.safe_dump(serialized, default_flow_style=False))

    def upgrade(self) -> bool:
        """Rename legacy keys to current setting names; True if anything changed."""
        upgraded = False
        for key in list(self.data):
            for setting in self.configuration.settings:
                if key in setting.previous_names:
                    self.data[setting.name] = self.data[key]
                    del self.data[key]
                    upgraded = True
                    break
        return upgraded

    def __contains__(self, item: str):
        return item in self.data

    def __getitem__(self, item: str):
        return self.data[item]

    def __setitem__(self, key, value):
        self.data[key] = value

    def __delitem__(self, key):
        del self.data[key]
|
|
|
|
|
|
|
|
|
2019-07-08 03:03:05 +02:00
|
|
|
# Bound type variable so create_from_arguments() is typed to return the subclass.
TBC = typing.TypeVar('TBC', bound='BaseConfig')
|
|
|
|
|
|
|
|
|
2019-01-21 21:55:50 +01:00
|
|
|
class BaseConfig:
    """Layered configuration container.

    Values are resolved through Setting descriptors against four layers, in
    priority order: runtime, CLI arguments, environment, persisted file.
    """

    config = Path("Path to configuration file.", metavar='FILE')

    def __init__(self, **kwargs):
        self.runtime = {}      # set internally or by various API calls
        self.arguments = {}    # from command line arguments
        self.environment = {}  # from environment variables
        self.persisted = {}    # from config file
        self._updating_config = False
        for key, value in kwargs.items():
            setattr(self, key, value)

    @contextmanager
    def update_config(self):
        """Context manager: writes inside the block also go to the persisted
        layer, which is saved to disk on successful exit."""
        self._updating_config = True
        try:
            yield self
        finally:
            # always clear the flag, even if the block raised — otherwise a
            # failed update would leave every later write going to disk
            self._updating_config = False
        # only save on a clean exit (an exception propagates past here)
        if isinstance(self.persisted, ConfigFileAccess):
            self.persisted.save()

    @property
    def modify_order(self):
        locations = [self.runtime]
        if self._updating_config:
            locations.append(self.persisted)
        return locations

    @property
    def search_order(self):
        return [
            self.runtime,
            self.arguments,
            self.environment,
            self.persisted
        ]

    @classmethod
    def get_settings(cls):
        """Yield every Setting descriptor defined on this class (and bases)."""
        for attr in dir(cls):
            setting = getattr(cls, attr)
            if isinstance(setting, Setting):
                yield setting

    @property
    def settings(self):
        return self.get_settings()

    @property
    def settings_dict(self):
        return {
            setting.name: getattr(self, setting.name) for setting in self.settings
        }

    @classmethod
    def create_from_arguments(cls, args) -> TBC:
        """Build a config populated from parsed CLI args, env and file."""
        conf = cls()
        conf.set_arguments(args)
        conf.set_environment()
        conf.set_persisted()
        return conf

    @classmethod
    def contribute_to_argparse(cls, parser: ArgumentParser):
        for setting in cls.get_settings():
            setting.contribute_to_argparse(parser)

    def set_arguments(self, args):
        self.arguments = ArgumentAccess(self, args)

    def set_environment(self, environ=None):
        self.environment = EnvironmentAccess(environ or os.environ)

    def set_persisted(self, config_file_path=None):
        """Attach the YAML file layer, upgrading legacy key names in place."""
        if config_file_path is None:
            config_file_path = self.config

        if not config_file_path:
            return

        ext = os.path.splitext(config_file_path)[1]
        assert ext in ('.yml', '.yaml'),\
            f"File extension '{ext}' is not supported, " \
            "configuration file must be in YAML (.yaml)."

        self.persisted = ConfigFileAccess(self, config_file_path)
        if self.persisted.upgrade():
            self.persisted.save()
|
2019-01-20 10:06:55 +01:00
|
|
|
|
|
|
|
|
2020-01-14 18:43:28 +01:00
|
|
|
class TranscodeConfig(BaseConfig):
    """Settings controlling ffmpeg-based stream transcoding."""

    ffmpeg_folder = String('The path to ffmpeg and ffprobe', '')
    video_encoder = String('FFmpeg codec and parameters for the video encoding. '
                           'Example: libaom-av1 -crf 25 -b:v 0 -strict experimental',
                           'libx264 -crf 18 -vf "format=yuv420p"')
    audio_encoder = String('FFmpeg codec and parameters for the audio encoding. '
                           'Example: libopus -b:a 128k',
                           'aac -b:a 192k')
    volume_filter = String('FFmpeg filter for audio normalization.', '-af loudnorm')
    # default was the string '240', which contradicts the Integer type and
    # would fail Integer.validate if ever re-assigned
    volume_analysis_time = Integer('Maximum seconds into the file that we examine audio volume (0 to disable).', 240)
|
|
|
|
|
|
|
|
|
|
|
|
class CLIConfig(TranscodeConfig):
    """Settings the command line client needs to reach the daemon API."""

    api = String('Host name and port for lbrynet daemon API.', 'localhost:5279', metavar='HOST:PORT')

    @property
    def api_connection_url(self) -> str:
        """Full JSON-RPC endpoint URL derived from `api`."""
        return f"http://{self.api}/lbryapi"

    @property
    def api_host(self):
        """Hostname portion of `api` (everything before the first colon)."""
        host, _, _ = self.api.partition(':')
        return host

    @property
    def api_port(self):
        """Port portion of `api`, as an integer."""
        port = self.api.split(':')[1]
        return int(port)
|
2019-01-20 10:06:55 +01:00
|
|
|
|
|
|
|
|
2019-01-21 21:55:50 +01:00
|
|
|
class Config(CLIConfig):
    """Full daemon configuration: directories, network, DHT, blob handling,
    reflector, server lists, blockchain and media-server settings."""

    # directories
    data_dir = Path("Directory path to store blobs.", metavar='DIR')
    download_dir = Path(
        "Directory path to place assembled files downloaded from LBRY.",
        previous_names=['download_directory'], metavar='DIR'
    )
    wallet_dir = Path(
        "Directory containing a 'wallets' subdirectory with 'default_wallet' file.",
        previous_names=['lbryum_wallet_dir'], metavar='DIR'
    )
    wallets = Strings(
        "Wallet files in 'wallet_dir' to load at startup.",
        ['default_wallet']
    )

    # network
    use_upnp = Toggle(
        "Use UPnP to setup temporary port redirects for the DHT and the hosting of blobs. If you manually forward"
        "ports or have firewall rules you likely want to disable this.", True
    )
    udp_port = Integer("UDP port for communicating on the LBRY DHT", 4444, previous_names=['dht_node_port'])
    tcp_port = Integer("TCP port to listen for incoming blob requests", 3333, previous_names=['peer_port'])
    prometheus_port = Integer("Port to expose prometheus metrics (off by default)", 0)
    network_interface = String("Interface to use for the DHT and blob exchange", '0.0.0.0')

    # routing table
    split_buckets_under_index = Integer(
        "Routing table bucket index below which we always split the bucket if given a new key to add to it and "
        "the bucket is full. As this value is raised the depth of the routing table (and number of peers in it) "
        "will increase. This setting is used by seed nodes, you probably don't want to change it during normal "
        "use.", 1
    )

    # protocol timeouts
    download_timeout = Float("Cumulative timeout for a stream to begin downloading before giving up", 30.0)
    blob_download_timeout = Float("Timeout to download a blob from a peer", 30.0)
    peer_connect_timeout = Float("Timeout to establish a TCP connection to a peer", 3.0)
    node_rpc_timeout = Float("Timeout when making a DHT request", constants.RPC_TIMEOUT)

    # blob announcement and download
    save_blobs = Toggle("Save encrypted blob files for hosting, otherwise download blobs to memory only.", True)
    blob_lru_cache_size = Integer(
        "LRU cache size for decrypted downloaded blobs used to minimize re-downloading the same blobs when "
        "replying to a range request. Set to 0 to disable.", 32
    )
    announce_head_and_sd_only = Toggle(
        "Announce only the descriptor and first (rather than all) data blob for a stream to the DHT", True,
        previous_names=['announce_head_blobs_only']
    )
    concurrent_blob_announcers = Integer(
        "Number of blobs to iteratively announce at once, set to 0 to disable", 10,
        previous_names=['concurrent_announcers']
    )
    max_connections_per_download = Integer(
        "Maximum number of peers to connect to while downloading a blob", 4,
        previous_names=['max_connections_per_stream']
    )
    fixed_peer_delay = Float(
        "Amount of seconds before adding the reflector servers as potential peers to download from in case dht"
        "peers are not found or are slow", 2.0
    )
    max_key_fee = MaxKeyFee(
        "Don't download streams with fees exceeding this amount. When set to "
        "null, the amount is unbounded.", {'currency': 'USD', 'amount': 50.0}
    )

    # reflector settings
    reflect_streams = Toggle(
        "Upload completed streams (published and downloaded) reflector in order to re-host them", True,
        previous_names=['reflect_uploads']
    )
    concurrent_reflector_uploads = Integer(
        "Maximum number of streams to upload to a reflector server at a time", 10
    )

    # servers
    reflector_servers = Servers("Reflector re-hosting servers", [
        ('reflector.lbry.com', 5566)
    ])
    lbryum_servers = Servers("SPV wallet servers", [
        ('spv11.lbry.com', 50001),
        ('spv12.lbry.com', 50001),
        ('spv13.lbry.com', 50001),
        ('spv14.lbry.com', 50001),
        ('spv15.lbry.com', 50001),
        ('spv16.lbry.com', 50001),
        ('spv17.lbry.com', 50001),
        ('spv18.lbry.com', 50001),
        ('spv19.lbry.com', 50001),
    ])
    known_dht_nodes = Servers("Known nodes for bootstrapping connection to the DHT", [
        ('lbrynet1.lbry.com', 4444),  # US EAST
        ('lbrynet2.lbry.com', 4444),  # US WEST
        ('lbrynet3.lbry.com', 4444),  # EU
        ('lbrynet4.lbry.com', 4444)  # ASIA
    ])

    comment_server = String("Comment server API URL", "https://comments.lbry.com/api")

    # blockchain
    blockchain_name = String("Blockchain name - lbrycrd_main, lbrycrd_regtest, or lbrycrd_testnet", 'lbrycrd_main')
    s3_headers_depth = Integer("download headers from s3 when the local height is more than 10 chunks behind", 96 * 10)
    cache_time = Integer("Time to cache resolved claims", 150)  # TODO: use this

    # daemon
    save_files = Toggle("Save downloaded files when calling `get` by default", True)
    components_to_skip = Strings("components which will be skipped during start-up of daemon", [])
    share_usage_data = Toggle(
        "Whether to share usage stats and diagnostic info with LBRY.", True,
        # removed a duplicated 'upload_log' entry; membership tests only need it once
        previous_names=['upload_log', 'share_debug_info']
    )
    track_bandwidth = Toggle("Track bandwidth usage", True)

    # media server
    streaming_server = String('Host name and port to serve streaming media over range requests',
                              'localhost:5280', metavar='HOST:PORT')
    streaming_get = Toggle("Enable the /get endpoint for the streaming media server. "
                           "Disable to prevent new streams from being added.", True)

    coin_selection_strategy = StringChoice(
        "Strategy to use when selecting UTXOs for a transaction",
        STRATEGIES, "standard")

    save_resolved_claims = Toggle(
        "Save content claims to the database when they are resolved to keep file_list up to date, "
        "only disable this if file_x commands are not needed", True
    )

    @property
    def streaming_host(self):
        return self.streaming_server.split(':')[0]

    @property
    def streaming_port(self):
        return int(self.streaming_server.split(':')[1])

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.set_default_paths()

    def set_default_paths(self):
        """Set platform-specific defaults for data/wallet/download dirs."""
        # 'darwin' contains 'win', so the darwin check must come first
        if 'darwin' in sys.platform.lower():
            get_directories = get_darwin_directories
        elif 'win' in sys.platform.lower():
            get_directories = get_windows_directories
        elif 'linux' in sys.platform.lower():
            get_directories = get_linux_directories
        else:
            return
        cls = type(self)
        cls.data_dir.default, cls.wallet_dir.default, cls.download_dir.default = get_directories()
        cls.config.default = os.path.join(
            self.data_dir, 'daemon_settings.yml'
        )

    @property
    def log_file_path(self):
        return os.path.join(self.data_dir, 'lbrynet.log')
|
2018-11-27 21:56:11 +01:00
|
|
|
|
2017-01-17 18:29:09 +01:00
|
|
|
|
2019-01-21 21:55:50 +01:00
|
|
|
def get_windows_directories() -> typing.Tuple[str, str, str]:
    """Return default ``(data_dir, wallet_dir, download_dir)`` on Windows.

    Prefers legacy ``%APPDATA%`` locations when they already exist so that
    upgraded installs keep their data; otherwise uses appdirs' per-user paths.
    """
    from lbry.winpaths import get_path, FOLDERID, UserHandle, \
        PathNotFoundException  # pylint: disable=import-outside-toplevel

    try:
        download_dir = get_path(FOLDERID.Downloads, UserHandle.current)
    except PathNotFoundException:
        # no Downloads known-folder registered; fall back to the working dir
        download_dir = os.getcwd()

    # old
    appdata = get_path(FOLDERID.RoamingAppData, UserHandle.current)
    data_dir = os.path.join(appdata, 'lbrynet')
    lbryum_dir = os.path.join(appdata, 'lbryum')
    if os.path.isdir(data_dir) or os.path.isdir(lbryum_dir):
        return data_dir, lbryum_dir, download_dir

    # new
    data_dir = user_data_dir('lbrynet', 'lbry')
    lbryum_dir = user_data_dir('lbryum', 'lbry')
    return data_dir, lbryum_dir, download_dir
|
2016-10-31 22:19:19 +01:00
|
|
|
|
2017-01-17 18:29:09 +01:00
|
|
|
|
2019-01-21 21:55:50 +01:00
|
|
|
def get_darwin_directories() -> typing.Tuple[str, str, str]:
    """Return default ``(data_dir, wallet_dir, download_dir)`` on macOS."""
    expand = os.path.expanduser
    return user_data_dir('LBRY'), expand('~/.lbryum'), expand('~/Downloads')
|
2017-01-17 18:29:09 +01:00
|
|
|
|
2018-12-14 17:19:00 +01:00
|
|
|
|
2019-01-21 21:55:50 +01:00
|
|
|
def get_linux_directories() -> typing.Tuple[str, str, str]:
    """Return default ``(data_dir, wallet_dir, download_dir)`` on Linux.

    The download directory is read from the XDG ``user-dirs.dirs`` file when
    available, then the ``XDG_DOWNLOAD_DIR`` environment variable, then
    ``~/Downloads``. Legacy dot-directories are preferred when they exist.
    """
    try:
        with open(os.path.join(user_config_dir(), 'user-dirs.dirs'), 'r') as xdg:
            down_dir = re.search(r'XDG_DOWNLOAD_DIR=(.+)', xdg.read()).group(1)
        down_dir = re.sub(r'\$HOME', os.getenv('HOME') or os.path.expanduser("~/"), down_dir)
        download_dir = re.sub('\"', '', down_dir)
    except (OSError, AttributeError):
        # OSError: user-dirs.dirs missing or unreadable.
        # AttributeError: the file exists but has no XDG_DOWNLOAD_DIR entry,
        # so re.search returned None — previously this crashed uncaught.
        download_dir = os.getenv('XDG_DOWNLOAD_DIR')
    if not download_dir:
        download_dir = os.path.expanduser('~/Downloads')

    # old
    data_dir = os.path.expanduser('~/.lbrynet')
    lbryum_dir = os.path.expanduser('~/.lbryum')
    if os.path.isdir(data_dir) or os.path.isdir(lbryum_dir):
        return data_dir, lbryum_dir, download_dir

    # new
    return user_data_dir('lbry/lbrynet'), user_data_dir('lbry/lbryum'), download_dir
|