import os
import re
import sys
import logging
from typing import List, Dict, Tuple, Union, TypeVar, Generic, Optional
from argparse import ArgumentParser
from contextlib import contextmanager

from appdirs import user_data_dir, user_config_dir
import yaml

from lbry.error import InvalidCurrencyError
from lbry.dht import constants
from lbry.wallet.coinselection import STRATEGIES

log = logging.getLogger(__name__)

NOT_SET = type('NOT_SET', (object,), {})  # pylint: disable=invalid-name
T = TypeVar('T')

CURRENCIES = {
    'BTC': {'type': 'crypto'},
    'LBC': {'type': 'crypto'},
    'USD': {'type': 'fiat'},
}


class Setting(Generic[T]):

    def __init__(self, doc: str, default: Optional[T] = None,
                 previous_names: Optional[List[str]] = None,
                 metavar: Optional[str] = None):
        self.doc = doc
        self.default = default
        self.previous_names = previous_names or []
        self.metavar = metavar

    def __set_name__(self, owner, name):
        self.name = name  # pylint: disable=attribute-defined-outside-init

    @property
    def cli_name(self):
        return f"--{self.name.replace('_', '-')}"

    @property
    def no_cli_name(self):
        return f"--no-{self.name.replace('_', '-')}"

    def __get__(self, obj: Optional['BaseConfig'], owner) -> T:
        if obj is None:
            return self
        for location in obj.search_order:
            if self.name in location:
                return location[self.name]
        return self.default

    def __set__(self, obj: 'BaseConfig', val: Union[T, NOT_SET]):
        if val == NOT_SET:
            for location in obj.modify_order:
                if self.name in location:
                    del location[self.name]
        else:
            self.validate(val)
            for location in obj.modify_order:
                location[self.name] = val

    def is_set(self, obj: 'BaseConfig') -> bool:
        for location in obj.search_order:
            if self.name in location:
                return True
        return False

    def is_set_to_default(self, obj: 'BaseConfig') -> bool:
        for location in obj.search_order:
            if self.name in location:
                return location[self.name] == self.default
        return False

    def validate(self, value):
        raise NotImplementedError()

    def deserialize(self, value):  # pylint: disable=no-self-use
        return value

    def serialize(self, value):  # pylint: disable=no-self-use
        return value

    def contribute_to_argparse(self, parser: ArgumentParser):
        parser.add_argument(
            self.cli_name,
            help=self.doc,
            metavar=self.metavar,
            default=NOT_SET
        )
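
# Illustrative sketch (comment only, not executed): settings are descriptors,
# so attribute reads walk the owning config's search_order and fall back to
# the default, while writes pass through validate() into modify_order.
# `ExampleConfig` and `speed` below are hypothetical, not part of this module:
#
#   >>> class ExampleConfig(BaseConfig):
#   ...     speed = Integer("An example integer setting.", 5)
#   >>> conf = ExampleConfig()
#   >>> conf.speed              # set nowhere: Setting.__get__ -> default
#   5
#   >>> conf.speed = 10         # Setting.__set__ stores it in conf.runtime
#   >>> conf.speed
#   10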


class String(Setting[str]):
    def validate(self, value):
        assert isinstance(value, str), \
            f"Setting '{self.name}' must be a string."

    # TODO: remove this after pylint starts to understand generics
    def __get__(self, obj: Optional['BaseConfig'], owner) -> str:  # pylint: disable=useless-super-delegation
        return super().__get__(obj, owner)


class Integer(Setting[int]):
    def validate(self, value):
        assert isinstance(value, int), \
            f"Setting '{self.name}' must be an integer."

    def deserialize(self, value):
        return int(value)


class Float(Setting[float]):
    def validate(self, value):
        assert isinstance(value, float), \
            f"Setting '{self.name}' must be a decimal."

    def deserialize(self, value):
        return float(value)


class Toggle(Setting[bool]):
    def validate(self, value):
        assert isinstance(value, bool), \
            f"Setting '{self.name}' must be a true/false value."

    def contribute_to_argparse(self, parser: ArgumentParser):
        parser.add_argument(
            self.cli_name,
            help=self.doc,
            action="store_true",
            default=NOT_SET
        )
        parser.add_argument(
            self.no_cli_name,
            help=f"Opposite of {self.cli_name}",
            dest=self.name,
            action="store_false",
            default=NOT_SET
        )
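
# Illustrative sketch: for a Toggle attribute named `use_upnp`, the two
# add_argument() calls above register a complementary flag pair, roughly:
#
#   --use-upnp       stores True  (action="store_true")
#   --no-use-upnp    stores False into the same dest (action="store_false")
#
# Leaving both flags off keeps the value at NOT_SET, so lower-priority
# sources (environment, config file, default) still apply.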


class Path(String):
    def __init__(self, doc: str, *args, default: str = '', **kwargs):
        super().__init__(doc, default, *args, **kwargs)

    def __get__(self, obj, owner) -> str:
        value = super().__get__(obj, owner)
        if isinstance(value, str):
            return os.path.expanduser(os.path.expandvars(value))
        return value
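
# Illustrative sketch: Path values are expanded on read, so a value stored as
# '$HOME/Downloads' (hypothetical) comes back through Path.__get__ with the
# environment variable and any '~' already resolved, e.g. '/home/user/Downloads'.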


class MaxKeyFee(Setting[dict]):

    def validate(self, value):
        if value is not None:
            assert isinstance(value, dict) and set(value) == {'currency', 'amount'}, \
                f"Setting '{self.name}' must be a dict like \"{{'amount': 50.0, 'currency': 'USD'}}\"."
            if value["currency"] not in CURRENCIES:
                raise InvalidCurrencyError(value["currency"])

    @staticmethod
    def _parse_list(l):
        if l == ['null']:
            return None
        assert len(l) == 2, (
            'Max key fee is made up of either two values: '
            '"AMOUNT CURRENCY", or "null" (to set no limit)'
        )
        try:
            amount = float(l[0])
        except ValueError:
            raise AssertionError('First value in max key fee must be a decimal: "AMOUNT CURRENCY"')
        currency = str(l[1]).upper()
        if currency not in CURRENCIES:
            raise InvalidCurrencyError(currency)
        return {'amount': amount, 'currency': currency}

    def deserialize(self, value):
        if value is None:
            return
        if isinstance(value, dict):
            return {
                'currency': value['currency'],
                'amount': float(value['amount']),
            }
        if isinstance(value, str):
            value = value.split()
        if isinstance(value, list):
            return self._parse_list(value)
        raise AssertionError('Invalid max key fee.')

    def contribute_to_argparse(self, parser: ArgumentParser):
        parser.add_argument(
            self.cli_name,
            help=self.doc,
            nargs='+',
            metavar=('AMOUNT', 'CURRENCY'),
            default=NOT_SET
        )
        parser.add_argument(
            self.no_cli_name,
            help="Disable maximum key fee check.",
            dest=self.name,
            const=None,
            action="store_const",
            default=NOT_SET
        )
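
# Illustrative sketch of the accepted spellings (doctest-style, not executed):
#
#   >>> MaxKeyFee._parse_list(['50.0', 'usd'])    # "AMOUNT CURRENCY"
#   {'amount': 50.0, 'currency': 'USD'}
#   >>> MaxKeyFee._parse_list(['null']) is None   # "null" disables the limit
#   True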


class StringChoice(String):
    def __init__(self, doc: str, valid_values: List[str], default: str, *args, **kwargs):
        super().__init__(doc, default, *args, **kwargs)
        if not valid_values:
            raise ValueError("No valid values provided")
        if default not in valid_values:
            raise ValueError(f"Default value must be one of: {', '.join(valid_values)}")
        self.valid_values = valid_values

    def validate(self, value):
        super().validate(value)
        if value not in self.valid_values:
            raise ValueError(f"Setting '{self.name}' value must be one of: {', '.join(self.valid_values)}")


class ListSetting(Setting[list]):

    def validate(self, value):
        assert isinstance(value, (tuple, list)), \
            f"Setting '{self.name}' must be a tuple or list."

    def contribute_to_argparse(self, parser: ArgumentParser):
        parser.add_argument(
            self.cli_name,
            help=self.doc,
            action='append'
        )


class Servers(ListSetting):

    def validate(self, value):
        assert isinstance(value, (tuple, list)), \
            f"Setting '{self.name}' must be a tuple or list of servers."
        for idx, server in enumerate(value):
            assert isinstance(server, (tuple, list)) and len(server) == 2, \
                f"Server defined '{server}' at index {idx} in setting " \
                f"'{self.name}' must be a tuple or list of two items."
            assert isinstance(server[0], str), \
                f"Server defined '{server}' at index {idx} in setting " \
                f"'{self.name}' must have the hostname as a string in the first position."
            assert isinstance(server[1], int), \
                f"Server defined '{server}' at index {idx} in setting " \
                f"'{self.name}' must have the port as an int in the second position."

    def deserialize(self, value):
        servers = []
        if isinstance(value, list):
            for server in value:
                if isinstance(server, str) and server.count(':') == 1:
                    host, port = server.split(':')
                    try:
                        servers.append((host, int(port)))
                    except ValueError:
                        pass
        return servers

    def serialize(self, value):
        if value:
            return [f"{host}:{port}" for host, port in value]
        return value
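
# Illustrative sketch: Servers round-trips between the "host:port" strings
# stored in YAML or the environment and the (host, port) tuples used at
# runtime (the hostname is hypothetical):
#
#   >>> Servers("doc").deserialize(['example.com:50001'])
#   [('example.com', 50001)]
#   >>> Servers("doc").serialize([('example.com', 50001)])
#   ['example.com:50001']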


class Strings(ListSetting):

    def validate(self, value):
        assert isinstance(value, (tuple, list)), \
            f"Setting '{self.name}' must be a tuple or list of strings."
        for idx, string in enumerate(value):
            assert isinstance(string, str), \
                f"Value of '{string}' at index {idx} in setting " \
                f"'{self.name}' must be a string."


class KnownHubsList:

    def __init__(self, config: Optional['Config'] = None, file_name: str = 'known_hubs.yml'):
        self.file_name = file_name
        self.path = os.path.join(config.wallet_dir, self.file_name) if config else None
        self.hubs: Dict[Tuple[str, int], Dict] = {}
        if self.exists:
            self.load()

    @property
    def exists(self):
        return self.path and os.path.exists(self.path)

    @property
    def serialized(self) -> Dict[str, Dict]:
        return {f"{host}:{port}": details for (host, port), details in self.hubs.items()}

    def filter(self, match_none=False, **kwargs):
        if not kwargs:
            return self.hubs
        result = {}
        for hub, details in self.hubs.items():
            for key, constraint in kwargs.items():
                value = details.get(key)
                if value == constraint or (match_none and value is None):
                    result[hub] = details
                    break
        return result

    def load(self):
        if self.path:
            with open(self.path, 'r') as known_hubs_file:
                raw = known_hubs_file.read()
            for hub, details in yaml.safe_load(raw).items():
                self.set(hub, details)

    def save(self):
        if self.path:
            with open(self.path, 'w') as known_hubs_file:
                known_hubs_file.write(yaml.safe_dump(self.serialized, default_flow_style=False))

    def set(self, hub: str, details: Dict):
        if hub and hub.count(':') == 1:
            host, port = hub.split(':')
            hub_parts = (host, int(port))
            if hub_parts not in self.hubs:
                self.hubs[hub_parts] = details
                return hub

    def add_hubs(self, hubs: List[str]):
        added = False
        for hub in hubs:
            if self.set(hub, {}) is not None:
                added = True
        return added

    def items(self):
        return self.hubs.items()

    def __bool__(self):
        return len(self) > 0

    def __len__(self):
        return self.hubs.__len__()

    def __iter__(self):
        return iter(self.hubs)
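
# Illustrative sketch: hubs are keyed by (host, port) tuples and carry a
# metadata dict, so membership and filtering work like this (hostname and
# `region` key are hypothetical):
#
#   >>> hubs = KnownHubsList()                    # no config: nothing persisted
#   >>> hubs.add_hubs(['hub1.example.com:50001'])
#   True
#   >>> ('hub1.example.com', 50001) in hubs       # __iter__ yields the keys
#   True
#   >>> hubs.filter(region='us')                  # matches details['region'] == 'us'
#   {}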


class EnvironmentAccess:
    PREFIX = 'LBRY_'

    def __init__(self, config: 'BaseConfig', environ: dict):
        self.configuration = config
        self.data = {}
        if environ:
            self.load(environ)

    def load(self, environ):
        for setting in self.configuration.get_settings():
            value = environ.get(f'{self.PREFIX}{setting.name.upper()}', NOT_SET)
            if value != NOT_SET and not (isinstance(setting, ListSetting) and value is None):
                self.data[setting.name] = setting.deserialize(value)

    def __contains__(self, item: str):
        return item in self.data

    def __getitem__(self, item: str):
        return self.data[item]
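
# Illustrative sketch: a setting named `tcp_port` is fed by the environment
# variable LBRY_TCP_PORT (PREFIX plus the upper-cased setting name), and the
# raw string is passed through the setting's deserialize(), e.g. int('4445').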


class ArgumentAccess:

    def __init__(self, config: 'BaseConfig', args: dict):
        self.configuration = config
        self.args = {}
        if args:
            self.load(args)

    def load(self, args):
        for setting in self.configuration.get_settings():
            value = getattr(args, setting.name, NOT_SET)
            if value != NOT_SET and not (isinstance(setting, ListSetting) and value is None):
                self.args[setting.name] = setting.deserialize(value)

    def __contains__(self, item: str):
        return item in self.args

    def __getitem__(self, item: str):
        return self.args[item]


class ConfigFileAccess:

    def __init__(self, config: 'BaseConfig', path: str):
        self.configuration = config
        self.path = path
        self.data = {}
        if self.exists:
            self.load()

    @property
    def exists(self):
        return self.path and os.path.exists(self.path)

    def load(self):
        cls = type(self.configuration)
        with open(self.path, 'r') as config_file:
            raw = config_file.read()
        serialized = yaml.safe_load(raw) or {}
        for key, value in serialized.items():
            attr = getattr(cls, key, None)
            if attr is None:
                for setting in self.configuration.settings:
                    if key in setting.previous_names:
                        attr = setting
                        break
            if attr is not None:
                self.data[key] = attr.deserialize(value)

    def save(self):
        cls = type(self.configuration)
        serialized = {}
        for key, value in self.data.items():
            attr = getattr(cls, key)
            serialized[key] = attr.serialize(value)
        with open(self.path, 'w') as config_file:
            config_file.write(yaml.safe_dump(serialized, default_flow_style=False))

    def upgrade(self) -> bool:
        upgraded = False
        for key in list(self.data):
            for setting in self.configuration.settings:
                if key in setting.previous_names:
                    self.data[setting.name] = self.data[key]
                    del self.data[key]
                    upgraded = True
                    break
        return upgraded

    def __contains__(self, item: str):
        return item in self.data

    def __getitem__(self, item: str):
        return self.data[item]

    def __setitem__(self, key, value):
        self.data[key] = value

    def __delitem__(self, key):
        del self.data[key]
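
# Illustrative sketch: load() and upgrade() honor Setting.previous_names, so a
# YAML file that still says `download_directory: /tmp/x` (hypothetical value)
# is read into, and then rewritten under, the current `download_dir` key the
# next time the file is saved.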


TBC = TypeVar('TBC', bound='BaseConfig')


class BaseConfig:

    config = Path("Path to configuration file.", metavar='FILE')

    def __init__(self, **kwargs):
        self.runtime = {}      # set internally or by various API calls
        self.arguments = {}    # from command line arguments
        self.environment = {}  # from environment variables
        self.persisted = {}    # from config file
        self._updating_config = False
        for key, value in kwargs.items():
            setattr(self, key, value)

    @contextmanager
    def update_config(self):
        self._updating_config = True
        yield self
        self._updating_config = False
        if isinstance(self.persisted, ConfigFileAccess):
            self.persisted.save()
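
    # Illustrative sketch: outside update_config() assignments only reach
    # self.runtime; inside the context they are also written to the loaded
    # config file because modify_order then includes self.persisted
    # (the path below is hypothetical):
    #
    #   >>> with conf.update_config() as c:
    #   ...     c.download_dir = '/tmp/downloads'   # persisted on exit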

    @property
    def modify_order(self):
        locations = [self.runtime]
        if self._updating_config:
            locations.append(self.persisted)
        return locations

    @property
    def search_order(self):
        return [
            self.runtime,
            self.arguments,
            self.environment,
            self.persisted
        ]

    @classmethod
    def get_settings(cls):
        for attr in dir(cls):
            setting = getattr(cls, attr)
            if isinstance(setting, Setting):
                yield setting

    @property
    def settings(self):
        return self.get_settings()

    @property
    def settings_dict(self):
        return {
            setting.name: getattr(self, setting.name) for setting in self.settings
        }

    @classmethod
    def create_from_arguments(cls, args) -> TBC:
        conf = cls()
        conf.set_arguments(args)
        conf.set_environment()
        conf.set_persisted()
        return conf

    @classmethod
    def contribute_to_argparse(cls, parser: ArgumentParser):
        for setting in cls.get_settings():
            setting.contribute_to_argparse(parser)

    def set_arguments(self, args):
        self.arguments = ArgumentAccess(self, args)

    def set_environment(self, environ=None):
        self.environment = EnvironmentAccess(self, environ or os.environ)

    def set_persisted(self, config_file_path=None):
        if config_file_path is None:
            config_file_path = self.config

        if not config_file_path:
            return

        ext = os.path.splitext(config_file_path)[1]
        assert ext in ('.yml', '.yaml'), \
            f"File extension '{ext}' is not supported, " \
            "configuration file must be in YAML (.yml or .yaml)."

        self.persisted = ConfigFileAccess(self, config_file_path)
        if self.persisted.upgrade():
            self.persisted.save()
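
# Illustrative sketch of the intended wiring between argparse and a config
# class (the flag value is hypothetical):
#
#   >>> parser = ArgumentParser()
#   >>> Config.contribute_to_argparse(parser)     # one flag per Setting
#   >>> args = parser.parse_args(['--tcp-port', '4445'])
#   >>> conf = Config.create_from_arguments(args)
#   >>> conf.tcp_port                             # argument beats env/file/default
#   4445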


class TranscodeConfig(BaseConfig):

    ffmpeg_path = String('A list of places to check for ffmpeg and ffprobe. '
                         f'$data_dir/ffmpeg/bin and $PATH are checked afterward. Separator: {os.pathsep}',
                         '', previous_names=['ffmpeg_folder'])
    video_encoder = String('FFmpeg codec and parameters for the video encoding. '
                           'Example: libaom-av1 -crf 25 -b:v 0 -strict experimental',
                           'libx264 -crf 24 -preset faster -pix_fmt yuv420p')
    video_bitrate_maximum = Integer('Maximum bits per second allowed for video streams (0 to disable).', 5_000_000)
    video_scaler = String('FFmpeg scaling parameters for reducing bitrate. '
                          'Example: -vf "scale=-2:720,fps=24" -maxrate 5M -bufsize 3M',
                          r'-vf "scale=if(gte(iw\,ih)\,min(1920\,iw)\,-2):if(lt(iw\,ih)\,min(1920\,ih)\,-2)" '
                          r'-maxrate 5500K -bufsize 5000K')
    audio_encoder = String('FFmpeg codec and parameters for the audio encoding. '
                           'Example: libopus -b:a 128k',
                           'aac -b:a 160k')
    volume_filter = String('FFmpeg filter for audio normalization. Example: -af loudnorm', '')
    volume_analysis_time = Integer('Maximum seconds into the file that we examine audio volume (0 to disable).', 240)


class CLIConfig(TranscodeConfig):

    api = String('Host name and port for lbrynet daemon API.', 'localhost:5279', metavar='HOST:PORT')

    @property
    def api_connection_url(self) -> str:
        return f"http://{self.api}/lbryapi"

    @property
    def api_host(self):
        return self.api.split(':')[0]

    @property
    def api_port(self):
        return int(self.api.split(':')[1])


class Config(CLIConfig):

    jurisdiction = String("Limit interactions to wallet server in this jurisdiction.")

    # directories
    data_dir = Path("Directory path to store blobs.", metavar='DIR')
    download_dir = Path(
        "Directory path to place assembled files downloaded from LBRY.",
        previous_names=['download_directory'], metavar='DIR'
    )
    wallet_dir = Path(
        "Directory containing a 'wallets' subdirectory with 'default_wallet' file.",
        previous_names=['lbryum_wallet_dir'], metavar='DIR'
    )
    wallets = Strings(
        "Wallet files in 'wallet_dir' to load at startup.",
        ['default_wallet']
    )

    # network
    use_upnp = Toggle(
        "Use UPnP to set up temporary port redirects for the DHT and the hosting of blobs. If you manually "
        "forward ports or have firewall rules you likely want to disable this.", True
    )
    udp_port = Integer("UDP port for communicating on the LBRY DHT", 4444, previous_names=['dht_node_port'])
    tcp_port = Integer("TCP port to listen for incoming blob requests", 4444, previous_names=['peer_port'])
    prometheus_port = Integer("Port to expose prometheus metrics (off by default)", 0)
    network_interface = String("Interface to use for the DHT and blob exchange", '0.0.0.0')

    # routing table
    split_buckets_under_index = Integer(
        "Routing table bucket index below which we always split the bucket if given a new key to add to it and "
        "the bucket is full. As this value is raised the depth of the routing table (and number of peers in it) "
        "will increase. This setting is used by seed nodes, you probably don't want to change it during normal "
        "use.", 2
    )

    # protocol timeouts
    download_timeout = Float("Cumulative timeout for a stream to begin downloading before giving up", 30.0)
    blob_download_timeout = Float("Timeout to download a blob from a peer", 30.0)
    hub_timeout = Float("Timeout when making a hub request", 30.0)
    peer_connect_timeout = Float("Timeout to establish a TCP connection to a peer", 3.0)
    node_rpc_timeout = Float("Timeout when making a DHT request", constants.RPC_TIMEOUT)

    # blob announcement and download
    save_blobs = Toggle("Save encrypted blob files for hosting, otherwise download blobs to memory only.", True)
    network_storage_limit = Integer("Disk space in MB to be allocated for helping the P2P network. 0 = disable", 0)
    blob_storage_limit = Integer("Disk space in MB to be allocated for blob storage. 0 = no limit", 0)
    blob_lru_cache_size = Integer(
        "LRU cache size for decrypted downloaded blobs used to minimize re-downloading the same blobs when "
        "replying to a range request. Set to 0 to disable.", 32
    )
    announce_head_and_sd_only = Toggle(
        "Announce only the descriptor and first (rather than all) data blob for a stream to the DHT", True,
        previous_names=['announce_head_blobs_only']
    )
    concurrent_blob_announcers = Integer(
        "Number of blobs to iteratively announce at once, set to 0 to disable", 10,
        previous_names=['concurrent_announcers']
    )
    max_connections_per_download = Integer(
        "Maximum number of peers to connect to while downloading a blob", 4,
        previous_names=['max_connections_per_stream']
    )
    concurrent_hub_requests = Integer("Maximum number of concurrent hub requests", 32)
    fixed_peer_delay = Float(
        "Number of seconds to wait before adding the reflector servers as potential peers to download from, "
        "in case DHT peers are not found or are slow", 2.0
    )
    max_key_fee = MaxKeyFee(
        "Don't download streams with fees exceeding this amount. When set to "
        "null, the amount is unbounded.", {'currency': 'USD', 'amount': 50.0}
    )
    max_wallet_server_fee = String("Maximum daily LBC amount allowed as payment for wallet servers.", "0.0")

    # reflector settings
    reflect_streams = Toggle(
        "Upload completed streams (published and downloaded) to a reflector server in order to re-host them", True,
        previous_names=['reflect_uploads']
    )
    concurrent_reflector_uploads = Integer(
        "Maximum number of streams to upload to a reflector server at a time", 10
    )

    # servers
    reflector_servers = Servers("Reflector re-hosting servers for mirroring publishes", [
        ('reflector.lbry.com', 5566)
    ])

    fixed_peers = Servers("Fixed peers to fall back to if none are found on P2P for a blob", [
        ('cdn.reflector.lbry.com', 5567)
    ])

    tracker_servers = Servers("BitTorrent-compatible (BEP15) UDP trackers for helping P2P discovery", [
        ('tracker.lbry.com', 9252),
        ('tracker.lbry.grin.io', 9252),
    ])

    lbryum_servers = Servers("SPV wallet servers", [
        ('spv11.lbry.com', 50001),
        ('spv12.lbry.com', 50001),
        ('spv13.lbry.com', 50001),
        ('spv14.lbry.com', 50001),
        ('spv15.lbry.com', 50001),
        ('spv16.lbry.com', 50001),
        ('spv17.lbry.com', 50001),
        ('spv18.lbry.com', 50001),
        ('spv19.lbry.com', 50001),
    ])
    known_dht_nodes = Servers("Known nodes for bootstrapping connection to the DHT", [
        ('dht.lbry.grin.io', 4444),  # Grin
        ('dht.lbry.madiator.com', 4444),  # Madiator
        ('lbrynet1.lbry.com', 4444),  # US EAST
        ('lbrynet2.lbry.com', 4444),  # US WEST
        ('lbrynet3.lbry.com', 4444),  # EU
        ('lbrynet4.lbry.com', 4444)  # ASIA
    ])

    # blockchain
    blockchain_name = String("Blockchain name - lbrycrd_main, lbrycrd_regtest, or lbrycrd_testnet", 'lbrycrd_main')

    # daemon
    save_files = Toggle("Save downloaded files when calling `get` by default", False)
    components_to_skip = Strings("Components which will be skipped during daemon start-up", [])
    share_usage_data = Toggle(
        "Whether to share usage stats and diagnostic info with LBRY.", False,
        previous_names=['upload_log', 'share_debug_info']
    )
    track_bandwidth = Toggle("Track bandwidth usage", True)
    allowed_origin = String(
        "Allowed `Origin` header value for API request (sent by browser), use * to allow "
        "all hosts; default is to only allow API requests with no `Origin` value.", "")

    # media server
    streaming_server = String('Host name and port to serve streaming media over range requests',
                              'localhost:5280', metavar='HOST:PORT')
    streaming_get = Toggle("Enable the /get endpoint for the streaming media server. "
                           "Disable to prevent new streams from being added.", True)

    coin_selection_strategy = StringChoice(
        "Strategy to use when selecting UTXOs for a transaction",
        STRATEGIES, "prefer_confirmed"
    )

    transaction_cache_size = Integer("Transaction cache size", 2 ** 17)
    save_resolved_claims = Toggle(
        "Save content claims to the database when they are resolved to keep file_list up to date, "
        "only disable this if file_x commands are not needed", True
    )

    @property
    def streaming_host(self):
        return self.streaming_server.split(':')[0]

    @property
    def streaming_port(self):
        return int(self.streaming_server.split(':')[1])

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.set_default_paths()
        self.known_hubs = KnownHubsList(self)

    def set_default_paths(self):
        if 'darwin' in sys.platform.lower():
            get_directories = get_darwin_directories
        elif 'win' in sys.platform.lower():
            get_directories = get_windows_directories
        elif 'linux' in sys.platform.lower():
            get_directories = get_linux_directories
        else:
            return
        cls = type(self)
        cls.data_dir.default, cls.wallet_dir.default, cls.download_dir.default = get_directories()
        cls.config.default = os.path.join(
            self.data_dir, 'daemon_settings.yml'
        )

    @property
    def log_file_path(self):
        return os.path.join(self.data_dir, 'lbrynet.log')


def get_windows_directories() -> Tuple[str, str, str]:
    from lbry.winpaths import get_path, FOLDERID, UserHandle, \
        PathNotFoundException  # pylint: disable=import-outside-toplevel

    try:
        download_dir = get_path(FOLDERID.Downloads, UserHandle.current)
    except PathNotFoundException:
        download_dir = os.getcwd()

    # old directories, still used if they already exist
    appdata = get_path(FOLDERID.RoamingAppData, UserHandle.current)
    data_dir = os.path.join(appdata, 'lbrynet')
    lbryum_dir = os.path.join(appdata, 'lbryum')
    if os.path.isdir(data_dir) or os.path.isdir(lbryum_dir):
        return data_dir, lbryum_dir, download_dir

    # new directories
    data_dir = user_data_dir('lbrynet', 'lbry')
    lbryum_dir = user_data_dir('lbryum', 'lbry')
    return data_dir, lbryum_dir, download_dir


def get_darwin_directories() -> Tuple[str, str, str]:
    data_dir = user_data_dir('LBRY')
    lbryum_dir = os.path.expanduser('~/.lbryum')
    download_dir = os.path.expanduser('~/Downloads')
    return data_dir, lbryum_dir, download_dir


def get_linux_directories() -> Tuple[str, str, str]:
    download_dir = None  # stays None if user-dirs.dirs exists but has no XDG_DOWNLOAD_DIR
    try:
        with open(os.path.join(user_config_dir(), 'user-dirs.dirs'), 'r') as xdg:
            down_dir = re.search(r'XDG_DOWNLOAD_DIR=(.+)', xdg.read())
            if down_dir:
                down_dir = re.sub(r'\$HOME', os.getenv('HOME') or os.path.expanduser("~/"), down_dir.group(1))
                download_dir = re.sub('\"', '', down_dir)
    except OSError:
        download_dir = os.getenv('XDG_DOWNLOAD_DIR')
    if not download_dir:
        download_dir = os.path.expanduser('~/Downloads')

    # old directories, still used if they already exist
    data_dir = os.path.expanduser('~/.lbrynet')
    lbryum_dir = os.path.expanduser('~/.lbryum')
    if os.path.isdir(data_dir) or os.path.isdir(lbryum_dir):
        return data_dir, lbryum_dir, download_dir

    # new directories
    return user_data_dir('lbry/lbrynet'), user_data_dir('lbry/lbryum'), download_dir