2016-08-22 00:44:16 +02:00
|
|
|
import os
|
2017-07-03 22:01:19 +02:00
|
|
|
import re
|
2016-09-21 09:49:52 +02:00
|
|
|
import sys
|
2018-11-27 21:56:11 +01:00
|
|
|
import typing
|
|
|
|
import json
|
|
|
|
import logging
|
2016-11-16 20:38:43 +01:00
|
|
|
import envparse
|
2018-11-27 21:56:11 +01:00
|
|
|
import base58
|
|
|
|
import yaml
|
2017-07-03 22:01:19 +02:00
|
|
|
from appdirs import user_data_dir, user_config_dir
|
2018-11-07 21:15:05 +01:00
|
|
|
from lbrynet import utils
|
2018-12-14 17:19:00 +01:00
|
|
|
from lbrynet.p2p.Error import InvalidCurrencyError
|
2017-07-03 22:01:19 +02:00
|
|
|
|
2017-01-17 04:23:20 +01:00
|
|
|
log = logging.getLogger(__name__)

# repository root: two levels above this file
ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# prefix applied to environment variable names recognized by Env
ENV_NAMESPACE = 'LBRY_'

# supported wallet implementations
LBRYCRD_WALLET = 'lbrycrd'
LBRYUM_WALLET = 'lbryum'
PTC_WALLET = 'ptc'
TORBA_WALLET = 'torba'

PROTOCOL_PREFIX = 'lbry'
APP_NAME = 'LBRY'

# platform identifiers, assigned to `platform` by the detection code below
LINUX = 1
DARWIN = 2
WINDOWS = 3
ANDROID = 4

# size units, in bytes
KB = 2 ** 10
MB = 2 ** 20

DEFAULT_CONCURRENT_ANNOUNCERS = 10

# bootstrap (host, port) pairs used to join the DHT
DEFAULT_DHT_NODES = [
    ('lbrynet1.lbry.io', 4444),  # US EAST
    ('lbrynet2.lbry.io', 4444),  # US WEST
    ('lbrynet3.lbry.io', 4444),  # EU
    ('lbrynet4.lbry.io', 4444)   # ASIA
]
|
2016-09-21 09:49:52 +02:00
|
|
|
|
2017-01-17 04:23:20 +01:00
|
|
|
# maps conf file extension -> parser. yaml.safe_load is used instead of
# yaml.load: a plain yaml.load can instantiate arbitrary Python objects,
# so a crafted settings file could execute code when loaded
settings_decoders = {
    '.json': json.loads,
    '.yml': yaml.safe_load
}
|
2016-10-23 07:17:24 +02:00
|
|
|
|
2017-01-17 04:23:20 +01:00
|
|
|
# maps conf file extension -> serializer used when saving settings
settings_encoders = {
    '.json': json.dumps,
    '.yml': yaml.safe_dump
}
|
|
|
|
|
2017-08-18 12:52:03 +02:00
|
|
|
# detect the current platform; the Android check comes first because kivy
# sets ANDROID_ARGUMENT when running under Android
if 'ANDROID_ARGUMENT' in os.environ:
    # https://github.com/kivy/kivy/blob/master/kivy/utils.py#L417-L421
    platform = ANDROID
elif 'darwin' in sys.platform:
    platform = DARWIN
elif 'win' in sys.platform:
    platform = WINDOWS
else:
    platform = LINUX

# NOTE(review): `platform is WINDOWS` relies on CPython's small-int caching;
# `==` would be the safer comparison — kept as-is to preserve the code
ICON_PATH = 'icons' if platform is WINDOWS else 'app.icns'
|
2016-12-21 20:55:43 +01:00
|
|
|
|
|
|
|
|
2018-11-27 21:56:11 +01:00
|
|
|
def get_windows_directories() -> typing.Tuple[str, str, str]:
    """Return (data_dir, lbryum_dir, download_dir) for Windows.

    Prefers the legacy %APPDATA%-based directories when they already exist
    (an existing installation); otherwise returns the new appdirs-style
    locations. The user's Downloads shell folder is used in both cases.
    """
    from lbrynet.winpaths import get_path, FOLDERID, UserHandle

    download_dir = get_path(FOLDERID.Downloads, UserHandle.current)

    # old locations, honored when a previous installation is present
    appdata = get_path(FOLDERID.RoamingAppData, UserHandle.current)
    data_dir = os.path.join(appdata, 'lbrynet')
    lbryum_dir = os.path.join(appdata, 'lbryum')
    if os.path.isdir(data_dir) or os.path.isdir(lbryum_dir):
        return data_dir, lbryum_dir, download_dir

    # new locations for fresh installations. download_dir was already
    # resolved above, so there is no need to query the shell folder again
    data_dir = user_data_dir('lbrynet', 'lbry')
    lbryum_dir = user_data_dir('lbryum', 'lbry')
    return data_dir, lbryum_dir, download_dir
|
|
|
|
|
|
|
|
|
|
|
|
def get_darwin_directories() -> typing.Tuple[str, str, str]:
    """Return (data_dir, lbryum_dir, download_dir) for macOS."""
    return (
        user_data_dir('LBRY'),
        os.path.expanduser('~/.lbryum'),
        os.path.expanduser('~/Downloads'),
    )
|
|
|
|
|
|
|
|
|
|
|
|
def get_linux_directories() -> typing.Tuple[str, str, str]:
    """Return (data_dir, lbryum_dir, download_dir) for Linux.

    The download directory is resolved from the XDG user-dirs config when
    available, then from $XDG_DOWNLOAD_DIR, and finally falls back to
    ~/Downloads. Legacy dot-directories are preferred when they exist.
    """
    download_dir = None
    try:
        with open(os.path.join(user_config_dir(), 'user-dirs.dirs'), 'r') as xdg:
            # the original used non-raw '\$HOME'/'\"' patterns (invalid
            # escape sequences) and crashed with AttributeError when the
            # XDG_DOWNLOAD_DIR line was absent; both are fixed here
            match = re.search(r'XDG_DOWNLOAD_DIR=(.+)', xdg.read())
        if match:
            down_dir = match.group(1)
            # the configured value may reference $HOME and is usually quoted
            home = os.getenv('HOME') or os.path.expanduser('~')
            down_dir = re.sub(r'\$HOME', home, down_dir)
            download_dir = re.sub(r'"', '', down_dir)
    except EnvironmentError:
        download_dir = os.getenv('XDG_DOWNLOAD_DIR')

    if not download_dir:
        download_dir = os.path.expanduser('~/Downloads')

    # old locations, honored when a previous installation is present
    data_dir = os.path.expanduser('~/.lbrynet')
    lbryum_dir = os.path.expanduser('~/.lbryum')
    if os.path.isdir(data_dir) or os.path.isdir(lbryum_dir):
        return data_dir, lbryum_dir, download_dir

    # new XDG-style locations for fresh installations
    return user_data_dir('lbry/lbrynet'), user_data_dir('lbry/lbryum'), download_dir
|
|
|
|
|
|
|
|
|
2017-01-17 04:23:20 +01:00
|
|
|
def server_port(server_and_port):
    """Split a "host:port" string into a (host, port) tuple.

    Splits on the *last* colon so hosts that themselves contain colons
    (e.g. IPv6 literals) are not mangled; the old `split(':')` raised
    ValueError for any address with more than one colon.

    Raises ValueError if the port is missing or not an integer.
    """
    server, port = server_and_port.rsplit(':', 1)
    return server, int(port)
|
2016-10-26 09:16:33 +02:00
|
|
|
|
|
|
|
|
2018-01-09 22:55:39 +01:00
|
|
|
def server_list(servers):
    """Convert an iterable of "host:port" strings into (host, port) tuples."""
    return list(map(server_port, servers))
|
|
|
|
|
2018-07-24 18:42:12 +02:00
|
|
|
|
2018-05-17 23:41:23 +02:00
|
|
|
def server_list_reverse(servers):
    """Convert (host, port) tuples back into "host:port" strings."""
    formatted = []
    for host, port in servers:
        formatted.append('{}:{}'.format(host, port))
    return formatted
|
2018-01-09 22:55:39 +01:00
|
|
|
|
2018-07-24 18:42:12 +02:00
|
|
|
|
2016-11-16 20:38:43 +01:00
|
|
|
class Env(envparse.Env):
    """An Env parser that automatically namespaces the variables with LBRY"""

    def __init__(self, **schema):
        self.original_schema = schema
        namespaced = {}
        for key, value in schema.items():
            namespaced[self._convert_key(key)] = self._convert_value(value)
        super().__init__(**namespaced)

    def __call__(self, key, *args, **kwargs):
        return super().__call__(self._convert_key(key), *args, **kwargs)

    @staticmethod
    def _convert_key(key):
        # e.g. "api_port" -> "LBRY_API_PORT"
        return ENV_NAMESPACE + key.upper()

    @staticmethod
    def _convert_value(value):
        """ Allow value to be specified as a tuple or list.

        If you do this, the tuple/list must be of the
        form (cast, default) or (cast, default, subcast)
        """
        if not isinstance(value, (tuple, list)):
            return value
        converted = {'cast': value[0], 'default': value[1]}
        if len(value) == 3:
            converted['subcast'] = value[2]
        return converted
|
|
|
|
|
2017-04-26 20:15:38 +02:00
|
|
|
|
2017-01-17 18:29:09 +01:00
|
|
|
# precedence categories recording where a setting's value came from
TYPE_DEFAULT = 'default'      # built-in defaults
TYPE_PERSISTED = 'persisted'  # loaded from the settings file
TYPE_ENV = 'env'              # environment variables
TYPE_CLI = 'cli'              # command-line arguments
TYPE_RUNTIME = 'runtime'      # set programmatically at runtime
|
2016-11-16 20:38:43 +01:00
|
|
|
|
2017-01-17 04:23:20 +01:00
|
|
|
# settings that cannot be changed by the user (Config rejects edits to these)
FIXED_SETTINGS = {
    'ANALYTICS_ENDPOINT': 'https://api.segment.io/v1',
    'ANALYTICS_TOKEN': 'Ax5LZzR1o3q3Z3WjATASDwR5rKyHH0qOIRIbLmMXn2H=',
    'API_ADDRESS': 'lbryapi',
    'APP_NAME': APP_NAME,
    'BLOBFILES_DIR': 'blobfiles',
    'CRYPTSD_FILE_EXTENSION': '.cryptsd',
    'CURRENCIES': {
        'BTC': {'type': 'crypto'},
        'LBC': {'type': 'crypto'},
        'USD': {'type': 'fiat'},
    },
    'DB_REVISION_FILE_NAME': 'db_revision',
    'ICON_PATH': ICON_PATH,
    'LOGGLY_TOKEN': 'BQEzZmMzLJHgAGxkBF00LGD0YGuyATVgAmqxAQEuAQZ2BQH4',
    'LOG_FILE_NAME': 'lbrynet.log',
    'LOG_POST_URL': 'https://lbry.io/log-upload',
    'MAX_BLOB_REQUEST_SIZE': 64 * KB,
    'MAX_HANDSHAKE_SIZE': 64 * KB,
    'MAX_REQUEST_SIZE': 64 * KB,
    'MAX_RESPONSE_INFO_SIZE': 64 * KB,
    'MAX_BLOB_INFOS_TO_REQUEST': 20,
    'PROTOCOL_PREFIX': PROTOCOL_PREFIX,
    'SLACK_WEBHOOK': ('nUE0pUZ6Yl9bo29epl5moTSwnl5wo20ip2IlqzywMKZiIQSFZR5'
                      'AHx4mY0VmF0WQZ1ESEP9kMHZlp1WzJwWOoKN3ImR1M2yUAaMyqGZ='),
    'WALLET_TYPES': [LBRYUM_WALLET, LBRYCRD_WALLET],
    'HEADERS_FILE_SHA256_CHECKSUM': (366295, 'b0c8197153a33ccbc52fb81a279588b6015b68b7726f73f6a2b81f7e25bfe4b9')
}
|
2016-11-16 20:38:43 +01:00
|
|
|
|
2017-01-17 04:23:20 +01:00
|
|
|
# user-adjustable settings. Each entry is (cast, default) optionally followed
# by (subcast, reverse_subcast): subcast converts values when loading the conf
# file, reverse_subcast converts them back when saving.
ADJUSTABLE_SETTINGS = {
    'data_dir': (str, ''),  # these blank defaults will be updated to OS specific defaults
    'wallet_dir': (str, ''),
    'lbryum_wallet_dir': (str, ''),  # to be deprecated
    'download_directory': (str, ''),

    # By default, daemon will block all cross origin requests
    # but if this is set, this value will be used for the
    # Access-Control-Allow-Origin. For example
    # set to '*' to allow all requests, or set to 'http://localhost:8080'
    # if you're running a test UI on that port
    'allowed_origin': (str, ''),

    # Changing this value is not-advised as it could potentially
    # expose the lbrynet daemon to the outside world which would
    # give an attacker access to your wallet and you could lose
    # all of your credits.
    'api_host': (str, 'localhost'),
    'api_port': (int, 5279),
    # claims set to expire within this many blocks will be
    # automatically renewed after startup (if set to 0, renews
    # will not be made automatically)
    'auto_renew_claim_height_delta': (int, 0),
    'cache_time': (int, 150),
    'data_rate': (float, .0001),  # points/megabyte
    'delete_blobs_on_remove': (bool, True),
    'dht_node_port': (int, 4444),
    'download_timeout': (int, 180),
    'download_mirrors': (list, ['blobs.lbry.io']),
    'is_generous_host': (bool, True),
    'announce_head_blobs_only': (bool, True),
    'concurrent_announcers': (int, DEFAULT_CONCURRENT_ANNOUNCERS),
    'known_dht_nodes': (list, DEFAULT_DHT_NODES, server_list, server_list_reverse),
    'max_connections_per_stream': (int, 5),
    'seek_head_blob_first': (bool, True),
    # TODO: writing json on the cmd line is a pain, come up with a nicer
    # parser for this data structure. maybe 'USD:25'
    'max_key_fee': (json.loads, {'currency': 'USD', 'amount': 50.0}),
    'disable_max_key_fee': (bool, False),
    'min_info_rate': (float, .02),  # points/1000 infos
    'min_valuable_hash_rate': (float, .05),  # points/1000 infos
    'min_valuable_info_rate': (float, .05),  # points/1000 infos
    'peer_port': (int, 3333),
    'pointtrader_server': (str, 'http://127.0.0.1:2424'),
    'reflector_port': (int, 5566),
    # if reflect_uploads is True, send files to reflector after publishing (as well as a periodic check in the
    # event the initial upload failed or was disconnected part way through, provided the auto_re_reflect_interval > 0)
    'reflect_uploads': (bool, True),
    'auto_re_reflect_interval': (int, 86400),  # set to 0 to disable
    'reflector_servers': (list, [('reflector.lbry.io', 5566)], server_list, server_list_reverse),
    'run_reflector_server': (bool, False),  # adds `reflector` to components_to_skip unless True
    'sd_download_timeout': (int, 3),
    'share_usage_data': (bool, True),  # whether to share usage stats and diagnostic info with LBRY
    'peer_search_timeout': (int, 60),
    'use_auth_http': (bool, False),
    'use_https': (bool, False),
    'use_upnp': (bool, True),
    'use_keyring': (bool, False),
    'wallet': (str, LBRYUM_WALLET),
    'blockchain_name': (str, 'lbrycrd_main'),
    'lbryum_servers': (list, [('lbryumx1.lbry.io', 50001), ('lbryumx2.lbry.io',
                                                            50001)], server_list, server_list_reverse),
    's3_headers_depth': (int, 96 * 10),  # download headers from s3 when the local height is more than 10 chunks behind
    'components_to_skip': (list, [])  # components which will be skipped during start-up of daemon
}
|
2016-10-14 08:13:37 +02:00
|
|
|
|
2016-10-23 07:17:24 +02:00
|
|
|
|
2018-11-27 21:56:11 +01:00
|
|
|
# shorthand used throughout this module for optional string parameters
optional_str = typing.Optional[str]
|
2017-01-17 18:29:09 +01:00
|
|
|
|
2018-12-14 17:19:00 +01:00
|
|
|
|
2018-11-27 21:56:11 +01:00
|
|
|
class Config:
    def __init__(self, fixed_defaults, adjustable_defaults: typing.Dict, persisted_settings=None, environment=None,
                 cli_settings=None, data_dir: optional_str = None, wallet_dir: optional_str = None,
                 download_dir: optional_str = None, file_name: optional_str = None):
        """Layered settings store.

        Args:
            fixed_defaults: settings users may not change (FIXED_SETTINGS)
            adjustable_defaults: {name: (cast, default, ...)} (ADJUSTABLE_SETTINGS)
            persisted_settings: values previously saved to the conf file, if any
            environment: an Env instance used to read LBRY_* variables
            cli_settings: values parsed from the command line
            data_dir/wallet_dir/download_dir: explicit directory overrides
            file_name: conf file name (defaults to daemon_settings.yml)
        """
        # ids are created lazily; see get_installation_id / get_node_id
        self._installation_id = None
        self._session_id = base58.b58encode(utils.generate_id()).decode()
        self._node_id = None

        self._fixed_defaults = fixed_defaults

        # copy the default adjustable settings
        self._adjustable_defaults = {k: v for k, v in adjustable_defaults.items()}

        # set the os specific default directories
        if platform is WINDOWS:
            self.default_data_dir, self.default_wallet_dir, self.default_download_dir = get_windows_directories()
        elif platform is DARWIN:
            self.default_data_dir, self.default_wallet_dir, self.default_download_dir = get_darwin_directories()
        elif platform is LINUX:
            self.default_data_dir, self.default_wallet_dir, self.default_download_dir = get_linux_directories()
        else:
            # unrecognized platform (e.g. Android): every directory must be given
            assert None not in [data_dir, wallet_dir, download_dir]
        # explicit arguments override the platform defaults
        if data_dir:
            self.default_data_dir = data_dir
        if wallet_dir:
            self.default_wallet_dir = wallet_dir
        if download_dir:
            self.default_download_dir = download_dir

        self._data = {
            TYPE_DEFAULT: {},  # defaults
            TYPE_PERSISTED: {},  # stored settings from daemon_settings.yml (or from a db, etc)
            TYPE_ENV: {},  # settings from environment variables
            TYPE_CLI: {},  # command-line arguments
            TYPE_RUNTIME: {},  # set during runtime (using self.set(), etc)
        }

        # the order in which a piece of data is searched for. earlier types override later types
        self._search_order = (
            TYPE_RUNTIME, TYPE_CLI, TYPE_ENV, TYPE_PERSISTED, TYPE_DEFAULT
        )

        # types of data where user specified config values can be stored
        self._user_specified = (
            TYPE_RUNTIME, TYPE_CLI, TYPE_ENV, TYPE_PERSISTED
        )

        # seed the DEFAULT layer from the fixed and adjustable defaults
        self._data[TYPE_DEFAULT].update(self._fixed_defaults)
        self._data[TYPE_DEFAULT].update(
            {k: v[1] for (k, v) in self._adjustable_defaults.items()})

        if persisted_settings is None:
            persisted_settings = {}
        self._validate_settings(persisted_settings)
        self._data[TYPE_PERSISTED].update(persisted_settings)

        env_settings = self._parse_environment(environment)
        self._validate_settings(env_settings)
        self._data[TYPE_ENV].update(env_settings)

        if cli_settings is None:
            cli_settings = {}
        self._validate_settings(cli_settings)
        self._data[TYPE_CLI].update(cli_settings)

        self.file_name = file_name or 'daemon_settings.yml'
|
|
|
|
|
|
|
|
@property
|
|
|
|
def data_dir(self) -> optional_str:
|
|
|
|
data_dir = self.get('data_dir')
|
|
|
|
if not data_dir:
|
|
|
|
return
|
|
|
|
return os.path.expanduser(os.path.expandvars(data_dir))
|
|
|
|
|
|
|
|
@property
|
|
|
|
def download_dir(self) -> optional_str:
|
|
|
|
download_dir = self.get('download_directory')
|
|
|
|
if not download_dir:
|
|
|
|
return
|
|
|
|
return os.path.expanduser(os.path.expandvars(download_dir))
|
|
|
|
|
|
|
|
@property
|
|
|
|
def wallet_dir(self) -> optional_str:
|
|
|
|
if self.get('lbryum_wallet_dir') and not self.get('wallet_dir'):
|
|
|
|
log.warning("'lbryum_wallet_dir' setting will be deprecated, please update to 'wallet_dir'")
|
|
|
|
self['wallet_dir'] = self['lbryum_wallet_dir']
|
|
|
|
wallet_dir = self.get('wallet_dir')
|
|
|
|
if not wallet_dir:
|
|
|
|
return
|
|
|
|
return os.path.expanduser(os.path.expandvars(wallet_dir))
|
2016-10-31 22:19:19 +01:00
|
|
|
|
2017-01-17 04:23:20 +01:00
|
|
|
def __repr__(self):
|
2017-01-17 18:29:09 +01:00
|
|
|
return self.get_current_settings_dict().__repr__()
|
2016-11-05 19:23:48 +01:00
|
|
|
|
2017-01-17 04:23:20 +01:00
|
|
|
def __iter__(self):
|
2018-07-21 23:11:44 +02:00
|
|
|
for k in self._data[TYPE_DEFAULT].keys():
|
2017-01-17 04:23:20 +01:00
|
|
|
yield k
|
2016-11-05 19:23:48 +01:00
|
|
|
|
2017-01-17 04:23:20 +01:00
|
|
|
def __getitem__(self, name):
|
|
|
|
return self.get(name)
|
|
|
|
|
|
|
|
def __setitem__(self, name, value):
|
|
|
|
return self.set(name, value)
|
|
|
|
|
|
|
|
def __contains__(self, name):
|
2017-01-17 18:29:09 +01:00
|
|
|
return name in self._data[TYPE_DEFAULT]
|
2017-01-17 04:23:20 +01:00
|
|
|
|
2017-01-17 18:29:09 +01:00
|
|
|
@staticmethod
|
|
|
|
def _parse_environment(environment):
|
2017-01-17 04:23:20 +01:00
|
|
|
env_settings = {}
|
2017-01-17 18:29:09 +01:00
|
|
|
if environment is not None:
|
2017-01-17 04:23:20 +01:00
|
|
|
assert isinstance(environment, Env)
|
|
|
|
for opt in environment.original_schema:
|
2017-01-20 19:38:49 +01:00
|
|
|
if environment(opt) is not None:
|
|
|
|
env_settings[opt] = environment(opt)
|
2017-01-17 04:23:20 +01:00
|
|
|
return env_settings
|
|
|
|
|
2017-01-17 18:29:09 +01:00
|
|
|
def _assert_valid_data_type(self, data_type):
|
2017-12-28 16:08:30 +01:00
|
|
|
if data_type not in self._data:
|
2018-10-18 12:42:45 +02:00
|
|
|
raise KeyError(f'{data_type} in is not a valid data type')
|
2017-01-17 18:29:09 +01:00
|
|
|
|
2017-01-20 19:38:49 +01:00
|
|
|
def get_valid_setting_names(self):
|
|
|
|
return self._data[TYPE_DEFAULT].keys()
|
|
|
|
|
2017-01-17 18:29:09 +01:00
|
|
|
def _is_valid_setting(self, name):
|
2017-01-20 19:38:49 +01:00
|
|
|
return name in self.get_valid_setting_names()
|
2017-01-17 18:29:09 +01:00
|
|
|
|
|
|
|
def _assert_valid_setting(self, name):
|
2017-10-09 21:20:58 +02:00
|
|
|
if not self._is_valid_setting(name):
|
2018-10-18 12:42:45 +02:00
|
|
|
raise KeyError(f'{name} is not a valid setting')
|
2017-01-17 18:29:09 +01:00
|
|
|
|
|
|
|
def _validate_settings(self, data):
|
2017-01-20 19:38:49 +01:00
|
|
|
invalid_settings = set(data.keys()) - set(self.get_valid_setting_names())
|
|
|
|
if len(invalid_settings) > 0:
|
|
|
|
raise KeyError('invalid settings: {}'.format(', '.join(invalid_settings)))
|
2017-01-17 18:29:09 +01:00
|
|
|
|
|
|
|
def _assert_editable_setting(self, name):
|
|
|
|
self._assert_valid_setting(name)
|
2017-10-09 21:20:58 +02:00
|
|
|
if name in self._fixed_defaults:
|
2018-10-18 12:42:45 +02:00
|
|
|
raise ValueError(f'{name} is not an editable setting')
|
2017-01-17 18:29:09 +01:00
|
|
|
|
2018-02-09 17:29:37 +01:00
|
|
|
    def _assert_valid_setting_value(self, name, value):
        # value-level validation for the settings that need it
        if name == "max_key_fee":
            # the fee currency must be one of the known CURRENCIES
            currency = str(value["currency"]).upper()
            if currency not in self._fixed_defaults['CURRENCIES'].keys():
                raise InvalidCurrencyError(currency)
        elif name == "download_directory":
            # a missing directory is tolerated, but worth flagging
            directory = str(value)
            if not os.path.exists(directory):
                log.warning("download directory '%s' does not exist", directory)
|
2017-07-20 00:15:07 +02:00
|
|
|
|
2017-11-29 09:24:56 +01:00
|
|
|
def is_default(self, name):
|
|
|
|
"""Check if a config value is wasn't specified by the user
|
|
|
|
|
|
|
|
Args:
|
|
|
|
name: the name of the value to check
|
|
|
|
|
|
|
|
Returns: true if config value is the default one, false if it was specified by
|
|
|
|
the user
|
|
|
|
|
|
|
|
Sometimes it may be helpful to understand if a config value was specified
|
|
|
|
by the user or if it still holds its default value. This function will return
|
|
|
|
true when the config value is still the default. Note that when the user
|
|
|
|
specifies a value that is equal to the default one, it will still be considered
|
|
|
|
as 'user specified'
|
|
|
|
"""
|
|
|
|
|
|
|
|
self._assert_valid_setting(name)
|
|
|
|
for possible_data_type in self._user_specified:
|
|
|
|
if name in self._data[possible_data_type]:
|
|
|
|
return False
|
|
|
|
return True
|
|
|
|
|
2017-01-17 18:29:09 +01:00
|
|
|
    def get(self, name, data_type=None):
        """Get a config value

        Args:
            name: the name of the value to get
            data_type: if given, get the value from a specific data set (see below)

        Returns: the config value for the given name

        If data_type is None, get() will search for the given name in each data set, in
        order of precedence. It will return the first value it finds. This is the "effective"
        value of a config name. For example, ENV values take precedence over DEFAULT values,
        so if a value is present in ENV and in DEFAULT, the ENV value will be returned
        """
        self._assert_valid_setting(name)
        if data_type is not None:
            self._assert_valid_data_type(data_type)
            return self._data[data_type][name]
        for possible_data_type in self._search_order:
            if name in self._data[possible_data_type]:
                return self._data[possible_data_type][name]
        # unreachable in practice: every valid setting has a DEFAULT entry
        # and TYPE_DEFAULT is in the search order
        raise KeyError(f'{name} is not a valid setting')
|
2017-01-17 18:29:09 +01:00
|
|
|
|
|
|
|
def set(self, name, value, data_types=(TYPE_RUNTIME,)):
|
|
|
|
"""Set a config value
|
|
|
|
|
|
|
|
Args:
|
|
|
|
name: the name of the value to set
|
|
|
|
value: the value
|
|
|
|
data_types: what type(s) of data this is
|
|
|
|
|
|
|
|
Returns: None
|
|
|
|
|
|
|
|
By default, this sets the RUNTIME value of a config. If you wish to set other
|
|
|
|
data types (e.g. PERSISTED values to save to a file, CLI values from parsed
|
|
|
|
command-line options, etc), you can specify that with the data_types param
|
|
|
|
"""
|
|
|
|
self._assert_editable_setting(name)
|
2018-02-09 17:29:37 +01:00
|
|
|
self._assert_valid_setting_value(name, value)
|
|
|
|
|
2017-01-17 18:29:09 +01:00
|
|
|
for data_type in data_types:
|
|
|
|
self._assert_valid_data_type(data_type)
|
|
|
|
self._data[data_type][name] = value
|
|
|
|
|
|
|
|
    def update(self, updated_settings, data_types=(TYPE_RUNTIME,)):
        # best-effort batch set: entries with invalid names or values are
        # skipped rather than aborting the whole update
        for k, v in updated_settings.items():
            try:
                self.set(k, v, data_types=data_types)
            except (KeyError, AssertionError):
                pass
|
2016-10-31 22:19:19 +01:00
|
|
|
|
2017-01-17 04:23:20 +01:00
|
|
|
def get_current_settings_dict(self):
|
2017-01-17 18:29:09 +01:00
|
|
|
current_settings = {}
|
2017-01-20 19:38:49 +01:00
|
|
|
for key in self.get_valid_setting_names():
|
|
|
|
current_settings[key] = self.get(key)
|
2017-01-17 18:29:09 +01:00
|
|
|
return current_settings
|
2016-10-31 22:19:19 +01:00
|
|
|
|
2017-01-17 04:23:20 +01:00
|
|
|
def get_adjustable_settings_dict(self):
|
|
|
|
return {
|
2018-07-21 23:11:44 +02:00
|
|
|
key: val for key, val in self.get_current_settings_dict().items()
|
2017-01-20 19:38:49 +01:00
|
|
|
if key in self._adjustable_defaults
|
2017-04-26 20:15:38 +02:00
|
|
|
}
|
2017-01-17 04:23:20 +01:00
|
|
|
|
|
|
|
    def save_conf_file_settings(self):
        """Write the PERSISTED settings out to the conf file in the data dir.

        Raises ValueError for unsupported conf file extensions.
        """
        # reverse the conversions done after loading the settings from the conf
        # file
        rev = self._convert_conf_file_lists_reverse(self._data[TYPE_PERSISTED])
        ext = os.path.splitext(self.file_name)[1]
        encoder = settings_encoders.get(ext, False)
        if not encoder:
            raise ValueError('Unknown settings format: {}. Available formats: {}'
                             .format(ext, list(settings_encoders.keys())))
        with open(os.path.join(self.data_dir, self.file_name), 'w') as settings_file:
            settings_file.write(encoder(rev))
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def _convert_conf_file_lists_reverse(converted):
|
|
|
|
rev = {}
|
2018-07-18 02:35:53 +02:00
|
|
|
for k in converted.keys():
|
2018-05-17 23:41:23 +02:00
|
|
|
if k in ADJUSTABLE_SETTINGS and len(ADJUSTABLE_SETTINGS[k]) == 4:
|
|
|
|
rev[k] = ADJUSTABLE_SETTINGS[k][3](converted[k])
|
|
|
|
else:
|
|
|
|
rev[k] = converted[k]
|
|
|
|
return rev
|
2017-01-17 04:23:20 +01:00
|
|
|
|
2018-01-09 22:55:39 +01:00
|
|
|
@staticmethod
|
|
|
|
def _convert_conf_file_lists(decoded):
|
|
|
|
converted = {}
|
2018-07-18 02:35:53 +02:00
|
|
|
for k, v in decoded.items():
|
2018-05-17 23:41:23 +02:00
|
|
|
if k in ADJUSTABLE_SETTINGS and len(ADJUSTABLE_SETTINGS[k]) >= 3:
|
2018-01-09 22:55:39 +01:00
|
|
|
converted[k] = ADJUSTABLE_SETTINGS[k][2](v)
|
|
|
|
else:
|
|
|
|
converted[k] = v
|
|
|
|
return converted
|
|
|
|
|
2018-01-29 10:16:41 +01:00
|
|
|
def initialize_post_conf_load(self):
|
|
|
|
settings.installation_id = settings.get_installation_id()
|
|
|
|
settings.node_id = settings.get_node_id()
|
|
|
|
|
2017-01-17 04:23:20 +01:00
|
|
|
    def load_conf_file_settings(self):
        """Load the conf file (if present) and resolve effective directories."""
        # the conf file lives in the (current or default) data directory
        path = os.path.join(self.data_dir or self.default_data_dir, self.file_name)
        if os.path.isfile(path):
            self._read_conf_file(path)
        # fall back to defaults for any directory the conf file did not set
        self['data_dir'] = self.data_dir or self.default_data_dir
        self['download_directory'] = self.download_dir or self.default_download_dir
        self['wallet_dir'] = self.wallet_dir or self.default_wallet_dir
        # initialize members depending on config file
        self.initialize_post_conf_load()
|
|
|
|
|
|
|
|
    def _read_conf_file(self, path):
        """Load, migrate, validate and store settings from a conf file.

        Raises FileNotFoundError when the path is missing and ValueError
        for unsupported file extensions.
        """
        if not path or not os.path.exists(path):
            raise FileNotFoundError(path)
        ext = os.path.splitext(path)[1]
        decoder = settings_decoders.get(ext, False)
        if not decoder:
            raise ValueError('Unknown settings format: {}. Available formats: {}'
                             .format(ext, list(settings_decoders.keys())))
        with open(path, 'r') as settings_file:
            data = settings_file.read()
            # migrate legacy names before validating
            decoded = self._fix_old_conf_file_settings(decoder(data))
            log.info('Loaded settings file: %s', path)
            self._validate_settings(decoded)
            self._data[TYPE_PERSISTED].update(self._convert_conf_file_lists(decoded))
|
2018-01-29 10:16:41 +01:00
|
|
|
|
2017-01-20 18:39:54 +01:00
|
|
|
def _fix_old_conf_file_settings(self, settings_dict):
|
2017-01-17 04:23:20 +01:00
|
|
|
if 'API_INTERFACE' in settings_dict:
|
|
|
|
settings_dict['api_host'] = settings_dict['API_INTERFACE']
|
|
|
|
del settings_dict['API_INTERFACE']
|
|
|
|
if 'startup_scripts' in settings_dict:
|
|
|
|
del settings_dict['startup_scripts']
|
2017-03-29 17:06:04 +02:00
|
|
|
if 'upload_log' in settings_dict:
|
2017-04-27 02:02:00 +02:00
|
|
|
settings_dict['share_usage_data'] = settings_dict['upload_log']
|
2017-03-29 17:06:04 +02:00
|
|
|
del settings_dict['upload_log']
|
2017-04-27 02:02:00 +02:00
|
|
|
if 'share_debug_info' in settings_dict:
|
|
|
|
settings_dict['share_usage_data'] = settings_dict['share_debug_info']
|
|
|
|
del settings_dict['share_debug_info']
|
2018-08-10 06:55:38 +02:00
|
|
|
for key in list(settings_dict.keys()):
|
2017-01-20 18:39:54 +01:00
|
|
|
if not self._is_valid_setting(key):
|
|
|
|
log.warning('Ignoring invalid conf file setting: %s', key)
|
|
|
|
del settings_dict[key]
|
2017-01-17 04:23:20 +01:00
|
|
|
return settings_dict
|
2016-10-31 22:19:19 +01:00
|
|
|
|
2017-01-17 04:23:20 +01:00
|
|
|
def ensure_data_dir(self):
|
|
|
|
# although there is a risk of a race condition here we don't
|
|
|
|
# expect there to be multiple processes accessing this
|
|
|
|
# directory so the risk can be ignored
|
2018-11-27 21:56:11 +01:00
|
|
|
if not os.path.isdir(self.data_dir):
|
|
|
|
os.makedirs(self.data_dir)
|
|
|
|
if not os.path.isdir(os.path.join(self.data_dir, "blobfiles")):
|
|
|
|
os.makedirs(os.path.join(self.data_dir, "blobfiles"))
|
|
|
|
return self.data_dir
|
2016-10-31 22:19:19 +01:00
|
|
|
|
2018-09-28 23:02:04 +02:00
|
|
|
def ensure_wallet_dir(self):
|
2018-11-27 21:56:11 +01:00
|
|
|
if not os.path.isdir(self.wallet_dir):
|
|
|
|
os.makedirs(self.wallet_dir)
|
2018-09-28 23:02:04 +02:00
|
|
|
|
2017-01-17 04:23:20 +01:00
|
|
|
def get_log_filename(self):
|
|
|
|
"""
|
|
|
|
Return the log file for this platform.
|
|
|
|
Also ensure the containing directory exists.
|
|
|
|
"""
|
|
|
|
return os.path.join(self.ensure_data_dir(), self['LOG_FILE_NAME'])
|
2016-10-31 22:19:19 +01:00
|
|
|
|
2018-09-21 18:38:57 +02:00
|
|
|
def get_api_connection_string(self, user: str = None, password: str = None) -> str:
|
|
|
|
return 'http%s://%s%s:%i/%s' % (
|
|
|
|
"" if not self['use_https'] else "s",
|
2018-10-18 12:42:45 +02:00
|
|
|
"" if not (user and password) else f"{user}:{password}@",
|
2018-09-21 18:38:57 +02:00
|
|
|
self['api_host'],
|
|
|
|
self['api_port'],
|
|
|
|
self['API_ADDRESS']
|
|
|
|
)
|
2016-10-23 07:17:24 +02:00
|
|
|
|
2017-01-17 04:23:20 +01:00
|
|
|
def get_db_revision_filename(self):
|
|
|
|
return os.path.join(self.ensure_data_dir(), self['DB_REVISION_FILE_NAME'])
|
2016-10-23 07:17:24 +02:00
|
|
|
|
2017-02-02 16:23:17 +01:00
|
|
|
    def get_installation_id(self):
        """Return the persistent installation id, creating it on first use.

        The id is cached on the instance and stored in <data_dir>/install_id
        so it survives restarts.
        """
        install_id_filename = os.path.join(self.ensure_data_dir(), "install_id")
        if not self._installation_id:
            # try to load a previously persisted id
            if os.path.isfile(install_id_filename):
                with open(install_id_filename, "r") as install_id_file:
                    self._installation_id = str(install_id_file.read()).strip()
        if not self._installation_id:
            # first run: generate and persist a fresh id
            self._installation_id = base58.b58encode(utils.generate_id()).decode()
            with open(install_id_filename, "w") as install_id_file:
                install_id_file.write(self._installation_id)
        return self._installation_id
|
2017-01-17 04:23:20 +01:00
|
|
|
|
2017-08-17 03:08:24 +02:00
|
|
|
    def get_node_id(self):
        """Return the persistent DHT node id (raw bytes), creating it on first use.

        The id is stored base58-encoded in <data_dir>/node_id and cached on
        the instance in decoded form.
        """
        node_id_filename = os.path.join(self.ensure_data_dir(), "node_id")
        if not self._node_id:
            # try to load a previously persisted id
            if os.path.isfile(node_id_filename):
                with open(node_id_filename, "r") as node_id_file:
                    self._node_id = base58.b58decode(str(node_id_file.read()).strip())
        if not self._node_id:
            # first run: generate and persist a fresh id
            self._node_id = utils.generate_id()
            with open(node_id_filename, "w") as node_id_file:
                node_id_file.write(base58.b58encode(self._node_id).decode())
        return self._node_id
|
|
|
|
|
2017-01-17 04:23:20 +01:00
|
|
|
def get_session_id(self):
|
|
|
|
return self._session_id
|
|
|
|
|
|
|
|
|
2018-11-27 21:56:11 +01:00
|
|
|
# module-level singleton, populated by initialize_settings()
settings: Config = None
|
2016-12-21 19:43:13 +01:00
|
|
|
|
2017-12-28 16:08:30 +01:00
|
|
|
|
2017-01-17 18:29:09 +01:00
|
|
|
def get_default_env():
    """Build an Env schema covering every adjustable setting, with no defaults."""
    env_defaults = {}
    for name, spec in ADJUSTABLE_SETTINGS.items():
        # keep the cast plus any (sub)cast entries, but drop the default value
        env_defaults[name] = (spec[0], None) + tuple(spec[2:])
    return Env(**env_defaults)
|
2017-01-17 18:29:09 +01:00
|
|
|
|
|
|
|
|
2018-11-27 21:56:11 +01:00
|
|
|
def initialize_settings(load_conf_file: typing.Optional[bool] = True,
                        data_dir: optional_str = None, wallet_dir: optional_str = None,
                        download_dir: optional_str = None):
    """Create the module-level `settings` singleton (no-op when it exists).

    Optionally loads the conf file, resolves the effective directories and
    makes sure they exist on disk.
    """
    global settings
    if settings is None:
        settings = Config(FIXED_SETTINGS, ADJUSTABLE_SETTINGS,
                          environment=get_default_env(), data_dir=data_dir, wallet_dir=wallet_dir,
                          download_dir=download_dir)
        if load_conf_file:
            settings.load_conf_file_settings()
        # pin down the effective directories so later reads are stable
        settings['data_dir'] = settings.data_dir or settings.default_data_dir
        settings['download_directory'] = settings.download_dir or settings.default_download_dir
        settings['wallet_dir'] = settings.wallet_dir or settings.default_wallet_dir
        settings.ensure_data_dir()
        settings.ensure_wallet_dir()
|