2016-12-21 19:43:13 +01:00
|
|
|
import base58
|
2016-10-31 22:19:19 +01:00
|
|
|
import json
|
2016-10-28 22:12:51 +02:00
|
|
|
import logging
|
2016-08-22 00:44:16 +02:00
|
|
|
import os
|
2017-07-03 22:01:19 +02:00
|
|
|
import re
|
2016-09-21 09:49:52 +02:00
|
|
|
import sys
|
2016-10-31 22:19:19 +01:00
|
|
|
import yaml
|
2016-11-16 20:38:43 +01:00
|
|
|
import envparse
|
2017-07-03 22:01:19 +02:00
|
|
|
from appdirs import user_data_dir, user_config_dir
|
2016-12-21 19:43:13 +01:00
|
|
|
from lbrynet.core import utils
|
2018-02-09 17:29:37 +01:00
|
|
|
from lbrynet.core.Error import InvalidCurrencyError, NoSuchDirectoryError
|
2017-08-18 16:09:08 +02:00
|
|
|
from lbrynet.androidhelpers.paths import (
|
|
|
|
android_internal_storage_dir,
|
|
|
|
android_app_internal_storage_dir
|
|
|
|
)
|
2016-12-21 19:43:13 +01:00
|
|
|
|
2017-07-03 22:01:19 +02:00
|
|
|
try:
|
|
|
|
from lbrynet.winhelpers.knownpaths import get_path, FOLDERID, UserHandle
|
2017-08-18 12:52:03 +02:00
|
|
|
except (ImportError, ValueError, NameError):
|
|
|
|
# Android platform: NameError: name 'c_wchar' is not defined
|
2017-07-03 22:01:19 +02:00
|
|
|
pass
|
|
|
|
|
2017-01-17 04:23:20 +01:00
|
|
|
log = logging.getLogger(__name__)

# repository/package root: two levels up from this file
ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# prefix used for all environment-variable settings (see Env below)
ENV_NAMESPACE = 'LBRY_'

# identifiers for the supported wallet backends
LBRYCRD_WALLET = 'lbrycrd'
LBRYUM_WALLET = 'lbryum'
PTC_WALLET = 'ptc'
TORBA_WALLET = 'torba'

PROTOCOL_PREFIX = 'lbry'
APP_NAME = 'LBRY'

# platform discriminators assigned to the module-level `platform` variable
LINUX = 1
DARWIN = 2
WINDOWS = 3
ANDROID = 4
# binary size units
KB = 2 ** 10
MB = 2 ** 20

DEFAULT_CONCURRENT_ANNOUNCERS = 10

# bootstrap nodes used to join the DHT
DEFAULT_DHT_NODES = [
    ('lbrynet1.lbry.io', 4444),
    ('lbrynet2.lbry.io', 4444),
    ('lbrynet3.lbry.io', 4444)
]
|
2016-09-21 09:49:52 +02:00
|
|
|
|
2017-01-17 04:23:20 +01:00
|
|
|
# Map conf-file extension -> decoder callable. yaml.safe_load is used instead
# of yaml.load: the settings file is plain key/value data and yaml.load can
# construct arbitrary Python objects from a crafted file. The encoder side
# already uses yaml.safe_dump, so safe_load round-trips everything we write.
settings_decoders = {
    '.json': json.loads,
    '.yml': yaml.safe_load
}

# Map conf-file extension -> encoder callable (inverse of settings_decoders).
settings_encoders = {
    '.json': json.dumps,
    '.yml': yaml.safe_dump
}

# set by CLI when the user specifies an alternate config file path
conf_file = None
|
2017-07-03 22:01:19 +02:00
|
|
|
|
2018-01-09 22:55:39 +01:00
|
|
|
|
2017-06-30 20:30:54 +02:00
|
|
|
def _win_path_to_bytes(path):
|
|
|
|
"""
|
|
|
|
Encode Windows paths to string. appdirs.user_data_dir()
|
|
|
|
on windows will return unicode path, unlike other platforms
|
|
|
|
which returns string. This will cause problems
|
|
|
|
because we use strings for filenames and combining them with
|
|
|
|
os.path.join() will result in errors.
|
|
|
|
"""
|
|
|
|
for encoding in ('ASCII', 'MBCS'):
|
|
|
|
try:
|
|
|
|
return path.encode(encoding)
|
|
|
|
except (UnicodeEncodeError, LookupError):
|
|
|
|
pass
|
|
|
|
return path
|
|
|
|
|
2017-01-17 04:23:20 +01:00
|
|
|
|
2017-12-28 16:08:30 +01:00
|
|
|
def _get_old_directories(platform_type):
    """Return the legacy 'data'/'lbryum'/'download' directories for a platform.

    These are the pre-XDG locations kept for installs that predate the
    new directory layout. Raises ValueError for an unrecognized
    platform constant.
    """
    if platform_type == WINDOWS:
        appdata = get_path(FOLDERID.RoamingAppData, UserHandle.current)
        return {
            'data': os.path.join(appdata, 'lbrynet'),
            'lbryum': os.path.join(appdata, 'lbryum'),
            'download': get_path(FOLDERID.Downloads, UserHandle.current),
        }
    if platform_type == DARWIN:
        return {
            'data': user_data_dir('LBRY'),
            'lbryum': os.path.expanduser('~/.lbryum'),
            'download': os.path.expanduser('~/Downloads'),
        }
    if platform_type == LINUX:
        return {
            'data': os.path.expanduser('~/.lbrynet'),
            'lbryum': os.path.expanduser('~/.lbryum'),
            'download': os.path.expanduser('~/Downloads'),
        }
    raise ValueError('unknown platform value')
|
|
|
|
|
|
|
|
|
|
|
|
def _get_new_directories(platform_type):
    """Return the current 'data'/'lbryum'/'download' directories for a platform.

    Used for fresh installs; legacy installs keep _get_old_directories().
    Raises ValueError for an unrecognized platform constant.
    """
    directories = {}
    if platform_type == ANDROID:
        directories['data'] = '%s/lbrynet' % android_app_internal_storage_dir()
        directories['lbryum'] = '%s/lbryum' % android_app_internal_storage_dir()
        directories['download'] = '%s/Download' % android_internal_storage_dir()
    elif platform_type == WINDOWS:
        directories['data'] = user_data_dir('lbrynet', 'lbry')
        directories['lbryum'] = user_data_dir('lbryum', 'lbry')
        directories['download'] = get_path(FOLDERID.Downloads, UserHandle.current)
    elif platform_type == DARWIN:
        # macOS never changed layout; reuse the legacy locations
        directories = _get_old_directories(platform_type)
    elif platform_type == LINUX:
        directories['data'] = user_data_dir('lbry/lbrynet')
        directories['lbryum'] = user_data_dir('lbry/lbryum')
        try:
            with open(os.path.join(user_config_dir(), 'user-dirs.dirs'), 'r') as xdg:
                match = re.search(r'XDG_DOWNLOAD_DIR=(.+)', xdg.read())
            if match:
                # fix: previously .group(1) was called unconditionally and
                # raised AttributeError when the entry was missing; also the
                # $HOME expansion used re.sub with an unescaped replacement,
                # which breaks when HOME is unset or contains backslashes.
                down_dir = match.group(1)
                home = os.getenv('HOME')
                if home:
                    down_dir = down_dir.replace('$HOME', home)
                directories['download'] = down_dir.replace('"', '')
            else:
                directories['download'] = os.getenv('XDG_DOWNLOAD_DIR')
        except EnvironmentError:
            directories['download'] = os.getenv('XDG_DOWNLOAD_DIR')

        if not directories['download']:
            directories['download'] = os.path.expanduser('~/Downloads')
    else:
        raise ValueError('unknown platform value')
    return directories
|
2017-07-03 22:01:19 +02:00
|
|
|
|
2017-06-30 20:30:54 +02:00
|
|
|
|
2017-08-18 12:52:03 +02:00
|
|
|
# Detect the platform at import time and pick the default directory set.
# On Windows and Linux, prefer the legacy layout when an old install is
# detected (either old data or old lbryum directory exists), otherwise
# use the new layout.
if 'ANDROID_ARGUMENT' in os.environ:
    # https://github.com/kivy/kivy/blob/master/kivy/utils.py#L417-L421
    platform = ANDROID
    dirs = _get_new_directories(ANDROID)
elif 'darwin' in sys.platform:
    platform = DARWIN
    dirs = _get_old_directories(DARWIN)
elif 'win' in sys.platform:
    platform = WINDOWS
    if os.path.isdir(_get_old_directories(WINDOWS)['data']) or \
            os.path.isdir(_get_old_directories(WINDOWS)['lbryum']):
        dirs = _get_old_directories(WINDOWS)
    else:
        dirs = _get_new_directories(WINDOWS)
    # appdirs returns unicode paths on Windows; normalize to byte strings
    dirs['data'] = _win_path_to_bytes(dirs['data'])
    dirs['lbryum'] = _win_path_to_bytes(dirs['lbryum'])
    dirs['download'] = _win_path_to_bytes(dirs['download'])
else:
    platform = LINUX
    if os.path.isdir(_get_old_directories(LINUX)['data']) or \
            os.path.isdir(_get_old_directories(LINUX)['lbryum']):
        dirs = _get_old_directories(LINUX)
    else:
        dirs = _get_new_directories(LINUX)

# platform-appropriate defaults consumed by ADJUSTABLE_SETTINGS below
default_data_dir = dirs['data']
default_lbryum_dir = dirs['lbryum']
default_download_dir = dirs['download']

ICON_PATH = 'icons' if platform is WINDOWS else 'app.icns'
|
2016-12-21 20:55:43 +01:00
|
|
|
|
|
|
|
|
2017-01-17 04:23:20 +01:00
|
|
|
def server_port(server_and_port):
    """Split a 'host:port' string into a (host, int(port)) tuple.

    Uses rsplit on the final colon so a host that itself contains
    colons (e.g. an IPv6 literal) does not raise an unpacking error;
    plain 'host:port' inputs behave exactly as before.
    """
    server, port = server_and_port.rsplit(':', 1)
    return server, int(port)
|
2016-10-26 09:16:33 +02:00
|
|
|
|
|
|
|
|
2018-01-09 22:55:39 +01:00
|
|
|
def server_list(servers):
    """Parse an iterable of 'host:port' strings into (host, port) tuples."""
    parsed = []
    for entry in servers:
        parsed.append(server_port(entry))
    return parsed
|
|
|
|
|
2018-07-24 18:42:12 +02:00
|
|
|
|
2018-05-17 23:41:23 +02:00
|
|
|
def server_list_reverse(servers):
    """Format (host, port) tuples back into 'host:port' strings.

    Inverse of server_list(); used when writing settings back to the
    conf file.
    """
    formatted = []
    for server, port in servers:
        formatted.append("%s:%s" % (server, port))
    return formatted
|
2018-01-09 22:55:39 +01:00
|
|
|
|
2018-07-24 18:42:12 +02:00
|
|
|
|
2016-11-16 20:38:43 +01:00
|
|
|
class Env(envparse.Env):
    """envparse.Env wrapper that namespaces every variable with the LBRY_ prefix."""

    def __init__(self, **schema):
        self.original_schema = schema
        namespaced = {}
        for name, spec in schema.items():
            namespaced[self._convert_key(name)] = self._convert_value(spec)
        envparse.Env.__init__(self, **namespaced)

    def __call__(self, key, *args, **kwargs):
        return super(Env, self).__call__(self._convert_key(key), *args, **kwargs)

    @staticmethod
    def _convert_key(key):
        # e.g. 'api_port' -> 'LBRY_API_PORT'
        return ENV_NAMESPACE + key.upper()

    @staticmethod
    def _convert_value(value):
        """Translate a (cast, default) or (cast, default, subcast)
        tuple/list into the keyword dict envparse expects; any other
        value is passed through untouched.
        """
        if not isinstance(value, (tuple, list)):
            return value
        converted = {'cast': value[0], 'default': value[1]}
        if len(value) == 3:
            converted['subcast'] = value[2]
        return converted
|
|
|
|
|
2017-04-26 20:15:38 +02:00
|
|
|
|
2017-01-17 18:29:09 +01:00
|
|
|
# The layered "data types" a setting value can live in; see Config._data
# and Config._search_order for how these interact.
TYPE_DEFAULT = 'default'
TYPE_PERSISTED = 'persisted'
TYPE_ENV = 'env'
TYPE_CLI = 'cli'
TYPE_RUNTIME = 'runtime'

# Settings that can never be overridden by the user (Config rejects
# set() calls on these names).
FIXED_SETTINGS = {
    'ANALYTICS_ENDPOINT': 'https://api.segment.io/v1',
    'ANALYTICS_TOKEN': 'Ax5LZzR1o3q3Z3WjATASDwR5rKyHH0qOIRIbLmMXn2H=',
    'API_ADDRESS': 'lbryapi',
    'APP_NAME': APP_NAME,
    'BLOBFILES_DIR': 'blobfiles',
    'CRYPTSD_FILE_EXTENSION': '.cryptsd',
    'CURRENCIES': {
        'BTC': {'type': 'crypto'},
        'LBC': {'type': 'crypto'},
        'USD': {'type': 'fiat'},
    },
    'DB_REVISION_FILE_NAME': 'db_revision',
    'ICON_PATH': ICON_PATH,
    'LOGGLY_TOKEN': 'BQEzZmMzLJHgAGxkBF00LGD0YGuyATVgAmqxAQEuAQZ2BQH4',
    'LOG_FILE_NAME': 'lbrynet.log',
    'LOG_POST_URL': 'https://lbry.io/log-upload',
    'MAX_BLOB_REQUEST_SIZE': 64 * KB,
    'MAX_HANDSHAKE_SIZE': 64 * KB,
    'MAX_REQUEST_SIZE': 64 * KB,
    'MAX_RESPONSE_INFO_SIZE': 64 * KB,
    'MAX_BLOB_INFOS_TO_REQUEST': 20,
    'PROTOCOL_PREFIX': PROTOCOL_PREFIX,
    'SLACK_WEBHOOK': ('nUE0pUZ6Yl9bo29epl5moTSwnl5wo20ip2IlqzywMKZiIQSFZR5'
                      'AHx4mY0VmF0WQZ1ESEP9kMHZlp1WzJwWOoKN3ImR1M2yUAaMyqGZ='),
    'WALLET_TYPES': [LBRYUM_WALLET, LBRYCRD_WALLET],
    'HEADERS_FILE_SHA256_CHECKSUM': (366295, 'b0c8197153a33ccbc52fb81a279588b6015b68b7726f73f6a2b81f7e25bfe4b9')
}

# User-adjustable settings. Each value is a tuple of
#   (cast, default) or
#   (cast, default, conf_load_converter) or
#   (cast, default, conf_load_converter, conf_save_converter)
# where index 2 is applied when loading the conf file and index 3 when
# saving it (see Config._convert_conf_file_lists and
# Config._convert_conf_file_lists_reverse).
ADJUSTABLE_SETTINGS = {
    # By default, daemon will block all cross origin requests
    # but if this is set, this value will be used for the
    # Access-Control-Allow-Origin. For example
    # set to '*' to allow all requests, or set to 'http://localhost:8080'
    # if you're running a test UI on that port
    'allowed_origin': (str, ''),

    # Changing this value is not-advised as it could potentially
    # expose the lbrynet daemon to the outside world which would
    # give an attacker access to your wallet and you could lose
    # all of your credits.
    'api_host': (str, 'localhost'),
    'api_port': (int, 5279),
    # claims set to expire within this many blocks will be
    # automatically renewed after startup (if set to 0, renews
    # will not be made automatically)
    'auto_renew_claim_height_delta': (int, 0),
    'cache_time': (int, 150),
    'data_dir': (str, default_data_dir),
    'data_rate': (float, .0001),  # points/megabyte
    'delete_blobs_on_remove': (bool, True),
    'dht_node_port': (int, 4444),
    'download_directory': (str, default_download_dir),
    'download_timeout': (int, 180),
    'download_mirrors': (list, ['blobs.lbry.io']),
    'is_generous_host': (bool, True),
    'announce_head_blobs_only': (bool, True),
    'concurrent_announcers': (int, DEFAULT_CONCURRENT_ANNOUNCERS),
    'known_dht_nodes': (list, DEFAULT_DHT_NODES, server_list, server_list_reverse),
    'lbryum_wallet_dir': (str, default_lbryum_dir),
    'max_connections_per_stream': (int, 5),
    'seek_head_blob_first': (bool, True),
    # TODO: writing json on the cmd line is a pain, come up with a nicer
    # parser for this data structure. maybe 'USD:25'
    'max_key_fee': (json.loads, {'currency': 'USD', 'amount': 50.0}),
    'disable_max_key_fee': (bool, False),
    'min_info_rate': (float, .02),  # points/1000 infos
    'min_valuable_hash_rate': (float, .05),  # points/1000 infos
    'min_valuable_info_rate': (float, .05),  # points/1000 infos
    'peer_port': (int, 3333),
    'pointtrader_server': (str, 'http://127.0.0.1:2424'),
    'reflector_port': (int, 5566),
    # if reflect_uploads is True, send files to reflector after publishing (as well as a periodic check in the
    # event the initial upload failed or was disconnected part way through, provided the auto_re_reflect_interval > 0)
    'reflect_uploads': (bool, True),
    'auto_re_reflect_interval': (int, 86400),  # set to 0 to disable
    'reflector_servers': (list, [('reflector.lbry.io', 5566)], server_list, server_list_reverse),
    'run_reflector_server': (bool, False),  # adds `reflector` to components_to_skip unless True
    'sd_download_timeout': (int, 3),
    'share_usage_data': (bool, True),  # whether to share usage stats and diagnostic info with LBRY
    'peer_search_timeout': (int, 60),
    'use_auth_http': (bool, False),
    'use_upnp': (bool, True),
    'use_keyring': (bool, False),
    'wallet': (str, LBRYUM_WALLET),
    'blockchain_name': (str, 'lbrycrd_main'),
    'lbryum_servers': (list, [('lbryumx1.lbry.io', 50001), ('lbryumx2.lbry.io',
                                                            50001)], server_list, server_list_reverse),
    's3_headers_depth': (int, 96 * 10),  # download headers from s3 when the local height is more than 10 chunks behind
    'components_to_skip': (list, [])  # components which will be skipped during start-up of daemon
}
|
2016-10-14 08:13:37 +02:00
|
|
|
|
2016-10-23 07:17:24 +02:00
|
|
|
|
2017-01-17 18:29:09 +01:00
|
|
|
class Config(object):
    """Layered settings container.

    Values are held in several typed data sets (defaults, persisted
    conf-file settings, environment variables, CLI arguments, runtime
    values) and looked up in a fixed precedence order, so e.g. a CLI
    flag overrides an environment variable which overrides the built-in
    default.
    """

    def __init__(self, fixed_defaults, adjustable_defaults, persisted_settings=None,
                 environment=None, cli_settings=None):
        """
        Args:
            fixed_defaults: dict of constants that can never be overridden
            adjustable_defaults: dict mapping name -> (cast, default, ...) tuples
            persisted_settings: settings previously saved to the conf file
            environment: an Env instance for reading LBRY_* variables, or None
            cli_settings: settings parsed from the command line, or None
        """
        self._installation_id = None
        self._session_id = base58.b58encode(utils.generate_id()).decode()
        self._node_id = None

        self._fixed_defaults = fixed_defaults
        self._adjustable_defaults = adjustable_defaults

        self._data = {
            TYPE_DEFAULT: {},  # defaults
            TYPE_PERSISTED: {},  # stored settings from daemon_settings.yml (or from a db, etc)
            TYPE_ENV: {},  # settings from environment variables
            TYPE_CLI: {},  # command-line arguments
            TYPE_RUNTIME: {},  # set during runtime (using self.set(), etc)
        }

        # the order in which a piece of data is searched for. earlier types override later types
        self._search_order = (
            TYPE_RUNTIME, TYPE_CLI, TYPE_ENV, TYPE_PERSISTED, TYPE_DEFAULT
        )

        # types of data where user specified config values can be stored
        self._user_specified = (
            TYPE_RUNTIME, TYPE_CLI, TYPE_ENV, TYPE_PERSISTED
        )

        self._data[TYPE_DEFAULT].update(self._fixed_defaults)
        self._data[TYPE_DEFAULT].update(
            {k: v[1] for (k, v) in self._adjustable_defaults.items()})

        if persisted_settings is None:
            persisted_settings = {}
        self._validate_settings(persisted_settings)
        self._data[TYPE_PERSISTED].update(persisted_settings)

        env_settings = self._parse_environment(environment)
        self._validate_settings(env_settings)
        self._data[TYPE_ENV].update(env_settings)

        if cli_settings is None:
            cli_settings = {}
        self._validate_settings(cli_settings)
        self._data[TYPE_CLI].update(cli_settings)

    def __repr__(self):
        return self.get_current_settings_dict().__repr__()

    def __iter__(self):
        # fix: dict.iterkeys() is Python 2 only and crashes on Python 3;
        # plain iteration over the dict yields keys on both versions
        for k in self._data[TYPE_DEFAULT]:
            yield k

    def __getitem__(self, name):
        return self.get(name)

    def __setitem__(self, name, value):
        return self.set(name, value)

    def __contains__(self, name):
        return name in self._data[TYPE_DEFAULT]

    @staticmethod
    def _parse_environment(environment):
        """Extract every schema option that is actually set in the environment."""
        env_settings = {}
        if environment is not None:
            assert isinstance(environment, Env)
            for opt in environment.original_schema:
                if environment(opt) is not None:
                    env_settings[opt] = environment(opt)
        return env_settings

    def _assert_valid_data_type(self, data_type):
        if data_type not in self._data:
            # fix: error message previously read "{} in is not a valid data type"
            raise KeyError('{} is not a valid data type'.format(data_type))

    def get_valid_setting_names(self):
        return self._data[TYPE_DEFAULT].keys()

    def _is_valid_setting(self, name):
        return name in self.get_valid_setting_names()

    def _assert_valid_setting(self, name):
        if not self._is_valid_setting(name):
            raise KeyError('{} is not a valid setting'.format(name))

    def _validate_settings(self, data):
        """Raise KeyError if `data` contains any unknown setting names."""
        invalid_settings = set(data.keys()) - set(self.get_valid_setting_names())
        if len(invalid_settings) > 0:
            raise KeyError('invalid settings: {}'.format(', '.join(invalid_settings)))

    def _assert_editable_setting(self, name):
        self._assert_valid_setting(name)
        if name in self._fixed_defaults:
            raise ValueError('{} is not an editable setting'.format(name))

    def _assert_valid_setting_value(self, name, value):
        """Per-setting value validation for the settings that need it."""
        if name == "max_key_fee":
            currency = str(value["currency"]).upper()
            if currency not in self._fixed_defaults['CURRENCIES'].keys():
                raise InvalidCurrencyError(currency)
        elif name == "download_directory":
            directory = str(value)
            if not os.path.exists(directory):
                raise NoSuchDirectoryError(directory)

    def is_default(self, name):
        """Check if a config value is wasn't specified by the user

        Args:
            name: the name of the value to check

        Returns: true if config value is the default one, false if it was specified by
            the user

        Sometimes it may be helpful to understand if a config value was specified
        by the user or if it still holds its default value. This function will return
        true when the config value is still the default. Note that when the user
        specifies a value that is equal to the default one, it will still be considered
        as 'user specified'
        """
        self._assert_valid_setting(name)
        for possible_data_type in self._user_specified:
            if name in self._data[possible_data_type]:
                return False
        return True

    def get(self, name, data_type=None):
        """Get a config value

        Args:
            name: the name of the value to get
            data_type: if given, get the value from a specific data set (see below)

        Returns: the config value for the given name

        If data_type is None, get() will search for the given name in each data set, in
        order of precedence. It will return the first value it finds. This is the "effective"
        value of a config name. For example, ENV values take precedence over DEFAULT values,
        so if a value is present in ENV and in DEFAULT, the ENV value will be returned
        """
        self._assert_valid_setting(name)
        if data_type is not None:
            self._assert_valid_data_type(data_type)
            return self._data[data_type][name]
        for possible_data_type in self._search_order:
            if name in self._data[possible_data_type]:
                return self._data[possible_data_type][name]
        raise KeyError('{} is not a valid setting'.format(name))

    def set(self, name, value, data_types=(TYPE_RUNTIME,)):
        """Set a config value

        Args:
            name: the name of the value to set
            value: the value
            data_types: what type(s) of data this is

        Returns: None

        By default, this sets the RUNTIME value of a config. If you wish to set other
        data types (e.g. PERSISTED values to save to a file, CLI values from parsed
        command-line options, etc), you can specify that with the data_types param
        """
        self._assert_editable_setting(name)
        self._assert_valid_setting_value(name, value)

        for data_type in data_types:
            self._assert_valid_data_type(data_type)
            self._data[data_type][name] = value

    def update(self, updated_settings, data_types=(TYPE_RUNTIME,)):
        """Bulk set(); invalid names/values are skipped rather than raised."""
        for k, v in updated_settings.items():
            try:
                self.set(k, v, data_types=data_types)
            except (KeyError, AssertionError):
                pass

    def get_current_settings_dict(self):
        """Return the effective value of every known setting."""
        current_settings = {}
        for key in self.get_valid_setting_names():
            current_settings[key] = self.get(key)
        return current_settings

    def get_adjustable_settings_dict(self):
        # fix: dict.iteritems() is Python 2 only; items() works on both
        return {
            key: val for key, val in self.get_current_settings_dict().items()
            if key in self._adjustable_defaults
        }

    def save_conf_file_settings(self):
        """Write the PERSISTED data set to the conf file (yml or json)."""
        if conf_file:
            path = conf_file
        else:
            path = self.get_conf_filename()
        # reverse the conversions done after loading the settings from the conf
        # file
        rev = self._convert_conf_file_lists_reverse(self._data[TYPE_PERSISTED])
        ext = os.path.splitext(path)[1]
        encoder = settings_encoders.get(ext, False)
        assert encoder is not False, 'Unknown settings format %s' % ext
        with open(path, 'w') as settings_file:
            settings_file.write(encoder(rev))

    @staticmethod
    def _convert_conf_file_lists_reverse(converted):
        """Apply each setting's save-converter (4th tuple entry) before writing."""
        rev = {}
        for k in converted.keys():
            if k in ADJUSTABLE_SETTINGS and len(ADJUSTABLE_SETTINGS[k]) == 4:
                rev[k] = ADJUSTABLE_SETTINGS[k][3](converted[k])
            else:
                rev[k] = converted[k]
        return rev

    @staticmethod
    def _convert_conf_file_lists(decoded):
        """Apply each setting's load-converter (3rd tuple entry) after reading."""
        converted = {}
        for k, v in decoded.items():
            if k in ADJUSTABLE_SETTINGS and len(ADJUSTABLE_SETTINGS[k]) >= 3:
                converted[k] = ADJUSTABLE_SETTINGS[k][2](v)
            else:
                converted[k] = v
        return converted

    def initialize_post_conf_load(self):
        """Initialize members that depend on the loaded conf file.

        fix: previously this operated on the module-level `settings`
        global instead of `self`, which was wrong for any instance
        other than the global singleton (and identical for it).
        """
        self.installation_id = self.get_installation_id()
        self.node_id = self.get_node_id()

    def load_conf_file_settings(self):
        """Read the conf file (if present) into the PERSISTED data set."""
        if conf_file:
            path = conf_file
        else:
            path = self.get_conf_filename()
        ext = os.path.splitext(path)[1]
        decoder = settings_decoders.get(ext, False)
        assert decoder is not False, 'Unknown settings format %s' % ext
        try:
            with open(path, 'r') as settings_file:
                data = settings_file.read()
            decoded = self._fix_old_conf_file_settings(decoder(data))
            log.info('Loaded settings file: %s', path)
            self._validate_settings(decoded)
            self._data[TYPE_PERSISTED].update(self._convert_conf_file_lists(decoded))
        except (IOError, OSError) as err:
            log.info('%s: Failed to update settings from %s', err, path)

        # initialize members depending on config file
        self.initialize_post_conf_load()

    def _fix_old_conf_file_settings(self, settings_dict):
        """Migrate/strip legacy setting names found in old conf files."""
        if 'API_INTERFACE' in settings_dict:
            settings_dict['api_host'] = settings_dict['API_INTERFACE']
            del settings_dict['API_INTERFACE']
        if 'startup_scripts' in settings_dict:
            del settings_dict['startup_scripts']
        if 'upload_log' in settings_dict:
            settings_dict['share_usage_data'] = settings_dict['upload_log']
            del settings_dict['upload_log']
        if 'share_debug_info' in settings_dict:
            settings_dict['share_usage_data'] = settings_dict['share_debug_info']
            del settings_dict['share_debug_info']
        # fix: iterate a snapshot of the keys; deleting from a dict while
        # iterating its live view raises RuntimeError on Python 3
        for key in list(settings_dict.keys()):
            if not self._is_valid_setting(key):
                log.warning('Ignoring invalid conf file setting: %s', key)
                del settings_dict[key]
        return settings_dict

    def ensure_data_dir(self):
        # although there is a risk of a race condition here we don't
        # expect there to be multiple processes accessing this
        # directory so the risk can be ignored
        if not os.path.isdir(self['data_dir']):
            os.makedirs(self['data_dir'])
        return self['data_dir']

    def get_log_filename(self):
        """
        Return the log file for this platform.
        Also ensure the containing directory exists.
        """
        return os.path.join(self.ensure_data_dir(), self['LOG_FILE_NAME'])

    def get_api_connection_string(self):
        return 'http://%s:%i/%s' % (self['api_host'], self['api_port'], self['API_ADDRESS'])

    def get_ui_address(self):
        return 'http://%s:%i' % (self['api_host'], self['api_port'])

    def get_db_revision_filename(self):
        return os.path.join(self.ensure_data_dir(), self['DB_REVISION_FILE_NAME'])

    def get_conf_filename(self):
        """Return the existing conf file, preferring yml; default to the yml path."""
        data_dir = self.ensure_data_dir()
        yml_path = os.path.join(data_dir, 'daemon_settings.yml')
        json_path = os.path.join(data_dir, 'daemon_settings.json')
        if os.path.isfile(yml_path):
            return yml_path
        elif os.path.isfile(json_path):
            return json_path
        else:
            return yml_path

    def get_installation_id(self):
        """Read (or create and persist) the per-install identifier."""
        install_id_filename = os.path.join(self.ensure_data_dir(), "install_id")
        if not self._installation_id:
            if os.path.isfile(install_id_filename):
                with open(install_id_filename, "r") as install_id_file:
                    self._installation_id = str(install_id_file.read()).strip()
            if not self._installation_id:
                # fix: decode to text, matching _session_id; writing the raw
                # bytes from b58encode into a text-mode file fails on Python 3
                self._installation_id = base58.b58encode(utils.generate_id()).decode()
                with open(install_id_filename, "w") as install_id_file:
                    install_id_file.write(self._installation_id)
        return self._installation_id

    def get_node_id(self):
        """Read (or create and persist) the DHT node id (raw bytes in memory,
        base58 text on disk)."""
        node_id_filename = os.path.join(self.ensure_data_dir(), "node_id")
        if not self._node_id:
            if os.path.isfile(node_id_filename):
                with open(node_id_filename, "r") as node_id_file:
                    self._node_id = base58.b58decode(str(node_id_file.read()).strip())
            if not self._node_id:
                self._node_id = utils.generate_id()
                # fix: decode before writing to the text-mode file (Python 3)
                with open(node_id_filename, "w") as node_id_file:
                    node_id_file.write(base58.b58encode(self._node_id).decode())
        return self._node_id

    def get_session_id(self):
        return self._session_id
|
|
|
|
|
|
|
|
|
|
|
|
# type: Config
|
2016-12-21 19:43:13 +01:00
|
|
|
settings = None
|
|
|
|
|
2017-12-28 16:08:30 +01:00
|
|
|
|
2017-01-17 18:29:09 +01:00
|
|
|
def get_default_env():
    """Build an Env schema from ADJUSTABLE_SETTINGS with every default
    replaced by None, so only variables actually present in the
    environment produce values.
    """
    env_defaults = {}
    for name, spec in ADJUSTABLE_SETTINGS.items():
        # keep the cast and any trailing converter entries; drop the default
        env_defaults[name] = (spec[0], None) + tuple(spec[2:])
    return Env(**env_defaults)
|
2017-01-17 18:29:09 +01:00
|
|
|
|
|
|
|
|
2017-01-17 04:23:20 +01:00
|
|
|
def initialize_settings(load_conf_file=True):
    """Create the global `settings` Config singleton if it does not exist.

    When load_conf_file is True, also read the on-disk conf file into it.
    Calling this again after the singleton exists is a no-op.
    """
    global settings
    if settings is not None:
        return
    settings = Config(FIXED_SETTINGS, ADJUSTABLE_SETTINGS,
                      environment=get_default_env())
    if load_conf_file:
        settings.load_conf_file_settings()