import copy
import json
import logging
import os
import sys

import yaml
from appdirs import user_data_dir
import envparse

LBRYCRD_WALLET = 'lbrycrd'
LBRYUM_WALLET = 'lbryum'
PTC_WALLET = 'ptc'

log = logging.getLogger(__name__)

LINUX = 1
DARWIN = 2
WINDOWS = 3

KB = 2**10
MB = 2**20

if sys.platform.startswith("darwin"):
    platform = DARWIN
    default_download_directory = os.path.join(os.path.expanduser("~"), 'Downloads')
    default_data_dir = user_data_dir("LBRY")
    default_lbryum_dir = os.path.join(os.path.expanduser("~"), ".lbryum")
elif sys.platform.startswith("win"):
    platform = WINDOWS
    from lbrynet.winhelpers.knownpaths import get_path, FOLDERID, UserHandle
    default_download_directory = get_path(FOLDERID.Downloads, UserHandle.current)
    default_data_dir = os.path.join(
        get_path(FOLDERID.RoamingAppData, UserHandle.current), "lbrynet")
    default_lbryum_dir = os.path.join(
        get_path(FOLDERID.RoamingAppData, UserHandle.current), "lbryum")
else:
    platform = LINUX
    default_download_directory = os.path.join(os.path.expanduser("~"), 'Downloads')
    default_data_dir = os.path.join(os.path.expanduser("~"), ".lbrynet")
    default_lbryum_dir = os.path.join(os.path.expanduser("~"), ".lbryum")


class Settings(object):
    """A collection of configuration settings"""
    # single-underscore names so that subclass overrides are visible here;
    # double-underscore names would be mangled per-class and a subclass's
    # __fixed list would never be consulted by __setitem__
    _fixed = []
    _excluded = ['get_dict', 'update']

    def __iter__(self):
        for k in self.__dict__.iterkeys():
            if k.startswith('_') or k in self._excluded:
                continue
            yield k

    def __getitem__(self, item):
        assert item in self, IndexError
        return self.__dict__[item]

    def __setitem__(self, key, value):
        assert key in self and key not in self._fixed, KeyError(key)
        self.__dict__[key] = value

    def __contains__(self, item):
        return item in iter(self)

    def get_dict(self):
        return {k: self[k] for k in self}

    def update(self, other):
        for k, v in other.iteritems():
            try:
                self.__setitem__(k, v)
            except (KeyError, AssertionError):
                pass

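# A minimal sketch of the contract (the class name here is illustrative,
# not part of the module): iteration yields only public, non-excluded
# attribute names, and update() silently skips keys that are unknown or fixed.
#
#   >>> class ExampleSettings(Settings):
#   ...     def __init__(self):
#   ...         self.timeout = 30
#   >>> s = ExampleSettings()
#   >>> list(s)
#   ['timeout']
#   >>> s.update({'timeout': 60, 'no_such_key': 1})
#   >>> s['timeout']
#   60

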
class Env(envparse.Env):
    """An Env parser that automatically namespaces the variables with LBRY"""
    NAMESPACE = 'LBRY_'

    def __init__(self, **schema):
        self.original_schema = schema
        my_schema = {
            self._convert_key(key): self._convert_value(value)
            for key, value in schema.items()
        }
        envparse.Env.__init__(self, **my_schema)

    def __call__(self, key, *args, **kwargs):
        my_key = self._convert_key(key)
        return super(Env, self).__call__(my_key, *args, **kwargs)

    def _convert_key(self, key):
        return Env.NAMESPACE + key.upper()

    def _convert_value(self, value):
        """Allow a value to be specified as an object, tuple or dict.

        If an object or dict, follow the default envparse rules; if a
        tuple, it must be of the form (cast, default) or
        (cast, default, subcast).
        """
        if isinstance(value, dict):
            return value
        if isinstance(value, (tuple, list)):
            new_value = {'cast': value[0], 'default': value[1]}
            if len(value) == 3:
                new_value['subcast'] = value[2]
            return new_value
        return value

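# A sketch of the conversion (values illustrative): the (cast, default)
# tuple form becomes envparse's dict form, and lookups are namespaced.
#
#   >>> env = Env(peer_port=(int, 3333))
#   >>> env._convert_key('peer_port')
#   'LBRY_PEER_PORT'
#   >>> env('peer_port')  # reads LBRY_PEER_PORT, falls back to the default
#   3333

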
def server_port(server_port):
    # cast the port to an int so env-provided values match the integer
    # ports used in the defaults below
    server, port = server_port.split(':')
    return server, int(port)

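# For example: server_port('lbrynet1.lbry.io:4444') -> ('lbrynet1.lbry.io', 4444)
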
DEFAULT_DHT_NODES = [
    ('lbrynet1.lbry.io', 4444),
    ('lbrynet2.lbry.io', 4444),
    ('lbrynet3.lbry.io', 4444)
]

ENVIRONMENT = Env(
    is_generous_host=(bool, True),
    run_on_startup=(bool, False),
    download_directory=(str, default_download_directory),
    max_upload=(float, 0.0),
    max_download=(float, 0.0),
    upload_log=(bool, True),
    delete_blobs_on_remove=(bool, True),
    use_upnp=(bool, True),
    start_lbrycrdd=(bool, True),
    run_reflector_server=(bool, False),
    startup_scripts=(list, []),
    # TODO: this doesn't seem like the kind of thing that should
    # be configured; move it elsewhere.
    last_version=(dict, {'lbrynet': '0.0.1', 'lbryum': '0.0.1'}),
    peer_port=(int, 3333),
    dht_node_port=(int, 4444),
    reflector_port=(int, 5566),
    download_timeout=(int, 30),
    max_search_results=(int, 25),
    search_timeout=(float, 3.0),
    cache_time=(int, 150),
    host_ui=(bool, True),
    check_ui_requirements=(bool, True),
    local_ui_path=(bool, False),
    api_port=(int, 5279),
    search_servers=(list, ['lighthouse1.lbry.io:50005']),
    data_rate=(float, .0001),  # points/megabyte
    min_info_rate=(float, .02),  # points/1000 infos
    min_valuable_info_rate=(float, .05),  # points/1000 infos
    min_valuable_hash_rate=(float, .05),  # points/1000 infos
    max_connections_per_stream=(int, 5),
    known_dht_nodes=(list, DEFAULT_DHT_NODES, server_port),
    pointtrader_server=(str, 'http://127.0.0.1:2424'),
    reflector_servers=(list, [("reflector.lbry.io", 5566)], server_port),
    wallet=(str, LBRYUM_WALLET),
    ui_branch=(str, "master"),
    default_ui_branch=(str, 'master'),
    data_dir=(str, default_data_dir),
    lbryum_wallet_dir=(str, default_lbryum_dir),
    use_auth_http=(bool, False),
    sd_download_timeout=(int, 3),
    # TODO: this field is more complicated than it needs to be because
    # it goes through a Fee validator when loaded by the exchange rate
    # manager. Look into refactoring the exchange rate conversion to
    # take in a simpler form.
    #
    # TODO: writing json on the cmd line is a pain, come up with a nicer
    # parser for this data structure. (maybe MAX_KEY_FEE=USD:25)
    max_key_fee=(json.loads, {'USD': {'amount': 25.0, 'address': ''}})
)

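# Each key above can be overridden through a namespaced environment variable:
# the upper-cased key with the LBRY_ prefix. For instance (illustrative
# values, shown for a POSIX shell):
#
#   $ export LBRY_API_PORT=5280
#   $ export LBRY_USE_UPNP=false
#
# after which ENVIRONMENT('api_port') returns 5280.

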
class AdjustableSettings(Settings):
    """Settings that are allowed to be overridden by the user"""
    def __init__(self, environ=None):
        self.environ = environ or ENVIRONMENT
        Settings.__init__(self)

    def __getattr__(self, attr):
        if attr in self.environ.original_schema:
            return self.environ(attr)
        raise AttributeError(attr)

    def get_dict(self):
        return {
            name: self.environ(name)
            for name in self.environ.original_schema
        }

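# Attribute access falls through to the environment parser (a sketch,
# assuming no LBRY_* overrides are set):
#
#   >>> adjustable = AdjustableSettings()
#   >>> adjustable.peer_port
#   3333

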
class ApplicationSettings(Settings):
    """Settings that are constants and shouldn't be overridden"""
    def __init__(self):
        self.MAX_HANDSHAKE_SIZE = 64*KB
        self.MAX_REQUEST_SIZE = 64*KB
        self.MAX_BLOB_REQUEST_SIZE = 64*KB
        self.MAX_RESPONSE_INFO_SIZE = 64*KB
        self.MAX_BLOB_INFOS_TO_REQUEST = 20
        self.BLOBFILES_DIR = "blobfiles"
        self.BLOB_SIZE = 2*MB
        self.LOG_FILE_NAME = "lbrynet.log"
        self.LOG_POST_URL = "https://lbry.io/log-upload"
        self.CRYPTSD_FILE_EXTENSION = ".cryptsd"
        self.API_INTERFACE = "localhost"
        self.API_ADDRESS = "lbryapi"
        self.ICON_PATH = "icons" if platform is WINDOWS else "app.icns"
        self.APP_NAME = "LBRY"
        self.PROTOCOL_PREFIX = "lbry"
        self.WALLET_TYPES = [LBRYUM_WALLET, LBRYCRD_WALLET]
        self.SOURCE_TYPES = ['lbry_sd_hash', 'url', 'btih']
        self.CURRENCIES = {
            'BTC': {'type': 'crypto'},
            'LBC': {'type': 'crypto'},
            'USD': {'type': 'fiat'},
        }
        self.LOGGLY_TOKEN = 'LJEzATH4AzRgAwxjAP00LwZ2YGx3MwVgZTMuBQZ3MQuxLmOv'
        self.ANALYTICS_ENDPOINT = 'https://api.segment.io/v1'
        self.ANALYTICS_TOKEN = 'Ax5LZzR1o3q3Z3WjATASDwR5rKyHH0qOIRIbLmMXn2H='
        self.DB_REVISION_FILE_NAME = 'db_revision'
        Settings.__init__(self)


APPLICATION_SETTINGS = ApplicationSettings()
ADJUSTABLE_SETTINGS = AdjustableSettings()


class DefaultSettings(ApplicationSettings, AdjustableSettings):
    # the application-level settings may not be overridden once set
    _fixed = APPLICATION_SETTINGS.get_dict().keys()

    def __init__(self):
        ApplicationSettings.__init__(self)
        AdjustableSettings.__init__(self)

    def get_dict(self):
        d = ApplicationSettings.get_dict(self)
        d.update(AdjustableSettings.get_dict(self))
        return d


DEFAULT_SETTINGS = DefaultSettings()


class Config(DefaultSettings):
    __shared_state = copy.deepcopy(DEFAULT_SETTINGS.get_dict())

    @property
    def ORIGIN(self):
        return "http://%s:%i" % (DEFAULT_SETTINGS.API_INTERFACE, self.api_port)

    @property
    def REFERER(self):
        return "http://%s:%i/" % (DEFAULT_SETTINGS.API_INTERFACE, self.api_port)

    @property
    def API_CONNECTION_STRING(self):
        return "http://%s:%i/%s" % (
            DEFAULT_SETTINGS.API_INTERFACE, self.api_port, DEFAULT_SETTINGS.API_ADDRESS)

    @property
    def UI_ADDRESS(self):
        return "http://%s:%i" % (DEFAULT_SETTINGS.API_INTERFACE, self.api_port)

    def ensure_data_dir(self):
        # although there is a risk of a race condition here we don't
        # expect there to be multiple processes accessing this
        # directory so the risk can be ignored
        if not os.path.isdir(self.data_dir):
            os.makedirs(self.data_dir)
        return self.data_dir

    def get_log_filename(self):
        """Return the log file for this platform.

        Also ensure the containing directory exists.
        """
        return os.path.join(self.ensure_data_dir(), self.LOG_FILE_NAME)

    def get_db_revision_filename(self):
        return os.path.join(self.ensure_data_dir(), self.DB_REVISION_FILE_NAME)

    def get_conf_filename(self):
        return get_settings_file_ext(self.ensure_data_dir())

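# With the default api_port of 5279 the derived URLs come out as follows
# (a sketch; actual values depend on the configured port):
#
#   >>> conf = Config()
#   >>> conf.API_CONNECTION_STRING
#   'http://localhost:5279/lbryapi'
#   >>> conf.UI_ADDRESS
#   'http://localhost:5279'

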
def update_settings_from_file(filename=None):
    filename = filename or settings.get_conf_filename()
    try:
        updates = load_settings(filename)
        log.info("Loaded settings file: %s", updates)
        settings.update(updates)
    except (IOError, OSError) as ex:
        log.info('%s: Failed to update settings from %s', ex, filename)


def get_settings_file_ext(data_dir):
    yml_path = os.path.join(data_dir, "daemon_settings.yml")
    json_path = os.path.join(data_dir, "daemon_settings.json")
    if os.path.isfile(yml_path):
        return yml_path
    elif os.path.isfile(json_path):
        return json_path
    else:
        # neither file exists yet; default to creating a yaml settings file
        return yml_path


settings_decoders = {
    '.json': json.loads,
    '.yml': yaml.safe_load
}

settings_encoders = {
    '.json': json.dumps,
    '.yml': yaml.safe_dump
}


def load_settings(path):
    ext = os.path.splitext(path)[1]
    with open(path, 'r') as settings_file:
        data = settings_file.read()
    decoder = settings_decoders.get(ext, False)
    assert decoder is not False, "Unknown settings format %s" % ext
    return decoder(data)

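# A sketch of loading, given a yaml settings file (path and contents
# illustrative):
#
#   $ cat ~/.lbrynet/daemon_settings.yml
#   download_timeout: 60
#
#   >>> load_settings(os.path.expanduser('~/.lbrynet/daemon_settings.yml'))
#   {'download_timeout': 60}

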
# TODO: be careful with this. If a setting is overridden by an environment
# variable or command line flag we don't want to persist it for future
# settings.
def save_settings(path=None):
    path = path or settings.get_conf_filename()
    to_save = {k: v for k, v in settings.__dict__.iteritems() if k in ADJUSTABLE_SETTINGS}
    ext = os.path.splitext(path)[1]
    encoder = settings_encoders.get(ext, False)
    assert encoder is not False, "Unknown settings format %s" % ext
    with open(path, 'w') as settings_file:
        settings_file.write(encoder(to_save))


# TODO: don't load the configuration automatically. The configuration
# should be loaded at runtime, not at module import time. Module
# import should have no side-effects. This is also bad because
# it means that settings are read from the environment even for
# tests, which is rarely what you want to happen.
settings = Config()