refactor conf.settings

This commit is contained in:
Alex Grintsvayg 2017-01-16 22:23:20 -05:00
parent 8a026071eb
commit 267c6cbaca
46 changed files with 568 additions and 586 deletions

View file

@ -83,6 +83,6 @@ class Api(object):
session = sessions.FuturesSession()
return cls(
session,
conf.settings.ANALYTICS_ENDPOINT,
utils.deobfuscate(conf.settings.ANALYTICS_TOKEN)
conf.settings['ANALYTICS_ENDPOINT'],
utils.deobfuscate(conf.settings['ANALYTICS_TOKEN'])
)

View file

@ -28,9 +28,9 @@ class Manager(object):
api = Api.new_instance()
if events is None:
events = Events(
make_context(get_platform(), conf.settings.wallet),
base58.b58encode(conf.settings.lbryid),
conf.settings.session_id,
make_context(get_platform(), conf.settings['wallet']),
base58.b58encode(conf.settings.get_lbry_id()),
conf.settings.get_session_id(),
)
return cls(api, events, Track())

View file

@ -1,128 +1,29 @@
import base58
import copy
import json
import logging
import os
import sys
import yaml
from appdirs import user_data_dir
import envparse
from appdirs import user_data_dir
from lbrynet.core import utils
log = logging.getLogger(__name__)
ENV_NAMESPACE = 'LBRY_'
LBRYCRD_WALLET = 'lbrycrd'
LBRYUM_WALLET = 'lbryum'
PTC_WALLET = 'ptc'
PROTOCOL_PREFIX = "lbry"
APP_NAME = "LBRY"
log = logging.getLogger(__name__)
PROTOCOL_PREFIX = 'lbry'
APP_NAME = 'LBRY'
LINUX = 1
DARWIN = 2
WINDOWS = 3
KB = 2**10
MB = 2**20
if sys.platform.startswith("darwin"):
platform = DARWIN
default_download_directory = os.path.join(os.path.expanduser("~"), 'Downloads')
default_data_dir = user_data_dir("LBRY")
default_lbryum_dir = os.path.join(os.path.expanduser("~"), ".lbryum")
elif sys.platform.startswith("win"):
platform = WINDOWS
from lbrynet.winhelpers.knownpaths import get_path, FOLDERID, UserHandle
default_download_directory = get_path(FOLDERID.Downloads, UserHandle.current)
default_data_dir = os.path.join(
get_path(FOLDERID.RoamingAppData, UserHandle.current), "lbrynet")
default_lbryum_dir = os.path.join(
get_path(FOLDERID.RoamingAppData, UserHandle.current), "lbryum")
else:
platform = LINUX
default_download_directory = os.path.join(os.path.expanduser("~"), 'Downloads')
default_data_dir = os.path.join(os.path.expanduser("~"), ".lbrynet")
default_lbryum_dir = os.path.join(os.path.expanduser("~"), ".lbryum")
ICON_PATH = "icons" if platform is WINDOWS else "app.icns"
class Settings(object):
    """A collection of configuration settings.

    Settings live in the instance ``__dict__`` and are exposed both as
    attributes and via dict-style ``obj[key]`` access.  Iteration yields
    only public setting names: anything starting with an underscore or
    listed in ``_excluded`` is hidden.
    """
    # Names that may never be overwritten through __setitem__.
    __fixed = []
    # Public methods that must not be treated as settings during iteration.
    _excluded = ['get_dict', 'update']

    def __iter__(self):
        # NOTE: iterkeys() is Python 2 only.
        for k in self.__dict__.iterkeys():
            if k.startswith('_') or k in self._excluded:
                continue
            yield k

    def __getitem__(self, item):
        # NOTE(review): `assert cond, IndexError` uses the exception CLASS as
        # the assert message; it still raises AssertionError, not IndexError,
        # and is stripped entirely under `python -O`.
        assert item in self, IndexError
        return self.__dict__[item]

    def __setitem__(self, key, value):
        # Only existing, non-fixed settings may be assigned.
        assert key in self and key not in self.__fixed, KeyError(key)
        self.__dict__[key] = value

    def __contains__(self, item):
        # O(n): delegates to __iter__ so the exclusion rules above apply.
        return item in iter(self)

    def get_dict(self):
        """Return a plain dict of all public settings."""
        return {k: self[k] for k in self}

    def update(self, updated_settings):
        """Apply valid entries from ``updated_settings``; unknown or fixed
        keys are silently skipped."""
        for k, v in updated_settings.iteritems():
            try:
                self.__setitem__(k, v)
            except (KeyError, AssertionError):
                pass
class Env(envparse.Env):
    """An Env parser that automatically namespaces the variables with LBRY"""
    NAMESPACE = 'LBRY_'

    def __init__(self, **schema):
        # Keep the caller's un-namespaced schema so settings can later be
        # looked up by their plain names.
        self.original_schema = schema
        my_schema = {
            self._convert_key(key): self._convert_value(value)
            for key, value in schema.items()
        }
        envparse.Env.__init__(self, **my_schema)

    def __call__(self, key, *args, **kwargs):
        # Translate the bare key to its LBRY_-prefixed form before lookup.
        my_key = self._convert_key(key)
        return super(Env, self).__call__(my_key, *args, **kwargs)

    def _convert_key(self, key):
        # e.g. 'api_port' -> 'LBRY_API_PORT'
        return Env.NAMESPACE + key.upper()

    def _convert_value(self, value):
        """Allow value to be specified as an object, tuple or dict
        if object or dict, follow default envparse rules, if tuple
        it needs to be of the form (cast, default) or (cast, default, subcast)
        """
        if isinstance(value, dict):
            return value
        if isinstance(value, (tuple, list)):
            new_value = {'cast': value[0], 'default': value[1]}
            if len(value) == 3:
                new_value['subcast'] = value[2]
            return new_value
        return value
def server_port(server_port):
    """Split a 'host:port' string into a (host, int(port)) tuple."""
    host_part, _, port_part = server_port.partition(':')
    return host_part, int(port_part)
KB = 2 ** 10
MB = 2 ** 20
DEFAULT_DHT_NODES = [
('lbrynet1.lbry.io', 4444),
@ -130,215 +31,6 @@ DEFAULT_DHT_NODES = [
('lbrynet3.lbry.io', 4444)
]
ENVIRONMENT = Env(
is_generous_host=(bool, True),
run_on_startup=(bool, False),
download_directory=(str, default_download_directory),
max_upload=(float, 0.0),
max_download=(float, 0.0),
upload_log=(bool, True),
delete_blobs_on_remove=(bool, True),
use_upnp=(bool, True),
run_reflector_server=(bool, False),
startup_scripts=(list, []),
# TODO: this doesn't seem like the kind of thing that should
# be configured; move it elsewhere.
last_version=(dict, {'lbrynet': '0.0.1', 'lbryum': '0.0.1'}),
peer_port=(int, 3333),
dht_node_port=(int, 4444),
reflector_port=(int, 5566),
download_timeout=(int, 30),
max_search_results=(int, 25),
cache_time=(int, 150),
search_timeout=(float, 5.0),
host_ui=(bool, True),
check_ui_requirements=(bool, True),
local_ui_path=(str, ''),
api_port=(int, 5279),
search_servers=(list, ['lighthouse1.lbry.io:50005']),
data_rate=(float, .0001), # points/megabyte
min_info_rate=(float, .02), # points/1000 infos
min_valuable_info_rate=(float, .05), # points/1000 infos
min_valuable_hash_rate=(float, .05), # points/1000 infos
max_connections_per_stream=(int, 5),
known_dht_nodes=(list, DEFAULT_DHT_NODES, server_port),
pointtrader_server=(str, 'http://127.0.0.1:2424'),
reflector_servers=(list, [("reflector.lbry.io", 5566)], server_port),
wallet=(str, LBRYUM_WALLET),
ui_branch=(str, "master"),
default_ui_branch=(str, 'master'),
data_dir=(str, default_data_dir),
lbryum_wallet_dir=(str, default_lbryum_dir),
use_auth_http=(bool, False),
sd_download_timeout=(int, 3),
# By default, daemon will block all cross origin requests
# but if this is set, this value will be used for the
# Access-Control-Allow-Origin. For example
# set to '*' to allow all requests, or set to 'http://localhost:8080'
# if you're running a test UI on that port
allowed_origin=(str, ''),
# TODO: this field is more complicated than it needs to be because
# it goes through a Fee validator when loaded by the exchange rate
# manager. Look into refactoring the exchange rate conversion to
# take in a simpler form.
#
# TODO: writing json on the cmd line is a pain, come up with a nicer
# parser for this data structure. (maybe MAX_KEY_FEE=USD:25
max_key_fee=(json.loads, {'USD': {'amount': 25.0, 'address': ''}}),
# Changing this value is not-advised as it could potentially
# expose the lbrynet daemon to the outside world which would
# give an attacker access to your wallet and you could lose
# all of your credits.
API_INTERFACE=(str, "localhost"),
bittrex_feed=(str, "https://bittrex.com/api/v1.1/public/getmarkethistory"),
reflector_reupload=(bool, True),
)
class AdjustableSettings(Settings):
_excluded = ['get_dict', 'update', 'environ']
"""Settings that are allowed to be overriden by the user"""
def __init__(self, environ=None):
self.environ = environ or ENVIRONMENT
for opt in self.environ.original_schema:
self.__dict__[opt] = self.environ(opt)
Settings.__init__(self)
def __getattr__(self, attr):
if attr in self.environ.original_schema:
return self.environ(attr)
return self.__getattribute__(attr)
class ApplicationSettings(Settings):
"""Settings that are constants and shouldn't be overriden"""
def __init__(self):
self.MAX_HANDSHAKE_SIZE = 64*KB
self.MAX_REQUEST_SIZE = 64*KB
self.MAX_BLOB_REQUEST_SIZE = 64*KB
self.MAX_RESPONSE_INFO_SIZE = 64*KB
self.MAX_BLOB_INFOS_TO_REQUEST = 20
self.BLOBFILES_DIR = "blobfiles"
self.BLOB_SIZE = 2*MB
self.LOG_FILE_NAME = "lbrynet.log"
self.LOG_POST_URL = "https://lbry.io/log-upload"
self.CRYPTSD_FILE_EXTENSION = ".cryptsd"
self.API_ADDRESS = "lbryapi"
self.ICON_PATH = ICON_PATH
self.APP_NAME = APP_NAME
self.PROTOCOL_PREFIX = PROTOCOL_PREFIX
self.WALLET_TYPES = [LBRYUM_WALLET, LBRYCRD_WALLET]
self.SOURCE_TYPES = ['lbry_sd_hash', 'url', 'btih']
self.CURRENCIES = {
'BTC': {'type': 'crypto'},
'LBC': {'type': 'crypto'},
'USD': {'type': 'fiat'},
}
self.LOGGLY_TOKEN = 'LJEzATH4AzRgAwxjAP00LwZ2YGx3MwVgZTMuBQZ3MQuxLmOv'
self.ANALYTICS_ENDPOINT = 'https://api.segment.io/v1'
self.ANALYTICS_TOKEN = 'Ax5LZzR1o3q3Z3WjATASDwR5rKyHH0qOIRIbLmMXn2H='
self.SLACK_WEBHOOK = ('nUE0pUZ6Yl9bo29epl5moTSwnl5wo20ip2IlqzywMKZiIQSFZR5'
'AHx4mY0VmF0WQZ1ESEP9kMHZlp1WzJwWOoKN3ImR1M2yUAaMyqGZ=')
self.DB_REVISION_FILE_NAME = 'db_revision'
Settings.__init__(self)
APPLICATION_SETTINGS = AdjustableSettings()
ADJUSTABLE_SETTINGS = AdjustableSettings()
class DefaultSettings(ApplicationSettings, AdjustableSettings):
__fixed = APPLICATION_SETTINGS.get_dict().keys()
def __init__(self):
ApplicationSettings.__init__(self)
AdjustableSettings.__init__(self)
def get_dict(self):
d = ApplicationSettings.get_dict(self)
d.update(AdjustableSettings.get_dict(self))
return d
DEFAULT_SETTINGS = DefaultSettings()
class Config(DefaultSettings):
__shared_state = copy.deepcopy(DEFAULT_SETTINGS.get_dict())
@property
def ORIGIN(self):
return "http://%s:%i" % (DEFAULT_SETTINGS.API_INTERFACE, self.api_port)
@property
def REFERER(self):
return "http://%s:%i/" % (DEFAULT_SETTINGS.API_INTERFACE, self.api_port)
@property
def API_CONNECTION_STRING(self):
return "http://%s:%i/%s" % (
DEFAULT_SETTINGS.API_INTERFACE, self.api_port, DEFAULT_SETTINGS.API_ADDRESS)
@property
def UI_ADDRESS(self):
return "http://%s:%i" % (DEFAULT_SETTINGS.API_INTERFACE, self.api_port)
def get_dict(self):
return {k: self[k] for k in self}
def get_adjustable_settings_dict(self):
return {
opt: val for opt, val in self.get_dict().iteritems()
if opt in self.environ.original_schema
}
def ensure_data_dir(self):
# although there is a risk of a race condition here we don't
# expect there to be multiple processes accessing this
# directory so the risk can be ignored
if not os.path.isdir(self.data_dir):
os.makedirs(self.data_dir)
return self.data_dir
def get_log_filename(self):
"""Return the log file for this platform.
Also ensure the containing directory exists
"""
return os.path.join(self.ensure_data_dir(), self.LOG_FILE_NAME)
def get_db_revision_filename(self):
return os.path.join(self.ensure_data_dir(), self.DB_REVISION_FILE_NAME)
def get_conf_filename(self):
return get_settings_file_ext(self.ensure_data_dir())
def update_settings_from_file(filename=None):
    """Merge settings loaded from ``filename`` into the global ``settings``.

    Falls back to the default conf file when ``filename`` is not given.
    A missing or unreadable file is logged and ignored rather than raised.
    """
    filename = filename or settings.get_conf_filename()
    try:
        updates = load_settings(filename)
        log.info("Loaded settings file: %s", updates)
        settings.update(updates)
    except (IOError, OSError) as ex:
        log.info('%s: Failed to update settings from %s', ex, filename)
def get_settings_file_ext(data_dir):
    """Return the path of the daemon settings file inside ``data_dir``.

    Prefers an existing YAML file, then an existing JSON file, and
    defaults to the YAML path when neither exists yet.
    """
    candidates = [
        os.path.join(data_dir, "daemon_settings.yml"),
        os.path.join(data_dir, "daemon_settings.json"),
    ]
    for candidate in candidates:
        if os.path.isfile(candidate):
            return candidate
    return candidates[0]
settings_decoders = {
'.json': json.loads,
'.yml': yaml.load
@ -349,46 +41,350 @@ settings_encoders = {
'.yml': yaml.safe_dump
}
if sys.platform.startswith('darwin'):
platform = DARWIN
default_download_directory = os.path.join(os.path.expanduser('~'), 'Downloads')
default_data_dir = user_data_dir('LBRY')
default_lbryum_dir = os.path.join(os.path.expanduser('~'), '.lbryum')
elif sys.platform.startswith('win'):
platform = WINDOWS
from lbrynet.winhelpers.knownpaths import get_path, FOLDERID, UserHandle
def load_settings(path):
    """Read and decode the settings file at ``path``.

    The decoder (json or yaml) is chosen by file extension via the
    module-level ``settings_decoders`` table.

    Raises:
        ValueError: if the extension has no registered decoder.
        IOError/OSError: if the file cannot be read.
    """
    ext = os.path.splitext(path)[1]
    decoder = settings_decoders.get(ext)
    if decoder is None:
        # Was a bare `assert`, which is stripped under `python -O`; also the
        # old message prefixed a dot onto `ext`, which already contains one.
        raise ValueError("Unknown settings format %s" % ext)
    with open(path, 'r') as settings_file:
        data = settings_file.read()
    return decoder(data)
default_download_directory = get_path(FOLDERID.Downloads, UserHandle.current)
default_data_dir = os.path.join(
get_path(FOLDERID.RoamingAppData, UserHandle.current), 'lbrynet')
default_lbryum_dir = os.path.join(
get_path(FOLDERID.RoamingAppData, UserHandle.current), 'lbryum')
else:
platform = LINUX
default_download_directory = os.path.join(os.path.expanduser('~'), 'Downloads')
default_data_dir = os.path.join(os.path.expanduser('~'), '.lbrynet')
default_lbryum_dir = os.path.join(os.path.expanduser('~'), '.lbryum')
ICON_PATH = 'icons' if platform is WINDOWS else 'app.icns'
# TODO: be careful with this. If a setting is overriden by an environment variable
# or command line flag we don't want to persist it for future settings.
def save_settings(path=None):
    """Persist the adjustable settings of the global ``settings`` object.

    Writes to ``path``, defaulting to the active conf file; the encoder
    (json/yaml) is picked from the file extension.
    """
    path = path or settings.get_conf_filename()
    to_save = settings.get_adjustable_settings_dict()
    ext = os.path.splitext(path)[1]
    encoder = settings_encoders.get(ext, False)
    # NOTE(review): assert is stripped under -O; consider raising ValueError.
    assert encoder is not False, "Unknown settings format .%s" % ext
    with open(path, 'w') as settings_file:
        settings_file.write(encoder(to_save))
def server_port(server_and_port):
server, port = server_and_port.split(':')
return server, int(port)
class Env(envparse.Env):
    """An Env parser that automatically namespaces the variables with LBRY"""

    def __init__(self, **schema):
        # Keep the caller's un-namespaced schema so settings can later be
        # looked up by their plain names.
        self.original_schema = schema
        my_schema = {
            self._convert_key(key): self._convert_value(value)
            for key, value in schema.items()
        }
        envparse.Env.__init__(self, **my_schema)

    def __call__(self, key, *args, **kwargs):
        # Look up the LBRY_-prefixed environment variable for `key`.
        my_key = self._convert_key(key)
        return super(Env, self).__call__(my_key, *args, **kwargs)

    @staticmethod
    def _convert_key(key):
        # e.g. 'api_port' -> 'LBRY_API_PORT'
        return ENV_NAMESPACE + key.upper()

    @staticmethod
    def _convert_value(value):
        """
        Allow value to be specified as a tuple or list. If you do this, the tuple/list
        must be of the form (cast, default) or (cast, default, subcast)
        """
        if isinstance(value, (tuple, list)):
            new_value = {'cast': value[0], 'default': value[1]}
            if len(value) == 3:
                new_value['subcast'] = value[2]
            return new_value
        return value
FIXED_SETTINGS = {
'ANALYTICS_ENDPOINT': 'https://api.segment.io/v1',
'ANALYTICS_TOKEN': 'Ax5LZzR1o3q3Z3WjATASDwR5rKyHH0qOIRIbLmMXn2H=',
'API_ADDRESS': 'lbryapi',
'APP_NAME': APP_NAME,
'BLOBFILES_DIR': 'blobfiles',
'BLOB_SIZE': 2 * MB,
'CRYPTSD_FILE_EXTENSION': '.cryptsd',
'CURRENCIES': {
'BTC': {'type': 'crypto'},
'LBC': {'type': 'crypto'},
'USD': {'type': 'fiat'},
},
'DB_REVISION_FILE_NAME': 'db_revision',
'ICON_PATH': ICON_PATH,
'LOGGLY_TOKEN': 'LJEzATH4AzRgAwxjAP00LwZ2YGx3MwVgZTMuBQZ3MQuxLmOv',
'LOG_FILE_NAME': 'lbrynet.log',
'LOG_POST_URL': 'https://lbry.io/log-upload',
'MAX_BLOB_REQUEST_SIZE': 64 * KB,
'MAX_HANDSHAKE_SIZE': 64 * KB,
'MAX_REQUEST_SIZE': 64 * KB,
'MAX_RESPONSE_INFO_SIZE': 64 * KB,
'MAX_BLOB_INFOS_TO_REQUEST': 20,
'PROTOCOL_PREFIX': PROTOCOL_PREFIX,
'SLACK_WEBHOOK': ('nUE0pUZ6Yl9bo29epl5moTSwnl5wo20ip2IlqzywMKZiIQSFZR5'
'AHx4mY0VmF0WQZ1ESEP9kMHZlp1WzJwWOoKN3ImR1M2yUAaMyqGZ='),
'SOURCE_TYPES': ['lbry_sd_hash', 'url', 'btih'],
'WALLET_TYPES': [LBRYUM_WALLET, LBRYCRD_WALLET],
}
ADJUSTABLE_SETTINGS = {
# By default, daemon will block all cross origin requests
# but if this is set, this value will be used for the
# Access-Control-Allow-Origin. For example
# set to '*' to allow all requests, or set to 'http://localhost:8080'
# if you're running a test UI on that port
'allowed_origin': (str, ''),
# Changing this value is not-advised as it could potentially
# expose the lbrynet daemon to the outside world which would
# give an attacker access to your wallet and you could lose
# all of your credits.
'api_host': (str, 'localhost'),
'api_port': (int, 5279),
'bittrex_feed': (str, 'https://bittrex.com/api/v1.1/public/getmarkethistory'),
'cache_time': (int, 150),
'check_ui_requirements': (bool, True),
'data_dir': (str, default_data_dir),
'data_rate': (float, .0001), # points/megabyte
'default_ui_branch': (str, 'master'),
'delete_blobs_on_remove': (bool, True),
'dht_node_port': (int, 4444),
'download_directory': (str, default_download_directory),
'download_timeout': (int, 30),
'host_ui': (bool, True),
'is_generous_host': (bool, True),
'known_dht_nodes': (list, DEFAULT_DHT_NODES, server_port),
# TODO: this should not be configured; move it elsewhere
'last_version': (dict, {'lbrynet': '0.0.1', 'lbryum': '0.0.1'}),
'lbryum_wallet_dir': (str, default_lbryum_dir),
'local_ui_path': (str, ''),
'max_connections_per_stream': (int, 5),
'max_download': (float, 0.0),
# TODO: this field is more complicated than it needs to be because
# it goes through a Fee validator when loaded by the exchange rate
# manager. Look into refactoring the exchange rate conversion to
# take in a simpler form.
#
# TODO: writing json on the cmd line is a pain, come up with a nicer
# parser for this data structure. (maybe MAX_KEY_FEE': USD:25
'max_key_fee': (json.loads, {'USD': {'amount': 25.0, 'address': ''}}),
'max_search_results': (int, 25),
'max_upload': (float, 0.0),
'min_info_rate': (float, .02), # points/1000 infos
'min_valuable_hash_rate': (float, .05), # points/1000 infos
'min_valuable_info_rate': (float, .05), # points/1000 infos
'peer_port': (int, 3333),
'pointtrader_server': (str, 'http://127.0.0.1:2424'),
'reflector_port': (int, 5566),
'reflector_reupload': (bool, True),
'reflector_servers': (list, [('reflector.lbry.io', 5566)], server_port),
'run_on_startup': (bool, False),
'run_reflector_server': (bool, False),
'sd_download_timeout': (int, 3),
'search_servers': (list, ['lighthouse1.lbry.io:50005']),
'search_timeout': (float, 5.0),
'startup_scripts': (list, []),
'ui_branch': (str, 'master'),
'upload_log': (bool, True),
'use_auth_http': (bool, False),
'use_upnp': (bool, True),
'wallet': (str, LBRYUM_WALLET),
}
class Config:
    """Layered lbrynet configuration.

    The effective value of a setting is resolved from several layers,
    lowest to highest priority:

        defaults (fixed + adjustable) < conf file < environment < cli < runtime

    Fixed settings can never be overridden via set().
    """

    def __init__(self, fixed_defaults, adjustable_defaults, conf_file_settings=None,
                 environment=None, cli_settings=None):
        self._lbry_id = None
        self._session_id = base58.b58encode(utils.generate_id())
        self.__fixed = fixed_defaults
        self.__adjustable = adjustable_defaults
        self._init_defaults()
        # The original signature used mutable default arguments ({}), which
        # are shared across instances; None sentinels avoid that pitfall.
        self.conf_file_settings = {} if conf_file_settings is None else conf_file_settings  # from daemon_settings.yml
        self.env_settings = self._parse_environment(environment)  # from environment variables
        self.cli_settings = {} if cli_settings is None else cli_settings  # from command_line flags/args
        self.runtime_settings = {}  # set by self.set() during runtime
        self._assert_valid_settings()
        self._combine_settings()

    def __repr__(self):
        return repr(self.combined_settings)

    def __iter__(self):
        # Plain dict iteration works on both py2 and py3 (unlike iterkeys()).
        for k in self.combined_settings:
            yield k

    def __getitem__(self, name):
        return self.get(name)

    def __setitem__(self, name, value):
        return self.set(name, value)

    def __contains__(self, name):
        return name in self.combined_settings

    def _parse_environment(self, environment):
        """Collect values for every adjustable setting from env variables."""
        env_settings = {}
        if environment is None:
            environment = Env(**self.__adjustable)
        if environment:
            assert isinstance(environment, Env)
            for opt in environment.original_schema:
                env_settings[opt] = environment(opt)
        return env_settings

    def _init_defaults(self):
        # defaults = fixed settings plus each adjustable setting's default
        # value (the second element of its (cast, default[, subcast]) tuple).
        self.defaults = self.__fixed.copy()
        for k, v in self.__adjustable.items():
            self.defaults[k] = v[1]

    def _assert_valid_settings(self):
        """Raise KeyError if any layer contains an unknown setting name."""
        for s in [self.conf_file_settings, self.env_settings, self.cli_settings,
                  self.runtime_settings]:
            for name in s:
                if name not in self.defaults:
                    # Was a bare `assert`, which is stripped under `python -O`.
                    raise KeyError('%s is not a valid setting' % name)

    def _combine_settings(self):
        """Recompute combined_settings by layering dicts in priority order."""
        self.combined_settings = {}
        for s in [self.defaults, self.conf_file_settings, self.env_settings, self.cli_settings,
                  self.runtime_settings]:
            self.combined_settings.update(s)

    def get(self, name):
        """Return the effective value of ``name``.

        Raises KeyError if ``name`` is not a known setting.
        """
        if name not in self.defaults:
            raise KeyError('%s is not a valid setting' % name)
        return self.combined_settings[name]

    def set(self, name, value, set_conf_setting=False):
        """Set ``name`` at runtime priority.

        If ``set_conf_setting`` is True the value is also staged for the
        conf file. Raises KeyError for unknown or fixed settings.
        """
        if name not in self.defaults or name in self.__fixed:
            raise KeyError(name)
        self.runtime_settings[name] = value
        if set_conf_setting:
            self.conf_file_settings[name] = value
        self._combine_settings()

    def set_cli_settings(self, cli_settings):
        """Replace the command-line settings layer and recombine."""
        self.cli_settings = cli_settings
        self._assert_valid_settings()
        self._combine_settings()

    def update(self, updated_settings, set_conf_setting=False):
        """Apply valid entries from ``updated_settings``; unknown or fixed
        keys are silently skipped."""
        for k, v in updated_settings.items():
            try:
                self.set(k, v, set_conf_setting=set_conf_setting)
            except (KeyError, AssertionError):
                pass

    def get_current_settings_dict(self):
        return self.combined_settings

    def get_adjustable_settings_dict(self):
        """Return only the adjustable (user-overridable) settings."""
        return {
            opt: val for opt, val in self.get_current_settings_dict().items()
            if opt in self.__adjustable
        }

    def save_conf_file_settings(self):
        """Write the conf-file settings layer back to disk."""
        path = self.get_conf_filename()
        ext = os.path.splitext(path)[1]
        encoder = settings_encoders.get(ext, False)
        assert encoder is not False, 'Unknown settings format .%s' % ext
        with open(path, 'w') as settings_file:
            settings_file.write(encoder(self.conf_file_settings))

    def load_conf_file_settings(self):
        """Load settings from the conf file into the conf-file layer.

        A missing or unreadable file is logged and ignored.
        """
        path = self.get_conf_filename()
        ext = os.path.splitext(path)[1]
        decoder = settings_decoders.get(ext, False)
        assert decoder is not False, 'Unknown settings format .%s' % ext
        try:
            with open(path, 'r') as settings_file:
                data = settings_file.read()
            decoded = self._fix_old_conf_file_settings(decoder(data))
            log.info('Loaded settings file: %s', path)
            self.conf_file_settings.update(decoded)
            self._assert_valid_settings()
            self._combine_settings()
        except (IOError, OSError) as err:
            log.info('%s: Failed to update settings from %s', err, path)

    @staticmethod
    def _fix_old_conf_file_settings(settings_dict):
        # Migrate or drop settings that were renamed/removed in older files.
        if 'API_INTERFACE' in settings_dict:
            settings_dict['api_host'] = settings_dict['API_INTERFACE']
            del settings_dict['API_INTERFACE']
        if 'startup_scripts' in settings_dict:
            del settings_dict['startup_scripts']
        return settings_dict

    def ensure_data_dir(self):
        # although there is a risk of a race condition here we don't
        # expect there to be multiple processes accessing this
        # directory so the risk can be ignored
        if not os.path.isdir(self['data_dir']):
            os.makedirs(self['data_dir'])
        return self['data_dir']

    def get_log_filename(self):
        """
        Return the log file for this platform.
        Also ensure the containing directory exists.
        """
        return os.path.join(self.ensure_data_dir(), self['LOG_FILE_NAME'])

    def get_api_connection_string(self):
        return 'http://%s:%i/%s' % (self['api_host'], self['api_port'], self['API_ADDRESS'])

    def get_ui_address(self):
        return 'http://%s:%i' % (self['api_host'], self['api_port'])

    def get_db_revision_filename(self):
        return os.path.join(self.ensure_data_dir(), self['DB_REVISION_FILE_NAME'])

    def get_conf_filename(self):
        """Return the settings file path: an existing .yml, else an existing
        .json, defaulting to the .yml path when neither exists."""
        data_dir = self.ensure_data_dir()
        yml_path = os.path.join(data_dir, 'daemon_settings.yml')
        json_path = os.path.join(data_dir, 'daemon_settings.json')
        if os.path.isfile(yml_path):
            return yml_path
        elif os.path.isfile(json_path):
            return json_path
        else:
            return yml_path

    def get_lbry_id(self):
        """Return the persistent node id, creating and storing one on first
        use (base58-encoded on disk, raw bytes in memory)."""
        lbry_id_filename = os.path.join(self.ensure_data_dir(), 'lbryid')
        if not self._lbry_id:
            if os.path.isfile(lbry_id_filename):
                with open(lbry_id_filename, 'r') as lbryid_file:
                    self._lbry_id = base58.b58decode(lbryid_file.read())
        if not self._lbry_id:
            self._lbry_id = utils.generate_id()
            with open(lbry_id_filename, 'w') as lbryid_file:
                lbryid_file.write(base58.b58encode(self._lbry_id))
        return self._lbry_id

    def get_session_id(self):
        return self._session_id
# type: Config
settings = None
def initialize_settings():
def initialize_settings(load_conf_file=True):
global settings
settings = Config()
settings.lbryid = get_lbryid()
settings.session_id = base58.b58encode(utils.generate_id())
def get_lbryid():
lbryid_filename = os.path.join(settings.ensure_data_dir(), "lbryid")
if os.path.isfile(lbryid_filename):
with open(lbryid_filename, "r") as lbryid_file:
return base58.b58decode(lbryid_file.read())
else:
lbryid = utils.generate_id()
with open(lbryid_filename, "w") as lbryid_file:
lbryid_file.write(base58.b58encode(lbryid))
return lbryid
if settings is None:
settings = Config(FIXED_SETTINGS, ADJUSTABLE_SETTINGS)
if load_conf_file:
settings.load_conf_file_settings()

View file

@ -89,7 +89,7 @@ class HashBlob(object):
def set_length(self, length):
if self.length is not None and length == self.length:
return True
if self.length is None and 0 <= length <= conf.settings.BLOB_SIZE:
if self.length is None and 0 <= length <= conf.settings['BLOB_SIZE']:
self.length = length
return True
log.warning("Got an invalid length. Previous length: %s, Invalid length: %s",

View file

@ -5,9 +5,9 @@ from decimal import Decimal
class BasePaymentRateManager(object):
def __init__(self, rate=None, info_rate=None):
self.min_blob_data_payment_rate = rate if rate is not None else conf.settings.data_rate
self.min_blob_data_payment_rate = rate if rate is not None else conf.settings['data_rate']
self.min_blob_info_payment_rate = (
info_rate if info_rate is not None else conf.settings.min_info_rate)
info_rate if info_rate is not None else conf.settings['min_info_rate'])
class PaymentRateManager(object):
@ -47,7 +47,7 @@ class NegotiatedPaymentRateManager(object):
self.base = base
self.points_paid = 0.0
self.blob_tracker = availability_tracker
self.generous = generous if generous is not None else conf.settings.is_generous_host
self.generous = generous if generous is not None else conf.settings['is_generous_host']
self.strategy = get_default_strategy(self.blob_tracker,
base_price=self.base.min_blob_data_payment_rate,
is_generous=generous)

View file

@ -24,7 +24,7 @@ class MeanAvailabilityWeightedPrice(object):
def __init__(self, tracker, base_price=None, alpha=1.0):
self.blob_tracker = tracker
base_price = base_price if base_price is not None else conf.settings.data_rate
base_price = base_price if base_price is not None else conf.settings['data_rate']
self.base_price = Decimal(base_price)
self.alpha = Decimal(alpha)

View file

@ -19,7 +19,7 @@ class Strategy(object):
def __init__(self, price_model, max_rate, min_rate, is_generous=None):
self.price_model = price_model
self.is_generous = (
is_generous if is_generous is not None else conf.settings.is_generous_host)
is_generous if is_generous is not None else conf.settings['is_generous_host'])
self.accepted_offers = {}
self.pending_sent_offers = {}
self.offers_sent = {}

View file

@ -48,7 +48,7 @@ class ClientProtocol(Protocol):
self._blob_download_request.write(data)
else:
self._response_buff += data
if len(self._response_buff) > conf.settings.MAX_RESPONSE_INFO_SIZE:
if len(self._response_buff) > conf.settings['MAX_RESPONSE_INFO_SIZE']:
log.warning("Response is too large. Size %s", len(self._response_buff))
self.transport.loseConnection()
response, extra_data = self._get_valid_response(self._response_buff)

View file

@ -17,7 +17,7 @@ class ClientBlobRequest(ClientPaidRequest):
def __init__(self, request_dict, response_identifier, write_func, finished_deferred,
cancel_func, blob):
if blob.length is None:
max_pay_units = conf.settings.BLOB_SIZE
max_pay_units = conf.settings['BLOB_SIZE']
else:
max_pay_units = blob.length
ClientPaidRequest.__init__(self, request_dict, response_identifier, max_pay_units)

View file

@ -136,7 +136,7 @@ class ConnectionManager(object):
self._manage_deferred = defer.Deferred()
from twisted.internet import reactor
if len(self._peer_connections) < conf.settings.max_connections_per_stream:
if len(self._peer_connections) < conf.settings['max_connections_per_stream']:
try:
ordered_request_creators = self._rank_request_creator_connections()
peers = yield self._get_new_peers(ordered_request_creators)

View file

@ -144,7 +144,7 @@ def configure_analytics_handler(analytics_manager):
def get_loggly_url(token=None, version=None):
token = token or utils.deobfuscate(conf.settings.LOGGLY_TOKEN)
token = token or utils.deobfuscate(conf.settings['LOGGLY_TOKEN'])
version = version or lbrynet.__version__
return LOGGLY_URL.format(token=token, tag='lbrynet-' + version)
@ -334,7 +334,7 @@ class LogUploader(object):
'type': self.get_type(log_type),
'log': log_contents
}
requests.post(conf.settings.LOG_POST_URL, params)
requests.post(conf.settings['LOG_POST_URL'], params)
def log_contents(self, exclude_previous):
with open(self.log_file) as f:

View file

@ -66,7 +66,7 @@ class CryptStreamBlobMaker(object):
self.length = 0
def write(self, data):
max_bytes_to_write = conf.settings.BLOB_SIZE - self.length - 1
max_bytes_to_write = conf.settings['BLOB_SIZE'] - self.length - 1
done = False
if max_bytes_to_write <= len(data):
num_bytes_to_write = max_bytes_to_write

View file

@ -127,7 +127,7 @@ def create_lbry_file(session, lbry_file_manager, file_name, file_handle, key=Non
def make_stream_desc_file(stream_hash):
log.debug("creating the stream descriptor file")
descriptor_file_path = os.path.join(
session.db_dir, file_name + conf.settings.CRYPTSD_FILE_EXTENSION)
session.db_dir, file_name + conf.settings['CRYPTSD_FILE_EXTENSION'])
descriptor_writer = PlainStreamDescriptorWriter(descriptor_file_path)
d = get_sd_info(lbry_file_manager.stream_info_manager, stream_hash, True)

View file

@ -95,9 +95,9 @@ class ManagedEncryptedFileDownloader(EncryptedFileSaver):
return d
def _reupload(self):
if not conf.settings.reflector_reupload:
if not conf.settings['reflector_reupload']:
return
reflector_server = random.choice(conf.settings.reflector_servers)
reflector_server = random.choice(conf.settings['reflector_servers'])
return reupload.check_and_restore_availability(self, reflector_server)
@defer.inlineCallbacks

View file

@ -24,7 +24,7 @@ class LiveStreamCreator(CryptStreamCreator):
self.stream_info_manager = stream_info_manager
self.delete_after_num = delete_after_num
self.secret_pass_phrase = secret_pass_phrase
self.file_extension = conf.settings.CRYPTSD_FILE_EXTENSION
self.file_extension = conf.settings['CRYPTSD_FILE_EXTENSION']
self.finished_blob_hashes = {}
def _save_stream(self):

View file

@ -137,7 +137,7 @@ class LiveStreamMetadataHandler(object):
if count is not None:
further_blobs_request['count'] = count
else:
further_blobs_request['count'] = conf.settings.MAX_BLOB_INFOS_TO_REQUEST
further_blobs_request['count'] = conf.settings['MAX_BLOB_INFOS_TO_REQUEST']
log.debug("Requesting %s blob infos from %s", str(further_blobs_request['count']), str(peer))
r_dict = {'further_blobs': further_blobs_request}
response_identifier = 'further_blobs'

View file

@ -197,10 +197,11 @@ class Daemon(AuthJSONRPCServer):
LBRYnet daemon, a jsonrpc interface to lbry functions
"""
def __init__(self, root, analytics_manager):
AuthJSONRPCServer.__init__(self, conf.settings.use_auth_http)
def __init__(self, root, analytics_manager, upload_logs_on_shutdown=True):
AuthJSONRPCServer.__init__(self, conf.settings['use_auth_http'])
reactor.addSystemEventTrigger('before', 'shutdown', self._shutdown)
self.upload_logs_on_shutdown = upload_logs_on_shutdown
self.allowed_during_startup = [
'stop', 'status', 'version',
# delete these once they are fully removed:
@ -209,36 +210,35 @@ class Daemon(AuthJSONRPCServer):
]
last_version = {'last_version': {'lbrynet': lbrynet_version, 'lbryum': lbryum_version}}
conf.settings.update(last_version)
self.db_dir = conf.settings.data_dir
self.download_directory = conf.settings.download_directory
self.db_dir = conf.settings['data_dir']
self.download_directory = conf.settings['download_directory']
self.created_data_dir = False
if not os.path.exists(self.db_dir):
os.mkdir(self.db_dir)
self.created_data_dir = True
if conf.settings.BLOBFILES_DIR == "blobfiles":
if conf.settings['BLOBFILES_DIR'] == "blobfiles":
self.blobfile_dir = os.path.join(self.db_dir, "blobfiles")
else:
log.info("Using non-default blobfiles directory: %s", conf.settings.BLOBFILES_DIR)
self.blobfile_dir = conf.settings.BLOBFILES_DIR
log.info("Using non-default blobfiles directory: %s", conf.settings['BLOBFILES_DIR'])
self.blobfile_dir = conf.settings['BLOBFILES_DIR']
self.run_on_startup = conf.settings.run_on_startup
self.data_rate = conf.settings.data_rate
self.max_key_fee = conf.settings.max_key_fee
self.max_upload = conf.settings.max_upload
self.max_download = conf.settings.max_download
self.upload_log = conf.settings.upload_log
self.search_timeout = conf.settings.search_timeout
self.download_timeout = conf.settings.download_timeout
self.max_search_results = conf.settings.max_search_results
self.run_reflector_server = conf.settings.run_reflector_server
self.wallet_type = conf.settings.wallet
self.delete_blobs_on_remove = conf.settings.delete_blobs_on_remove
self.peer_port = conf.settings.peer_port
self.reflector_port = conf.settings.reflector_port
self.dht_node_port = conf.settings.dht_node_port
self.use_upnp = conf.settings.use_upnp
self.cache_time = conf.settings.cache_time
self.startup_scripts = conf.settings.startup_scripts
self.run_on_startup = conf.settings['run_on_startup']
self.data_rate = conf.settings['data_rate']
self.max_key_fee = conf.settings['max_key_fee']
self.max_upload = conf.settings['max_upload']
self.max_download = conf.settings['max_download']
self.upload_log = conf.settings['upload_log']
self.search_timeout = conf.settings['search_timeout']
self.download_timeout = conf.settings['download_timeout']
self.max_search_results = conf.settings['max_search_results']
self.run_reflector_server = conf.settings['run_reflector_server']
self.wallet_type = conf.settings['wallet']
self.delete_blobs_on_remove = conf.settings['delete_blobs_on_remove']
self.peer_port = conf.settings['peer_port']
self.reflector_port = conf.settings['reflector_port']
self.dht_node_port = conf.settings['dht_node_port']
self.use_upnp = conf.settings['use_upnp']
self.cache_time = conf.settings['cache_time']
self.startup_status = STARTUP_STAGES[0]
self.connected_to_internet = True
@ -252,14 +252,14 @@ class Daemon(AuthJSONRPCServer):
self.db_revision_file = conf.settings.get_db_revision_filename()
self.session = None
self.uploaded_temp_files = []
self._session_id = conf.settings.session_id
self._session_id = conf.settings.get_session_id()
# TODO: this should probably be passed into the daemon, or
# possibly have the entire log upload functionality taken out
# of the daemon, but I don't want to deal with that now
self.log_uploader = log_support.LogUploader.load('lbrynet', self.log_file)
self.analytics_manager = analytics_manager
self.lbryid = conf.settings.lbryid
self.lbryid = conf.settings.get_lbry_id()
self.wallet_user = None
self.wallet_password = None
@ -298,9 +298,6 @@ class Daemon(AuthJSONRPCServer):
self.announced_startup = True
self.startup_status = STARTUP_STAGES[5]
log.info("Started lbrynet-daemon")
if len(self.startup_scripts):
log.info("Scheduling scripts")
reactor.callLater(3, self._run_scripts)
if self.first_run:
d = self._upload_log(log_type="first_run")
@ -319,7 +316,7 @@ class Daemon(AuthJSONRPCServer):
self.looping_call_manager.start(Checker.CONNECTION_STATUS, 1)
self.exchange_rate_manager.start()
if conf.settings.host_ui:
if conf.settings['host_ui']:
self.lbry_ui_manager.update_checker.start(1800, now=False)
yield self.lbry_ui_manager.setup()
if launch_ui:
@ -559,11 +556,14 @@ class Daemon(AuthJSONRPCServer):
self._clean_up_temp_files()
try:
d = self._upload_log(
log_type="close", exclude_previous=False if self.first_run else True)
except Exception:
log.warn('Failed to upload log', exc_info=True)
if self.upload_logs_on_shutdown:
try:
d = self._upload_log(
log_type="close", exclude_previous=False if self.first_run else True)
except Exception:
log.warn('Failed to upload log', exc_info=True)
d = defer.succeed(None)
else:
d = defer.succeed(None)
d.addCallback(lambda _: self._stop_server())
@ -603,26 +603,26 @@ class Daemon(AuthJSONRPCServer):
for key, setting_type in setting_types.iteritems():
if key in settings:
if can_update_key(settings, key, setting_type):
conf.settings.update({key: settings[key]})
conf.settings.update({key: settings[key]}, set_conf_setting=True)
else:
try:
converted = setting_type(settings[key])
conf.settings.update({key: converted})
conf.settings.update({key: converted}, set_conf_setting=True)
except Exception as err:
log.warning(err.message)
log.warning("error converting setting '%s' to type %s", key, setting_type)
conf.save_settings()
conf.settings.save_conf_file_settings()
self.run_on_startup = conf.settings.run_on_startup
self.data_rate = conf.settings.data_rate
self.max_key_fee = conf.settings.max_key_fee
self.download_directory = conf.settings.download_directory
self.max_upload = conf.settings.max_upload
self.max_download = conf.settings.max_download
self.upload_log = conf.settings.upload_log
self.download_timeout = conf.settings.download_timeout
self.search_timeout = conf.settings.search_timeout
self.cache_time = conf.settings.cache_time
self.run_on_startup = conf.settings['run_on_startup']
self.data_rate = conf.settings['data_rate']
self.max_key_fee = conf.settings['max_key_fee']
self.download_directory = conf.settings['download_directory']
self.max_upload = conf.settings['max_upload']
self.max_download = conf.settings['max_download']
self.upload_log = conf.settings['upload_log']
self.download_timeout = conf.settings['download_timeout']
self.search_timeout = conf.settings['search_timeout']
self.cache_time = conf.settings['cache_time']
return defer.succeed(True)
@ -701,8 +701,8 @@ class Daemon(AuthJSONRPCServer):
elif self.wallet_type == LBRYUM_WALLET:
log.info("Using lbryum wallet")
config = {'auto_connect': True}
if conf.settings.lbryum_wallet_dir:
config['lbryum_path'] = conf.settings.lbryum_wallet_dir
if conf.settings['lbryum_wallet_dir']:
config['lbryum_path'] = conf.settings['lbryum_wallet_dir']
storage = SqliteStorage(self.db_dir)
wallet = LBRYumWallet(storage, config)
return defer.succeed(wallet)
@ -717,16 +717,16 @@ class Daemon(AuthJSONRPCServer):
def create_session(wallet):
self.session = Session(
conf.settings.data_rate,
conf.settings['data_rate'],
db_dir=self.db_dir,
lbryid=self.lbryid,
blob_dir=self.blobfile_dir,
dht_node_port=self.dht_node_port,
known_dht_nodes=conf.settings.known_dht_nodes,
known_dht_nodes=conf.settings['known_dht_nodes'],
peer_port=self.peer_port,
use_upnp=self.use_upnp,
wallet=wallet,
is_generous=conf.settings.is_generous_host
is_generous=conf.settings['is_generous_host']
)
self.startup_status = STARTUP_STAGES[2]
@ -757,7 +757,7 @@ class Daemon(AuthJSONRPCServer):
return defer.succeed(None)
def _download_sd_blob(self, sd_hash, timeout=None):
timeout = timeout if timeout is not None else conf.settings.sd_download_timeout
timeout = timeout if timeout is not None else conf.settings['sd_download_timeout']
def cb(result):
if not r.called:
@ -785,7 +785,7 @@ class Daemon(AuthJSONRPCServer):
Add a lbry file to the file manager, start the download, and return the new lbry file.
If it already exists in the file manager, return the existing lbry file
"""
timeout = timeout if timeout is not None else conf.settings.download_timeout
timeout = timeout if timeout is not None else conf.settings['download_timeout']
helper = _DownloadNameHelper(
self, name, timeout, download_directory, file_name, wait_for_write)
@ -903,7 +903,7 @@ class Daemon(AuthJSONRPCServer):
if self.session.payment_rate_manager.generous:
return 0.0
return size / (10 ** 6) * conf.settings.data_rate
return size / (10 ** 6) * conf.settings['data_rate']
def get_est_cost_using_known_size(self, name, size):
"""
@ -1029,26 +1029,6 @@ class Daemon(AuthJSONRPCServer):
)
return run_reflector_factory(factory)
def _run_scripts(self):
    """Run any startup scripts configured in the daemon settings.

    Scripts flagged ``run_once`` are stripped from ``self.startup_scripts``
    (and the persisted conf settings) before execution so they do not run
    again on the next daemon start.

    Returns a fired deferred so callers can chain on completion.
    """
    # Snapshot the list to run BEFORE possibly rewriting it below.
    # NOTE(review): the original appeared to bind this name only inside
    # the if-branch, which would raise NameError when no 'run_once'
    # scripts are present — binding it up front fixes that.
    startup_scripts = self.startup_scripts
    if any('run_once' in script for script in self.startup_scripts):
        log.info("Removing one time startup scripts")
        remaining_scripts = [s for s in self.startup_scripts if 'run_once' not in s]
        # Keep the in-memory settings and the daemon's copy in sync,
        # then persist so the one-shot scripts stay removed.
        self.startup_scripts = conf.settings.startup_scripts = remaining_scripts
        conf.save_settings()
    for script in startup_scripts:
        if script['script_name'] == 'migrateto025':
            log.info("Running migrator to 0.2.5")
            # Imported lazily so the migration module is only loaded when needed.
            from lbrynet.lbrynet_daemon.daemon_scripts.migrateto025 import run as run_migrate
            run_migrate(self)
        if script['script_name'] == 'Autofetcher':
            log.info("Starting autofetcher script")
            from lbrynet.lbrynet_daemon.daemon_scripts.Autofetcher import run as run_autofetcher
            run_autofetcher(self)
    return defer.succeed(None)
############################################################################
# #
@ -1291,9 +1271,7 @@ class Daemon(AuthJSONRPCServer):
"""
log.info("Get daemon settings")
settings_dict = conf.settings.get_dict()
settings_dict['lbryid'] = binascii.hexlify(settings_dict['lbryid'])
return self._render_response(settings_dict)
return self._render_response(conf.settings.get_current_settings_dict())
@AuthJSONRPCServer.auth_required
def jsonrpc_set_settings(self, p):
@ -2150,7 +2128,7 @@ class Daemon(AuthJSONRPCServer):
sd blob, dict
"""
sd_hash = p[FileID.SD_HASH]
timeout = p.get('timeout', conf.settings.sd_download_timeout)
timeout = p.get('timeout', conf.settings['sd_download_timeout'])
d = self._download_sd_blob(sd_hash, timeout)
d.addCallbacks(
lambda r: self._render_response(r),
@ -2488,7 +2466,7 @@ class _DownloadNameHelper(object):
wait_for_write=True):
self.daemon = daemon
self.name = name
self.timeout = timeout if timeout is not None else conf.settings.download_timeout
self.timeout = timeout if timeout is not None else conf.settings['download_timeout']
if not download_directory or not os.path.isdir(download_directory):
self.download_directory = daemon.download_directory
else:
@ -2774,7 +2752,7 @@ def get_loggly_query_string(lbry_id):
def report_bug_to_slack(message, lbry_id, platform_name, app_version):
webhook = utils.deobfuscate(conf.settings.SLACK_WEBHOOK)
webhook = utils.deobfuscate(conf.settings['SLACK_WEBHOOK'])
payload_template = "os: %s\n version: %s\n<%s|loggly>\n%s"
payload_params = (
platform_name,
@ -2797,7 +2775,7 @@ def get_lbry_file_search_value(p):
def run_reflector_factory(factory):
reflector_server = random.choice(conf.settings.reflector_servers)
reflector_server = random.choice(conf.settings['reflector_servers'])
reflector_address, reflector_port = reflector_server
log.info("Start reflector client")
d = reactor.resolve(reflector_address)

View file

@ -16,7 +16,6 @@ def main():
_, arguments = parser.parse_known_args()
conf.initialize_settings()
conf.update_settings_from_file()
api = LBRYAPIClient.get_client()
try:

View file

@ -12,7 +12,6 @@ from lbrynet.core import utils
from lbrynet.lbrynet_daemon.auth.client import LBRYAPIClient
from lbrynet.lbrynet_daemon.DaemonServer import DaemonServer
log = logging.getLogger(__name__)
@ -32,19 +31,22 @@ def stop():
def start():
utils.setup_certs_for_windows()
conf.initialize_settings()
parser = argparse.ArgumentParser(description="Launch lbrynet-daemon")
parser.add_argument("--wallet",
help="lbryum or ptc for testing, default lbryum",
type=str,
default=conf.LBRYUM_WALLET)
default=conf.settings['wallet'])
parser.add_argument("--ui", help="path to custom UI folder", default=None)
parser.add_argument(
"--branch",
help='Branch of lbry-web-ui repo to use, defaults to {}'.format(conf.settings.ui_branch),
default=conf.settings.ui_branch)
help='Branch of lbry-web-ui repo to use, defaults to {}'.format(conf.settings['ui_branch']),
default=conf.settings['ui_branch'])
parser.add_argument('--no-launch', dest='launchui', action="store_false")
parser.add_argument("--http-auth", dest="useauth", action="store_true")
parser.add_argument("--http-auth", dest="useauth", action="store_true",
default=conf.settings['use_auth_http'])
parser.add_argument(
'--log-to-console', dest='logtoconsole', action='store_true',
help=('Set to enable console logging. Set the --verbose flag '
@ -57,21 +59,17 @@ def start():
'--verbose', nargs="*",
help=('Enable debug output. Optionally specify loggers for which debug output '
'should selectively be applied.'))
args = parser.parse_args()
conf.initialize_settings()
utils.setup_certs_for_windows()
conf.update_settings_from_file()
update_settings_from_args(args)
lbrynet_log = conf.settings.get_log_filename()
log_support.configure_logging(lbrynet_log, args.logtoconsole, args.verbose)
log.debug('Final Settings: %s', conf.settings.get_dict())
log.debug('Final Settings: %s', conf.settings.get_current_settings_dict())
try:
log.debug('Checking for an existing lbrynet daemon instance')
JSONRPCProxy.from_url(conf.settings.API_CONNECTION_STRING).is_running()
JSONRPCProxy.from_url(conf.settings.get_api_connection_string()).is_running()
log.info("lbrynet-daemon is already running")
if not args.logtoconsole:
print "lbrynet-daemon is already running"
@ -86,8 +84,8 @@ def start():
print "Starting lbrynet-daemon from command line"
print "To view activity, view the log file here: " + lbrynet_log
print "Web UI is available at http://%s:%i" % (
conf.settings.API_INTERFACE, conf.settings.api_port)
print "JSONRPC API is available at " + conf.settings.API_CONNECTION_STRING
conf.settings['api_host'], conf.settings['api_port'])
print "JSONRPC API is available at " + conf.settings.get_api_connection_string()
print "To quit press ctrl-c or call 'stop' via the API"
if test_internet_connection():
@ -105,14 +103,14 @@ def start():
def update_settings_from_args(args):
    """Apply command-line argument overrides to the global settings.

    Only ``--ui`` and ``--branch`` are optional; the auth and wallet
    choices always carry their (possibly defaulted) argparse values.
    """
    # NOTE(review): the source span rendered both the pre-refactor
    # (to_pass / conf.settings.update) and post-refactor (cli_settings /
    # set_cli_settings) variants; the refactored variant is kept here.
    cli_settings = {}
    if args.ui:
        cli_settings['local_ui_path'] = args.ui
    if args.branch:
        cli_settings['ui_branch'] = args.branch
    cli_settings['use_auth_http'] = args.useauth
    cli_settings['wallet'] = args.wallet
    conf.settings.set_cli_settings(cli_settings)
@defer.inlineCallbacks

View file

@ -28,14 +28,14 @@ class DaemonServer(object):
self._api = Daemon(self.root, self.analytics_manager)
self.root.putChild("view", HostedEncryptedFile(self._api))
self.root.putChild("upload", EncryptedFileUpload(self._api))
self.root.putChild(conf.settings.API_ADDRESS, self._api)
self.root.putChild(conf.settings['API_ADDRESS'], self._api)
lbrynet_server = server.Site(get_site_base(use_auth, self.root))
lbrynet_server.requestFactory = DaemonRequest
try:
reactor.listenTCP(
conf.settings.api_port, lbrynet_server, interface=conf.settings.API_INTERFACE)
conf.settings['api_port'], lbrynet_server, interface=conf.settings['api_host'])
except error.CannotListenError:
log.info('Daemon already running, exiting app')
sys.exit(1)
@ -58,7 +58,7 @@ def get_site_base(use_auth, root):
def create_auth_session(root):
pw_path = os.path.join(conf.settings.data_dir, ".api_keys")
pw_path = os.path.join(conf.settings['data_dir'], ".api_keys")
initialize_api_key_file(pw_path)
checker = PasswordChecker.load_file(pw_path)
realm = HttpPasswordRealm(root)

View file

@ -35,7 +35,7 @@ class GetStream(object):
max_key_fee, data_rate=0.5, timeout=None,
download_directory=None, file_name=None):
if timeout is None:
timeout = conf.settings.download_timeout
timeout = conf.settings['download_timeout']
self.wallet = wallet
self.resolved_name = None
self.description = None

View file

@ -85,7 +85,7 @@ class BittrexFeed(MarketFeed):
self,
"BTCLBC",
"Bittrex",
conf.settings.bittrex_feed,
conf.settings['bittrex_feed'],
{'market': 'BTC-LBC', 'count': 50},
BITTREX_FEE
)

View file

@ -34,7 +34,7 @@ class Publisher(object):
self.fee = None
self.stream_hash = None
# TODO: this needs to be passed into the constructor
reflector_server = random.choice(conf.settings.reflector_servers)
reflector_server = random.choice(conf.settings['reflector_servers'])
self.reflector_server, self.reflector_port = reflector_server[0], reflector_server[1]
self.metadata = {}
@ -84,7 +84,7 @@ class Publisher(object):
def start_reflector(self):
# TODO: is self.reflector_server unused?
reflector_server = random.choice(conf.settings.reflector_servers)
reflector_server = random.choice(conf.settings['reflector_servers'])
reflector_address, reflector_port = reflector_server[0], reflector_server[1]
log.info("Reflecting new publication")
factory = reflector.ClientFactory(

View file

@ -88,10 +88,10 @@ class HostedEncryptedFile(resource.Resource):
d.addCallback(lambda sd_hash: self._api._get_lbry_file_by_sd_hash(sd_hash))
d.addCallback(lambda lbry_file: self._make_stream_producer(request, lbry_file))
elif request.args['name'][0] in self._api.waiting_on.keys():
request.redirect(conf.settings.UI_ADDRESS + "/?watch=" + request.args['name'][0])
request.redirect(conf.settings.get_ui_address() + "/?watch=" + request.args['name'][0])
request.finish()
else:
request.redirect(conf.settings.UI_ADDRESS)
request.redirect(conf.settings.get_ui_address())
request.finish()
return server.NOT_DONE_YET

View file

@ -21,7 +21,7 @@ log = logging.getLogger(__name__)
class UIManager(object):
def __init__(self, root):
self.ui_root = os.path.join(conf.settings.data_dir, "lbry-ui")
self.ui_root = os.path.join(conf.settings['data_dir'], "lbry-ui")
self.active_dir = os.path.join(self.ui_root, "active")
self.update_dir = os.path.join(self.ui_root, "update")
@ -60,11 +60,11 @@ class UIManager(object):
self.loaded_requirements = None
def setup(self, branch=None, check_requirements=None, user_specified=None):
local_ui_path = user_specified or conf.settings.local_ui_path
local_ui_path = user_specified or conf.settings['local_ui_path']
self.branch = branch or conf.settings.ui_branch
self.branch = branch or conf.settings['ui_branch']
self.check_requirements = (check_requirements if check_requirements is not None
else conf.settings.check_ui_requirements)
else conf.settings['check_ui_requirements'])
# Note that this currently overrides any manual setting of UI.
# It might be worth considering changing that behavior but the expectation
@ -212,7 +212,7 @@ class UIManager(object):
return load_ui(self.root, self.active_dir)
def launch(self):
webbrowser.open(conf.settings.UI_ADDRESS)
webbrowser.open(conf.settings.get_ui_address())
class BundledUIManager(object):

View file

@ -101,7 +101,7 @@ class AuthAPIClient(object):
url=None, login_url=None):
api_key_name = API_KEY_NAME if not key_name else key_name
pw_path = os.path.join(conf.settings.data_dir, ".api_keys") if not pw_path else pw_path
pw_path = os.path.join(conf.settings['data_dir'], ".api_keys") if not pw_path else pw_path
if not key:
keys = load_api_keys(pw_path)
api_key = keys.get(api_key_name, False)
@ -110,9 +110,9 @@ class AuthAPIClient(object):
if login_url is None:
service_url = "http://%s:%s@%s:%i/%s" % (api_key_name,
api_key.secret,
conf.settings.API_INTERFACE,
conf.settings.api_port,
conf.settings.API_ADDRESS)
conf.settings['api_host'],
conf.settings['api_port'],
conf.settings['API_ADDRESS'])
else:
service_url = login_url
id_count = count
@ -158,5 +158,5 @@ class AuthAPIClient(object):
class LBRYAPIClient(object):
    """Factory for the appropriate daemon API client."""

    @staticmethod
    def get_client():
        """Return an authenticated client when ``use_auth_http`` is enabled,
        otherwise a plain JSON-RPC proxy pointed at the daemon."""
        # NOTE(review): the source span rendered both the pre-refactor
        # (attribute-style settings) and post-refactor (dict-style) return
        # statements; the refactored form is kept, as a guard return
        # instead of a backslash-continued conditional expression.
        if conf.settings['use_auth_http']:
            return AuthAPIClient.config()
        return JSONRPCProxy.from_url(conf.settings.get_api_connection_string())

View file

@ -99,7 +99,7 @@ class AuthJSONRPCServer(AuthorizedBase):
def __init__(self, use_authentication=None):
AuthorizedBase.__init__(self)
self._use_authentication = (
use_authentication if use_authentication is not None else conf.settings.use_auth_http)
use_authentication if use_authentication is not None else conf.settings['use_auth_http'])
self.announced_startup = False
self.allowed_during_startup = []
self.sessions = {}
@ -108,8 +108,8 @@ class AuthJSONRPCServer(AuthorizedBase):
return NotImplementedError()
def _set_headers(self, request, data, update_secret=False):
if conf.settings.allowed_origin:
request.setHeader("Access-Control-Allow-Origin", conf.settings.allowed_origin)
if conf.settings['allowed_origin']:
request.setHeader("Access-Control-Allow-Origin", conf.settings['allowed_origin'])
request.setHeader("Content-Type", "text/json")
request.setHeader("Content-Length", str(len(data)))
if update_secret:
@ -248,12 +248,12 @@ class AuthJSONRPCServer(AuthorizedBase):
def _check_source_of_request(self, source):
if source is None:
return True
if conf.settings.API_INTERFACE == '0.0.0.0':
if conf.settings['api_host'] == '0.0.0.0':
return True
server, port = self.get_server_port(source)
return (
server == conf.settings.API_INTERFACE and
port == conf.settings.api_port)
server == conf.settings['api_host'] and
port == conf.settings['api_port'])
def get_server_port(self, origin):
parsed = urlparse.urlparse(origin)

View file

@ -47,7 +47,7 @@ def get_body_from_request(path, data):
jsondata = FileBodyProducer(StringIO(json.dumps(data)))
agent = Agent(reactor)
d = agent.request(
'POST', conf.settings.pointtrader_server + path,
'POST', conf.settings['pointtrader_server'] + path,
Headers({'Content-Type': ['application/json']}), jsondata)
d.addCallback(get_body)
return d

View file

@ -27,7 +27,7 @@ class LBRYDaemonApp(AppKit.NSApplication):
self.connection = False
statusbar = AppKit.NSStatusBar.systemStatusBar()
self.statusitem = statusbar.statusItemWithLength_(AppKit.NSVariableStatusItemLength)
self.icon = AppKit.NSImage.alloc().initByReferencingFile_(conf.settings.ICON_PATH)
self.icon = AppKit.NSImage.alloc().initByReferencingFile_(conf.settings['ICON_PATH'])
self.icon.setScalesWhenResized_(True)
self.icon.setSize_((20, 20))
self.statusitem.setImage_(self.icon)
@ -39,7 +39,7 @@ class LBRYDaemonApp(AppKit.NSApplication):
"Quit", "applicationShouldTerminate:", "")
self.menubarMenu.addItem_(self.quit)
self.statusitem.setMenu_(self.menubarMenu)
self.statusitem.setToolTip_(conf.settings.APP_NAME)
self.statusitem.setToolTip_(conf.settings['APP_NAME'])
if test_internet_connection():
notify("Starting LBRY")
@ -53,7 +53,7 @@ class LBRYDaemonApp(AppKit.NSApplication):
)
def openui_(self, sender):
webbrowser.open(conf.settings.UI_ADDRESS)
webbrowser.open(conf.settings.get_ui_address())
# this code is from the example
# https://pythonhosted.org/pyobjc/examples/Cocoa/Twisted/WebServicesTool/index.html

View file

@ -15,7 +15,6 @@ log = logging.getLogger()
def main():
conf.initialize_settings()
conf.update_settings_from_file()
log_file = conf.settings.get_log_filename()
log_support.configure_logging(log_file, console=True)
app = LBRYDaemonApp.sharedApplication()

View file

@ -70,9 +70,9 @@ class LBRYURIHandler(object):
@staticmethod
def open_address(lbry_name):
    """Open the LBRY web UI in a browser.

    When *lbry_name* names a specific claim, the UI is opened on that
    claim's page; for empty/None/"lbry" the UI root is opened instead.
    """
    # NOTE(review): the source span rendered both the pre-refactor
    # (UI_ADDRESS attribute) and post-refactor (get_ui_address()) calls;
    # the refactored calls are kept.
    if lbry_name == "lbry" or lbry_name == "" or lbry_name is None:
        webbrowser.open(conf.settings.get_ui_address())
    else:
        webbrowser.open(conf.settings.get_ui_address() + "/?show=" + lbry_name)
def main(args):

View file

@ -240,7 +240,7 @@ def main(lbry_name=None):
return SysTrayIcon(icon, hover_text, menu_options, on_quit=stop)
def openui_(sender):
webbrowser.open(conf.settings.UI_ADDRESS)
webbrowser.open(conf.settings.get_ui_address())
def replyToApplicationShouldTerminate_():
try:
@ -252,11 +252,11 @@ def main(lbry_name=None):
replyToApplicationShouldTerminate_()
if getattr(sys, 'frozen', False) and os.name == "nt":
icon = os.path.join(os.path.dirname(sys.executable), conf.settings.ICON_PATH, 'lbry16.ico')
icon = os.path.join(os.path.dirname(sys.executable), conf.settings['ICON_PATH'], 'lbry16.ico')
else:
icon = os.path.join(conf.settings.ICON_PATH, 'lbry16.ico')
icon = os.path.join(conf.settings['ICON_PATH'], 'lbry16.ico')
hover_text = conf.settings.APP_NAME
hover_text = conf.settings['APP_NAME']
menu_options = (('Open', icon, openui_),)
if not test_internet_connection():
@ -277,12 +277,11 @@ def main(lbry_name=None):
if __name__ == '__main__':
utils.setup_certs_for_windows()
conf.initialize_settings()
conf.update_settings_from_file()
log_file = conf.settings.get_log_filename()
log_support.configure_logging(log_file, console=True)
lbry_daemon = JSONRPCProxy.from_url(conf.settings.API_CONNECTION_STRING)
lbry_daemon = JSONRPCProxy.from_url(conf.settings.get_api_connection_string())
try:
daemon_running = lbry_daemon.is_running()

View file

@ -69,8 +69,8 @@ class MyBlobManager(BlobManager.BlobManager):
def getWallet():
    """Build an LBRYum wallet backed by a fresh temporary database directory.

    NOTE(review): the caller is responsible for cleaning up the mkdtemp
    directory — confirm this is acceptable for the test harness.
    """
    # NOTE(review): the source span rendered both the pre-refactor
    # (attribute-style) and post-refactor (dict-style) settings access;
    # the refactored access is kept.
    config = {'auto_connect': True}
    # Honor an explicitly configured lbryum wallet directory when set.
    if conf.settings['lbryum_wallet_dir']:
        config['lbryum_path'] = conf.settings['lbryum_wallet_dir']
    db_dir = tempfile.mkdtemp()
    return Wallet.LBRYumWallet(db_dir, config)

View file

@ -66,7 +66,7 @@ def main(args=None):
lbryid=utils.generate_id(),
blob_dir=blob_dir,
dht_node_port=4444,
known_dht_nodes=conf.settings.known_dht_nodes,
known_dht_nodes=conf.settings['known_dht_nodes'],
peer_port=3333,
use_upnp=False,
wallet=wallet

View file

@ -42,6 +42,7 @@ log = logging.getLogger('main')
def main(args=None):
conf.initialize_settings()
parser = argparse.ArgumentParser()
parser.add_argument('destination', type=conf.server_port, nargs='+')
parser.add_argument('--names', nargs='*')
@ -69,7 +70,7 @@ def main(args=None):
lbryid=utils.generate_id(),
blob_dir=blob_dir,
dht_node_port=4444,
known_dht_nodes=conf.settings.known_dht_nodes,
known_dht_nodes=conf.settings['known_dht_nodes'],
peer_port=3333,
use_upnp=False,
wallet=wallet,

View file

@ -2,8 +2,3 @@
# and so we need to ensure that it is also
# setup for the tests
from lbrynet.core import log_support
from lbrynet import conf
# TODO: stop doing this, would be better to mock out the settings
conf.initialize_settings()

View file

@ -117,7 +117,7 @@ class LbryUploader(object):
db_dir = "server"
os.mkdir(db_dir)
self.session = Session(
conf.settings.data_rate, db_dir=db_dir, lbryid="abcd",
conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, lbryid="abcd",
peer_finder=peer_finder, hash_announcer=hash_announcer, peer_port=5553,
use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
blob_tracker_class=DummyBlobAvailabilityTracker,
@ -222,11 +222,11 @@ def start_lbry_reuploader(sd_hash, kill_event, dead_event,
os.mkdir(db_dir)
os.mkdir(blob_dir)
session = Session(conf.settings.data_rate, db_dir=db_dir, lbryid="abcd" + str(n),
session = Session(conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, lbryid="abcd" + str(n),
peer_finder=peer_finder, hash_announcer=hash_announcer,
blob_dir=None, peer_port=peer_port,
use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
blob_tracker_class=DummyBlobAvailabilityTracker, is_generous=conf.settings.is_generous_host)
blob_tracker_class=DummyBlobAvailabilityTracker, is_generous=conf.ADJUSTABLE_SETTINGS['is_generous_host'][1])
stream_info_manager = TempEncryptedFileMetadataManager()
@ -329,10 +329,11 @@ def start_live_server(sd_hash_queue, kill_event, dead_event):
db_dir = "server"
os.mkdir(db_dir)
session = Session(conf.settings.data_rate, db_dir=db_dir, lbryid="abcd",
session = Session(conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, lbryid="abcd",
peer_finder=peer_finder, hash_announcer=hash_announcer, peer_port=5553,
use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
blob_tracker_class=DummyBlobAvailabilityTracker, is_generous=conf.settings.is_generous_host)
blob_tracker_class=DummyBlobAvailabilityTracker,
is_generous=conf.ADJUSTABLE_SETTINGS['is_generous_host'][1])
stream_info_manager = DBLiveStreamMetadataManager(session.db_dir, hash_announcer)
logging.debug("Created the session")
@ -457,11 +458,12 @@ def start_blob_uploader(blob_hash_queue, kill_event, dead_event, slow, is_genero
os.mkdir(db_dir)
os.mkdir(blob_dir)
session = Session(conf.settings.data_rate, db_dir=db_dir, lbryid="efgh",
session = Session(conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, lbryid="efgh",
peer_finder=peer_finder, hash_announcer=hash_announcer,
blob_dir=blob_dir, peer_port=peer_port,
use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
blob_tracker_class=DummyBlobAvailabilityTracker, is_generous=conf.settings.is_generous_host)
blob_tracker_class=DummyBlobAvailabilityTracker,
is_generous=conf.ADJUSTABLE_SETTINGS['is_generous_host'][1])
if slow is True:
session.rate_limiter.set_ul_limit(2 ** 11)
@ -533,6 +535,7 @@ def start_blob_uploader(blob_hash_queue, kill_event, dead_event, slow, is_genero
class TestTransfer(TestCase):
def setUp(self):
mocks.mock_conf_settings(self)
self.server_processes = []
self.session = None
self.stream_info_manager = None
@ -635,7 +638,7 @@ class TestTransfer(TestCase):
os.mkdir(blob_dir)
self.session = Session(
conf.settings.data_rate, db_dir=db_dir, lbryid="abcd",
conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, lbryid="abcd",
peer_finder=peer_finder, hash_announcer=hash_announcer,
blob_dir=blob_dir, peer_port=5553,
use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
@ -723,7 +726,7 @@ class TestTransfer(TestCase):
os.mkdir(db_dir)
self.session = Session(
conf.settings.data_rate, db_dir=db_dir, lbryid="abcd",
conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, lbryid="abcd",
peer_finder=peer_finder, hash_announcer=hash_announcer, blob_dir=None,
peer_port=5553, use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
blob_tracker_class=DummyBlobAvailabilityTracker, dht_node_class=Node
@ -825,12 +828,12 @@ class TestTransfer(TestCase):
os.mkdir(blob_dir)
self.session = Session(
conf.settings.data_rate, db_dir=db_dir, lbryid="abcd",
conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, lbryid="abcd",
peer_finder=peer_finder, hash_announcer=hash_announcer,
blob_dir=blob_dir, peer_port=5553,
use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
blob_tracker_class=DummyBlobAvailabilityTracker,
is_generous=conf.settings.is_generous_host)
is_generous=conf.ADJUSTABLE_SETTINGS['is_generous_host'][1])
d1 = self.wait_for_hash_from_queue(blob_hash_queue_1)
d2 = self.wait_for_hash_from_queue(blob_hash_queue_2)
@ -903,11 +906,12 @@ class TestTransfer(TestCase):
os.mkdir(db_dir)
os.mkdir(blob_dir)
self.session = Session(conf.settings.data_rate, db_dir=db_dir, lbryid="abcd",
peer_finder=peer_finder, hash_announcer=hash_announcer,
blob_dir=blob_dir, peer_port=5553, use_upnp=False,
rate_limiter=rate_limiter, wallet=wallet,
blob_tracker_class=DummyBlobAvailabilityTracker, is_generous=conf.settings.is_generous_host)
self.session = Session(conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir,
lbryid="abcd", peer_finder=peer_finder,
hash_announcer=hash_announcer, blob_dir=blob_dir, peer_port=5553,
use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
blob_tracker_class=DummyBlobAvailabilityTracker,
is_generous=conf.ADJUSTABLE_SETTINGS['is_generous_host'][1])
self.stream_info_manager = DBEncryptedFileMetadataManager(self.session.db_dir)
self.lbry_file_manager = EncryptedFileManager(self.session, self.stream_info_manager, sd_identifier)
@ -1023,12 +1027,12 @@ class TestTransfer(TestCase):
os.mkdir(db_dir)
os.mkdir(blob_dir)
self.session = Session(conf.settings.data_rate, db_dir=db_dir, lbryid="abcd",
peer_finder=peer_finder, hash_announcer=hash_announcer,
blob_dir=None, peer_port=5553,
use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
blob_tracker_class=DummyBlobAvailabilityTracker,
is_generous=conf.settings.is_generous_host)
self.session = Session(conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir,
lbryid="abcd", peer_finder=peer_finder,
hash_announcer=hash_announcer, blob_dir=None,
peer_port=5553, use_upnp=False, rate_limiter=rate_limiter,
wallet=wallet, blob_tracker_class=DummyBlobAvailabilityTracker,
is_generous=conf.ADJUSTABLE_SETTINGS['is_generous_host'][1])
self.stream_info_manager = TempEncryptedFileMetadataManager()

View file

@ -23,6 +23,7 @@ from tests import mocks
class TestReflector(unittest.TestCase):
def setUp(self):
mocks.mock_conf_settings(self)
self.session = None
self.stream_info_manager = None
self.lbry_file_manager = None
@ -83,7 +84,7 @@ class TestReflector(unittest.TestCase):
os.mkdir(db_dir)
self.session = Session.Session(
conf.settings.data_rate,
conf.settings['data_rate'],
db_dir=db_dir,
lbryid="abcd",
peer_finder=peer_finder,

View file

@ -32,6 +32,7 @@ DummyBlobAvailabilityTracker = mocks.BlobAvailabilityTracker
class TestStreamify(TestCase):
def setUp(self):
mocks.mock_conf_settings(self)
self.session = None
self.stream_info_manager = None
self.lbry_file_manager = None
@ -70,7 +71,7 @@ class TestStreamify(TestCase):
os.mkdir(blob_dir)
self.session = Session(
conf.settings.data_rate, db_dir=db_dir, lbryid="abcd",
conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, lbryid="abcd",
peer_finder=peer_finder, hash_announcer=hash_announcer,
blob_dir=blob_dir, peer_port=5553,
use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
@ -126,7 +127,7 @@ class TestStreamify(TestCase):
os.mkdir(blob_dir)
self.session = Session(
conf.settings.data_rate, db_dir=db_dir, lbryid="abcd",
conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, lbryid="abcd",
peer_finder=peer_finder, hash_announcer=hash_announcer,
blob_dir=blob_dir, peer_port=5553,
use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,

View file

@ -6,7 +6,7 @@ from twisted.internet import defer
from lbrynet.core import PTCWallet
from lbrynet.core import BlobAvailability
from lbrynet import conf
KB = 2**10
@ -205,3 +205,14 @@ create_stream_sd_file = {
'suggested_file_name': '746573745f66696c65',
'stream_hash': '6d27fbe10c86d81aacfb897c7a426d0a2214f5a299455a6d315c0f998c4b3545c2dc60906122d94653c23b1898229e3f'
}
def mock_conf_settings(obj, settings=None):
    """Replace the global ``conf.settings`` with a fresh Config for a test.

    The previous settings object is restored automatically via
    ``obj.addCleanup`` (so *obj* is expected to be a TestCase-like object).

    :param obj: test instance providing ``addCleanup``
    :param settings: optional dict of overrides applied to the fresh Config
    """
    # Use a None sentinel instead of a mutable default argument ({}):
    # a shared default dict would leak mutations across calls.
    if settings is None:
        settings = {}
    original_settings = conf.settings
    # environment=False keeps env vars from bleeding into test settings.
    conf.settings = conf.Config(conf.FIXED_SETTINGS, conf.ADJUSTABLE_SETTINGS, environment=False)
    conf.settings.update(settings)

    def _reset_settings():
        conf.settings = original_settings

    obj.addCleanup(_reset_settings)

View file

@ -9,11 +9,12 @@ from lbrynet import analytics
from lbrynet.core import Peer
from lbrynet.core.server import BlobRequestHandler
from lbrynet.core.PaymentRateManager import NegotiatedPaymentRateManager, BasePaymentRateManager
from tests.mocks import BlobAvailabilityTracker as DummyBlobAvailabilityTracker
from tests.mocks import BlobAvailabilityTracker as DummyBlobAvailabilityTracker, mock_conf_settings
class TestBlobRequestHandlerQueries(unittest.TestCase):
def setUp(self):
# Replace global conf.settings with a mock Config (restored on cleanup)
# so the handler under test sees deterministic configuration values.
mock_conf_settings(self)
# The handler only needs a blob manager interface, not real blobs.
self.blob_manager = mock.Mock()
# Negotiated rate manager backed by a dummy availability tracker and a
# 0.001 base rate — fixed inputs keep the price negotiation predictable.
self.payment_rate_manager = NegotiatedPaymentRateManager(
BasePaymentRateManager(0.001), DummyBlobAvailabilityTracker())

View file

@ -5,7 +5,7 @@ import mock
from lbrynet.core.PaymentRateManager import NegotiatedPaymentRateManager, BasePaymentRateManager
from lbrynet.core.Strategy import BasicAvailabilityWeightedStrategy
from lbrynet.core.Offer import Offer
from tests.mocks import BlobAvailabilityTracker as DummyBlobAvailabilityTracker
from tests.mocks import BlobAvailabilityTracker as DummyBlobAvailabilityTracker, mock_conf_settings
MAX_NEGOTIATION_TURNS = 10
random.seed(12345)
@ -57,6 +57,9 @@ def calculate_negotation_turns(client_base, host_base, host_is_generous=True, cl
class AvailabilityWeightedStrategyTests(unittest.TestCase):
def setUp(self):
mock_conf_settings(self)
def test_first_offer_is_zero_and_second_is_not_if_offer_not_accepted(self):
strategy = BasicAvailabilityWeightedStrategy(DummyBlobAvailabilityTracker())
peer = "1.1.1.1"

View file

@ -26,6 +26,7 @@ def iv_generator():
class CreateEncryptedFileTest(unittest.TestCase):
timeout = 5
def setUp(self):
# Install mocked global settings (auto-restored via addCleanup).
mocks.mock_conf_settings(self)
# Scratch directory for the encrypted file fixtures; presumably removed
# in tearDown — confirm cleanup exists to avoid leaking temp dirs.
self.tmp_dir = tempfile.mkdtemp()
def tearDown(self):

View file

@ -1,8 +1,7 @@
import mock
import requests
from twisted.trial import unittest
from lbrynet import conf
from tests.mocks import mock_conf_settings
from lbrynet.lbrynet_daemon.auth import server
@ -11,12 +10,7 @@ class AuthJSONRPCServerTest(unittest.TestCase):
# and add useful general utilities like this
# onto it.
def setUp(self):
self.server = server.AuthJSONRPCServer(False)
def _set_setting(self, attr, value):
original = getattr(conf.settings, attr)
setattr(conf.settings, attr, value)
self.addCleanup(lambda: setattr(conf.settings, attr, original))
self.server = server.AuthJSONRPCServer(use_authentication=False)
def test_get_server_port(self):
self.assertSequenceEqual(
@ -25,26 +19,25 @@ class AuthJSONRPCServerTest(unittest.TestCase):
('example.com', 1234), self.server.get_server_port('http://example.com:1234'))
def test_foreign_origin_is_rejected(self):
mock_conf_settings(self) # have to call this to generate Config mock
request = mock.Mock(['getHeader'])
request.getHeader = mock.Mock(return_value='http://example.com')
self.assertFalse(self.server._check_header_source(request, 'Origin'))
def test_wrong_port_is_rejected(self):
self._set_setting('api_port', 1234)
mock_conf_settings(self, {'api_port': 1234})
request = mock.Mock(['getHeader'])
request.getHeader = mock.Mock(return_value='http://localhost:9999')
self.assertFalse(self.server._check_header_source(request, 'Origin'))
def test_matching_origin_is_allowed(self):
self._set_setting('API_INTERFACE', 'example.com')
self._set_setting('api_port', 1234)
mock_conf_settings(self, {'api_host': 'example.com', 'api_port': 1234})
request = mock.Mock(['getHeader'])
request.getHeader = mock.Mock(return_value='http://example.com:1234')
self.assertTrue(self.server._check_header_source(request, 'Origin'))
def test_any_origin_is_allowed(self):
self._set_setting('API_INTERFACE', '0.0.0.0')
self._set_setting('api_port', 80)
mock_conf_settings(self, {'api_host': '0.0.0.0', 'api_port': 80})
request = mock.Mock(['getHeader'])
request.getHeader = mock.Mock(return_value='http://example.com')
self.assertTrue(self.server._check_header_source(request, 'Origin'))
@ -53,8 +46,7 @@ class AuthJSONRPCServerTest(unittest.TestCase):
self.assertTrue(self.server._check_header_source(request, 'Origin'))
def test_matching_referer_is_allowed(self):
self._set_setting('API_INTERFACE', 'the_api')
self._set_setting('api_port', 1111)
mock_conf_settings(self, {'api_host': 'the_api', 'api_port': 1111})
request = mock.Mock(['getHeader'])
request.getHeader = mock.Mock(return_value='http://the_api:1111?settings')
self.assertTrue(self.server._check_header_source(request, 'Referer'))

View file

@ -9,6 +9,7 @@ from lbrynet.core import Session, PaymentRateManager
from lbrynet.lbrynet_daemon.Daemon import Daemon as LBRYDaemon
from lbrynet.lbrynet_daemon import ExchangeRateManager
from lbrynet import conf
from tests.mocks import mock_conf_settings
class MiscTests(unittest.TestCase):
@ -46,13 +47,13 @@ class MiscTests(unittest.TestCase):
def get_test_daemon(data_rate=None, generous=True, with_fee=False):
if data_rate is None:
data_rate = conf.settings.data_rate
data_rate = conf.ADJUSTABLE_SETTINGS['data_rate'][1]
rates = {
'BTCLBC': {'spot': 3.0, 'ts': util.DEFAULT_ISO_TIME + 1},
'USDBTC': {'spot': 2.0, 'ts': util.DEFAULT_ISO_TIME + 2}
}
daemon = LBRYDaemon(None, None)
daemon = LBRYDaemon(None, None, upload_logs_on_shutdown=False)
daemon.session = mock.Mock(spec=Session.Session)
daemon.exchange_rate_manager = ExchangeRateManager.DummyExchangeRateManager(rates)
base_prm = PaymentRateManager.BasePaymentRateManager(rate=data_rate)
@ -80,6 +81,7 @@ def get_test_daemon(data_rate=None, generous=True, with_fee=False):
class TestCostEst(unittest.TestCase):
def setUp(self):
mock_conf_settings(self)
util.resetTime(self)
def test_fee_and_generous_data(self):
@ -91,7 +93,7 @@ class TestCostEst(unittest.TestCase):
def test_fee_and_ungenerous_data(self):
size = 10000000
fake_fee_amount = 4.5
data_rate = conf.settings.data_rate
data_rate = conf.ADJUSTABLE_SETTINGS['data_rate'][1]
correct_result = size / 10**6 * data_rate + fake_fee_amount
daemon = get_test_daemon(generous=False, with_fee=True)
self.assertEquals(daemon.get_est_cost("test", size).result, correct_result)
@ -104,7 +106,7 @@ class TestCostEst(unittest.TestCase):
def test_ungenerous_data_and_no_fee(self):
size = 10000000
data_rate = conf.settings.data_rate
data_rate = conf.ADJUSTABLE_SETTINGS['data_rate'][1]
correct_result = size / 10**6 * data_rate
daemon = get_test_daemon(generous=False)
self.assertEquals(daemon.get_est_cost("test", size).result, correct_result)

View file

@ -12,25 +12,26 @@ class SettingsTest(unittest.TestCase):
def tearDown(self):
# Remove the test environment variable set in setUp so it cannot leak
# into other tests' environment-driven settings lookups.
del os.environ['LBRY_TEST']
def test_envvar_is_read(self):
@staticmethod
def get_mock_config_instance():
env = conf.Env(test=(str, ''))
settings = conf.AdjustableSettings(env)
self.assertEqual('test_string', settings.test)
return conf.Config({}, {'test': (str, '')}, environment=env)
def test_setting_can_be_overriden(self):
env = conf.Env(test=(str, ''))
settings = conf.AdjustableSettings(env)
settings.test = 'my_override'
self.assertEqual('my_override', settings.test)
def test_envvar_is_read(self):
# 'test_string' is expected to come from the LBRY_TEST environment
# variable set in setUp; the Config's Env maps it onto the 'test' key.
settings = self.get_mock_config_instance()
self.assertEqual('test_string', settings['test'])
def test_setting_can_be_overridden(self):
# A direct item assignment must take precedence over the value the
# setting would otherwise receive (default or environment).
settings = self.get_mock_config_instance()
settings['test'] = 'my_override'
self.assertEqual('my_override', settings['test'])
def test_setting_can_be_updated(self):
env = conf.Env(test=(str, ''))
settings = conf.AdjustableSettings(env)
settings = self.get_mock_config_instance()
settings.update({'test': 'my_update'})
self.assertEqual('my_update', settings.test)
self.assertEqual('my_update', settings['test'])
def test_setting_is_in_dict(self):
env = conf.Env(test=(str, ''))
settings = conf.AdjustableSettings(env)
setting_dict = settings.get_dict()
settings = self.get_mock_config_instance()
setting_dict = settings.get_current_settings_dict()
self.assertEqual({'test': 'test_string'}, setting_dict)