Merge pull request #725 from lbryio/update-settings

remove unused settings
This commit is contained in:
Jack Robison 2017-06-21 23:06:54 -04:00 committed by GitHub
commit 732fdaa377
6 changed files with 80 additions and 112 deletions

View file

@@ -9,11 +9,11 @@ at anytime.
## [Unreleased] ## [Unreleased]
### Added ### Added
* * Missing docstring for `blob_list`
* *
### Changed ### Changed
* * Change `max_key_fee` setting to be a dictionary with values for `currency` and `amount`
* *
### Fixed ### Fixed
@@ -26,7 +26,7 @@ at anytime.
* *
### Removed ### Removed
* * Removed unused settings from conf.py and `settings_set`
* *
## [0.13.1] - 2017-06-15 ## [0.13.1] - 2017-06-15

View file

@@ -137,7 +137,6 @@ FIXED_SETTINGS = {
'PROTOCOL_PREFIX': PROTOCOL_PREFIX, 'PROTOCOL_PREFIX': PROTOCOL_PREFIX,
'SLACK_WEBHOOK': ('nUE0pUZ6Yl9bo29epl5moTSwnl5wo20ip2IlqzywMKZiIQSFZR5' 'SLACK_WEBHOOK': ('nUE0pUZ6Yl9bo29epl5moTSwnl5wo20ip2IlqzywMKZiIQSFZR5'
'AHx4mY0VmF0WQZ1ESEP9kMHZlp1WzJwWOoKN3ImR1M2yUAaMyqGZ='), 'AHx4mY0VmF0WQZ1ESEP9kMHZlp1WzJwWOoKN3ImR1M2yUAaMyqGZ='),
'SOURCE_TYPES': ['lbry_sd_hash', 'url', 'btih'],
'WALLET_TYPES': [LBRYUM_WALLET, LBRYCRD_WALLET], 'WALLET_TYPES': [LBRYUM_WALLET, LBRYCRD_WALLET],
} }
@@ -154,40 +153,21 @@ ADJUSTABLE_SETTINGS = {
# give an attacker access to your wallet and you could lose # give an attacker access to your wallet and you could lose
# all of your credits. # all of your credits.
'api_host': (str, 'localhost'), 'api_host': (str, 'localhost'),
'api_port': (int, 5279), 'api_port': (int, 5279),
'cache_time': (int, 150), 'cache_time': (int, 150),
'check_ui_requirements': (bool, True),
'data_dir': (str, default_data_dir), 'data_dir': (str, default_data_dir),
'data_rate': (float, .0001), # points/megabyte 'data_rate': (float, .0001), # points/megabyte
'default_ui_branch': (str, 'master'),
'delete_blobs_on_remove': (bool, True), 'delete_blobs_on_remove': (bool, True),
'dht_node_port': (int, 4444), 'dht_node_port': (int, 4444),
'download_directory': (str, default_download_directory), 'download_directory': (str, default_download_directory),
'download_timeout': (int, 180), 'download_timeout': (int, 180),
'host_ui': (bool, True),
'is_generous_host': (bool, True), 'is_generous_host': (bool, True),
'known_dht_nodes': (list, DEFAULT_DHT_NODES, server_port), 'known_dht_nodes': (list, DEFAULT_DHT_NODES, server_port),
# TODO: this should not be configured; move it elsewhere
'last_version': (dict, {'lbrynet': '0.0.1', 'lbryum': '0.0.1'}),
'lbryum_wallet_dir': (str, default_lbryum_dir), 'lbryum_wallet_dir': (str, default_lbryum_dir),
'local_ui_path': (str, ''),
'max_connections_per_stream': (int, 5), 'max_connections_per_stream': (int, 5),
'max_download': (float, 0.0),
# TODO: this field is more complicated than it needs to be because
# it goes through a Fee validator when loaded by the exchange rate
# manager. Look into refactoring the exchange rate conversion to
# take in a simpler form.
#
# TODO: writing json on the cmd line is a pain, come up with a nicer # TODO: writing json on the cmd line is a pain, come up with a nicer
# parser for this data structure. (maybe MAX_KEY_FEE': USD:25 # parser for this data structure. maybe 'USD:25'
'max_key_fee': (json.loads, {'currency': 'USD', 'amount': 25.0, 'address': ''}), 'max_key_fee': (json.loads, {'currency': 'USD', 'amount': 25.0}),
'max_search_results': (int, 25),
'max_upload': (float, 0.0),
'min_info_rate': (float, .02), # points/1000 infos 'min_info_rate': (float, .02), # points/1000 infos
'min_valuable_hash_rate': (float, .05), # points/1000 infos 'min_valuable_hash_rate': (float, .05), # points/1000 infos
'min_valuable_info_rate': (float, .05), # points/1000 infos 'min_valuable_info_rate': (float, .05), # points/1000 infos
@@ -196,15 +176,10 @@ ADJUSTABLE_SETTINGS = {
'reflector_port': (int, 5566), 'reflector_port': (int, 5566),
'reflect_uploads': (bool, True), 'reflect_uploads': (bool, True),
'reflector_servers': (list, [('reflector.lbry.io', 5566)], server_port), 'reflector_servers': (list, [('reflector.lbry.io', 5566)], server_port),
'run_on_startup': (bool, False),
'run_reflector_server': (bool, False), 'run_reflector_server': (bool, False),
'sd_download_timeout': (int, 3), 'sd_download_timeout': (int, 3),
'share_usage_data': (bool, True), # whether to share usage stats and diagnostic info with LBRY 'share_usage_data': (bool, True), # whether to share usage stats and diagnostic info with LBRY
'peer_search_timeout': (int, 3), 'peer_search_timeout': (int, 3),
'search_servers': (list, ['lighthouse1.lbry.io:50005']),
'search_timeout': (float, 5.0),
'startup_scripts': (list, []),
'ui_branch': (str, 'master'),
'use_auth_http': (bool, False), 'use_auth_http': (bool, False),
'use_upnp': (bool, True), 'use_upnp': (bool, True),
'wallet': (str, LBRYUM_WALLET), 'wallet': (str, LBRYUM_WALLET),

View file

@@ -22,6 +22,7 @@ from lbryschema.claim import ClaimDict
from lbryschema.error import DecodeError from lbryschema.error import DecodeError
from lbryschema.decode import smart_decode from lbryschema.decode import smart_decode
from lbrynet import conf
from lbrynet.core.sqlite_helpers import rerun_if_locked from lbrynet.core.sqlite_helpers import rerun_if_locked
from lbrynet.interfaces import IRequestCreator, IQueryHandlerFactory, IQueryHandler, IWallet from lbrynet.interfaces import IRequestCreator, IQueryHandlerFactory, IQueryHandler, IWallet
from lbrynet.core.client.ClientRequest import ClientRequest from lbrynet.core.client.ClientRequest import ClientRequest
@@ -30,8 +31,6 @@ from lbrynet.core.Error import UnknownClaimID, UnknownURI
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
CLAIM_CACHE_TIME = 600
class ReservedPoints(object): class ReservedPoints(object):
def __init__(self, identifier, amount): def __init__(self, identifier, amount):
@@ -84,7 +83,8 @@ class CachedClaim(object):
self.nout = nout self.nout = nout
def response_dict(self, check_expires=True): def response_dict(self, check_expires=True):
if check_expires and (time.time() - int(self.cache_timestamp)) > CLAIM_CACHE_TIME: if check_expires:
if (time.time() - int(self.cache_timestamp)) > conf.settings['cache_time']:
return return
claim = { claim = {
"height": self.height, "height": self.height,
@@ -370,7 +370,7 @@ class SqliteStorage(MetaDataStorage):
if result: if result:
claim_id, certificate_id, last_modified = result[0] claim_id, certificate_id, last_modified = result[0]
last_modified = int(last_modified) last_modified = int(last_modified)
if check_expire and time.time() - last_modified > CLAIM_CACHE_TIME: if check_expire and time.time() - last_modified > conf.settings['cache_time']:
defer.returnValue(None) defer.returnValue(None)
claim = yield self.get_cached_claim(claim_id) claim = yield self.get_cached_claim(claim_id)
if claim: if claim:

View file

@@ -17,10 +17,8 @@ from twisted.python.failure import Failure
from lbryschema.claim import ClaimDict from lbryschema.claim import ClaimDict
from lbryschema.uri import parse_lbry_uri from lbryschema.uri import parse_lbry_uri
from lbryschema.error import URIParseError from lbryschema.error import URIParseError
from lbryschema.fee import Fee
# TODO: importing this when internet is disabled raises a socket.gaierror # TODO: importing this when internet is disabled raises a socket.gaierror
from lbryum.version import LBRYUM_VERSION
from lbrynet.core.system_info import get_lbrynet_version from lbrynet.core.system_info import get_lbrynet_version
from lbrynet import conf, analytics from lbrynet import conf, analytics
from lbrynet.conf import LBRYCRD_WALLET, LBRYUM_WALLET, PTC_WALLET from lbrynet.conf import LBRYCRD_WALLET, LBRYUM_WALLET, PTC_WALLET
@@ -170,12 +168,7 @@ class Daemon(AuthJSONRPCServer):
AuthJSONRPCServer.__init__(self, conf.settings['use_auth_http']) AuthJSONRPCServer.__init__(self, conf.settings['use_auth_http'])
self.allowed_during_startup = [ self.allowed_during_startup = [
'stop', 'status', 'version', 'stop', 'status', 'version',
# delete these once they are fully removed:
'is_running', 'is_first_run', 'get_time_behind_blockchain', 'daemon_status',
'get_start_notice',
] ]
conf.settings.set('last_version',
{'lbrynet': get_lbrynet_version(), 'lbryum': LBRYUM_VERSION})
self.db_dir = conf.settings['data_dir'] self.db_dir = conf.settings['data_dir']
self.download_directory = conf.settings['download_directory'] self.download_directory = conf.settings['download_directory']
if conf.settings['BLOBFILES_DIR'] == "blobfiles": if conf.settings['BLOBFILES_DIR'] == "blobfiles":
@@ -183,14 +176,9 @@ class Daemon(AuthJSONRPCServer):
else: else:
log.info("Using non-default blobfiles directory: %s", conf.settings['BLOBFILES_DIR']) log.info("Using non-default blobfiles directory: %s", conf.settings['BLOBFILES_DIR'])
self.blobfile_dir = conf.settings['BLOBFILES_DIR'] self.blobfile_dir = conf.settings['BLOBFILES_DIR']
self.run_on_startup = conf.settings['run_on_startup']
self.data_rate = conf.settings['data_rate'] self.data_rate = conf.settings['data_rate']
self.max_key_fee = conf.settings['max_key_fee'] self.max_key_fee = conf.settings['max_key_fee']
self.max_upload = conf.settings['max_upload']
self.max_download = conf.settings['max_download']
self.search_timeout = conf.settings['search_timeout']
self.download_timeout = conf.settings['download_timeout'] self.download_timeout = conf.settings['download_timeout']
self.max_search_results = conf.settings['max_search_results']
self.run_reflector_server = conf.settings['run_reflector_server'] self.run_reflector_server = conf.settings['run_reflector_server']
self.wallet_type = conf.settings['wallet'] self.wallet_type = conf.settings['wallet']
self.delete_blobs_on_remove = conf.settings['delete_blobs_on_remove'] self.delete_blobs_on_remove = conf.settings['delete_blobs_on_remove']
@@ -198,14 +186,11 @@ class Daemon(AuthJSONRPCServer):
self.reflector_port = conf.settings['reflector_port'] self.reflector_port = conf.settings['reflector_port']
self.dht_node_port = conf.settings['dht_node_port'] self.dht_node_port = conf.settings['dht_node_port']
self.use_upnp = conf.settings['use_upnp'] self.use_upnp = conf.settings['use_upnp']
self.cache_time = conf.settings['cache_time']
self.startup_status = STARTUP_STAGES[0] self.startup_status = STARTUP_STAGES[0]
self.connected_to_internet = True self.connected_to_internet = True
self.connection_status_code = None self.connection_status_code = None
self.platform = None self.platform = None
self.first_run = None
self.log_file = conf.settings.get_log_filename()
self.current_db_revision = 3 self.current_db_revision = 3
self.db_revision_file = conf.settings.get_db_revision_filename() self.db_revision_file = conf.settings.get_db_revision_filename()
self.session = None self.session = None
@@ -235,7 +220,7 @@ class Daemon(AuthJSONRPCServer):
self.lbry_file_manager = None self.lbry_file_manager = None
@defer.inlineCallbacks @defer.inlineCallbacks
def setup(self, launch_ui): def setup(self):
reactor.addSystemEventTrigger('before', 'shutdown', self._shutdown) reactor.addSystemEventTrigger('before', 'shutdown', self._shutdown)
self._modify_loggly_formatter() self._modify_loggly_formatter()
@@ -430,32 +415,28 @@ class Daemon(AuthJSONRPCServer):
def _update_settings(self, settings): def _update_settings(self, settings):
setting_types = { setting_types = {
'run_on_startup': bool,
'data_rate': float,
'max_key_fee': float,
'download_directory': str, 'download_directory': str,
'max_upload': float, 'data_rate': float,
'max_download': float,
'download_timeout': int, 'download_timeout': int,
'search_timeout': float, 'max_key_fee': dict,
'use_upnp': bool,
'run_reflector_server': bool,
'cache_time': int, 'cache_time': int,
'reflect_uploads': bool,
'share_usage_data': bool, 'share_usage_data': bool,
'peer_search_timeout': int,
'sd_download_timeout': int,
} }
def can_update_key(settings, key, setting_type):
return (
isinstance(settings[key], setting_type) or
(
key == "max_key_fee" and
isinstance(Fee(settings[key]).amount, setting_type)
)
)
for key, setting_type in setting_types.iteritems(): for key, setting_type in setting_types.iteritems():
if key in settings: if key in settings:
if can_update_key(settings, key, setting_type): if isinstance(settings[key], setting_type):
conf.settings.update({key: settings[key]}, conf.settings.update({key: settings[key]},
data_types=(conf.TYPE_RUNTIME, conf.TYPE_PERSISTED)) data_types=(conf.TYPE_RUNTIME, conf.TYPE_PERSISTED))
elif setting_type is dict and isinstance(settings[key], (unicode, str)):
decoded = json.loads(str(settings[key]))
conf.settings.update({key: decoded},
data_types=(conf.TYPE_RUNTIME, conf.TYPE_PERSISTED))
else: else:
try: try:
converted = setting_type(settings[key]) converted = setting_type(settings[key])
@@ -463,18 +444,14 @@ class Daemon(AuthJSONRPCServer):
data_types=(conf.TYPE_RUNTIME, conf.TYPE_PERSISTED)) data_types=(conf.TYPE_RUNTIME, conf.TYPE_PERSISTED))
except Exception as err: except Exception as err:
log.warning(err.message) log.warning(err.message)
log.warning("error converting setting '%s' to type %s", key, setting_type) log.warning("error converting setting '%s' to type %s from type %s", key,
setting_type, str(type(settings[key])))
conf.settings.save_conf_file_settings() conf.settings.save_conf_file_settings()
self.run_on_startup = conf.settings['run_on_startup']
self.data_rate = conf.settings['data_rate'] self.data_rate = conf.settings['data_rate']
self.max_key_fee = conf.settings['max_key_fee'] self.max_key_fee = conf.settings['max_key_fee']
self.download_directory = conf.settings['download_directory'] self.download_directory = conf.settings['download_directory']
self.max_upload = conf.settings['max_upload']
self.max_download = conf.settings['max_download']
self.download_timeout = conf.settings['download_timeout'] self.download_timeout = conf.settings['download_timeout']
self.search_timeout = conf.settings['search_timeout']
self.cache_time = conf.settings['cache_time']
return defer.succeed(True) return defer.succeed(True)
@@ -1142,16 +1119,41 @@ class Daemon(AuthJSONRPCServer):
""" """
Set daemon settings Set daemon settings
Args: Usage:
'run_on_startup': (bool) currently not supported settings_set [<download_directory> | --download_directory=<download_directory>]
'data_rate': (float) data rate, [<data_rate> | --data_rate=<data_rate>]
'max_key_fee': (float) maximum key fee, [<download_timeout> | --download_timeout=<download_timeout>]
'download_directory': (str) path of where files are downloaded, [<max_key_fee> | --max_key_fee=<max_key_fee>]
'max_upload': (float), currently not supported [<use_upnp> | --use_upnp=<use_upnp>]
'max_download': (float), currently not supported [<run_reflector_server> | --run_reflector_server=<run_reflector_server>]
'download_timeout': (int) download timeout in seconds [<cache_time> | --cache_time=<cache_time>]
'search_timeout': (float) search timeout in seconds [<reflect_uploads> | --reflect_uploads=<reflect_uploads>]
'cache_time': (int) cache timeout in seconds [<share_usage_data> | --share_usage_data=<share_usage_data>]
[<peer_search_timeout> | --peer_search_timeout=<peer_search_timeout>]
[<sd_download_timeout> | --sd_download_timeout=<sd_download_timeout>]
Options:
<download_directory>, --download_directory=<download_directory> : (str)
<data_rate>, --data_rate=<data_rate> : (float), 0.0001
<download_timeout>, --download_timeout=<download_timeout> : (int), 180
<max_key_fee>, --max_key_fee=<max_key_fee> : (dict) maximum key fee for downloads,
in the format: {
"currency": <currency_symbol>,
"amount": <amount>
}. In the CLI, it must be an escaped
JSON string
Supported currency symbols:
LBC
BTC
USD
<use_upnp>, --use_upnp=<use_upnp> : (bool), True
<run_reflector_server>, --run_reflector_server=<run_reflector_server> : (bool), False
<cache_time>, --cache_time=<cache_time> : (int), 150
<reflect_uploads>, --reflect_uploads=<reflect_uploads> : (bool), True
<share_usage_data>, --share_usage_data=<share_usage_data> : (bool), True
<peer_search_timeout>, --peer_search_timeout=<peer_search_timeout> : (int), 3
<sd_download_timeout>, --sd_download_timeout=<sd_download_timeout> : (int), 3
Returns: Returns:
(dict) Updated dictionary of daemon settings (dict) Updated dictionary of daemon settings
""" """
@@ -2402,19 +2404,26 @@ class Daemon(AuthJSONRPCServer):
defer.returnValue("Reflect success") defer.returnValue("Reflect success")
@defer.inlineCallbacks @defer.inlineCallbacks
@AuthJSONRPCServer.flags(needed="-n", finished="-f")
def jsonrpc_blob_list(self, uri=None, stream_hash=None, sd_hash=None, needed=None, def jsonrpc_blob_list(self, uri=None, stream_hash=None, sd_hash=None, needed=None,
finished=None, page_size=None, page=None): finished=None, page_size=None, page=None):
""" """
Returns blob hashes. If not given filters, returns all blobs known by the blob manager Returns blob hashes. If not given filters, returns all blobs known by the blob manager
Args: Usage:
'uri' (optional): (str) filter by blobs in stream for winning claim blob_list [-n] [-f] [<uri> | --uri=<uri>] [<stream_hash> | --stream_hash=<stream_hash>]
'stream_hash' (optional): (str) filter by blobs in given stream hash [<sd_hash> | --sd_hash=<sd_hash>] [<page_size> | --page_size=<page_size>]
'sd_hash' (optional): (str) filter by blobs in given sd hash [<page> | --page=<page>]
'needed' (optional): (bool) only return needed blobs
'finished' (optional): (bool) only return finished blobs Options:
'page_size' (optional): (int) limit number of results returned -n : only return needed blobs
'page' (optional): (int) filter to page x of [page_size] results -f : only return finished blobs
<uri>, --uri=<uri> : filter blobs by stream in a uri
<stream_hash>, --stream_hash=<stream_hash> : filter blobs by stream hash
<sd_hash>, --sd_hash=<sd_hash> : filter blobs by sd hash
<page_size>, --page_size=<page_size> : results page size
<page>, --page=<page> : page of results to return
Returns: Returns:
(list) List of blob hashes (list) List of blob hashes
""" """

View file

@@ -40,17 +40,6 @@ def start():
type=str, type=str,
default=conf.settings['wallet'] default=conf.settings['wallet']
) )
parser.add_argument(
"--ui", help="path to custom UI folder", default=None
)
parser.add_argument(
"--branch",
help='Branch of lbry-web-ui repo to use, defaults to {}'.format(conf.settings['ui_branch']),
default=conf.settings['ui_branch']
)
parser.add_argument(
'--launch-ui', dest='launchui', action="store_true"
)
parser.add_argument( parser.add_argument(
"--http-auth", dest="useauth", action="store_true", default=conf.settings['use_auth_http'] "--http-auth", dest="useauth", action="store_true", default=conf.settings['use_auth_http']
) )
@@ -93,7 +82,7 @@ def start():
if test_internet_connection(): if test_internet_connection():
analytics_manager = analytics.Manager.new_instance() analytics_manager = analytics.Manager.new_instance()
start_server_and_listen(args.launchui, args.useauth, analytics_manager) start_server_and_listen(args.useauth, analytics_manager)
reactor.run() reactor.run()
else: else:
log.info("Not connected to internet, unable to start") log.info("Not connected to internet, unable to start")
@@ -101,28 +90,23 @@ def start():
def update_settings_from_args(args): def update_settings_from_args(args):
cli_settings = {} cli_settings = {}
if args.ui:
cli_settings['local_ui_path'] = args.ui
if args.branch:
cli_settings['ui_branch'] = args.branch
cli_settings['use_auth_http'] = args.useauth cli_settings['use_auth_http'] = args.useauth
cli_settings['wallet'] = args.wallet cli_settings['wallet'] = args.wallet
conf.settings.update(cli_settings, data_types=(conf.TYPE_CLI,)) conf.settings.update(cli_settings, data_types=(conf.TYPE_CLI,))
@defer.inlineCallbacks @defer.inlineCallbacks
def start_server_and_listen(launchui, use_auth, analytics_manager, max_tries=5): def start_server_and_listen(use_auth, analytics_manager, max_tries=5):
"""The primary entry point for launching the daemon. """The primary entry point for launching the daemon.
Args: Args:
launchui: set to true to open a browser window
use_auth: set to true to enable http authentication use_auth: set to true to enable http authentication
analytics_manager: to send analytics analytics_manager: to send analytics
""" """
analytics_manager.send_server_startup() analytics_manager.send_server_startup()
daemon_server = DaemonServer(analytics_manager) daemon_server = DaemonServer(analytics_manager)
try: try:
yield daemon_server.start(use_auth, launchui) yield daemon_server.start(use_auth)
analytics_manager.send_server_startup_success() analytics_manager.send_server_startup_success()
except Exception as e: except Exception as e:
log.exception('Failed to startup') log.exception('Failed to startup')

View file

@@ -44,9 +44,9 @@ class DaemonServer(object):
return defer.succeed(True) return defer.succeed(True)
@defer.inlineCallbacks @defer.inlineCallbacks
def start(self, use_auth, launch_ui=False): def start(self, use_auth):
yield self._setup_server(use_auth) yield self._setup_server(use_auth)
yield self._api.setup(launch_ui) yield self._api.setup()
@defer.inlineCallbacks @defer.inlineCallbacks
def stop(self): def stop(self):