diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 98db36c8d..3a2d0e4cd 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 0.13.1
+current_version = 0.14.1rc1
 commit = True
 tag = True
 parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)((?P<release>[a-z]+)(?P<candidate>\d+))?
diff --git a/.travis.yml b/.travis.yml
index eed1fd928..93e361d89 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -41,4 +41,5 @@ script:
   - pylint lbrynet
   - PYTHONPATH=. trial tests
   - python -m unittest discover tests/integration
+  - rvm install ruby-2.3.1
   - rvm use 2.3.1 && gem install danger --version '~> 4.0' && danger
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 6435504c1..672433bfc 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -9,25 +9,29 @@ at anytime.
 ## [Unreleased]
 
 ### Added
-  *
+  * Missing docstring for `blob_list`
   *
 
 ### Changed
+  * Change `max_key_fee` setting to be a dictionary with values for `currency` and `amount`
   * Linux default downloads folder changed from `~/Downloads` to `XDG_DOWNLOAD_DIR`
   * Linux folders moved from the home directory to `~/.local/share/lbry`
   * Windows folders moved from `%APPDATA%/Roaming` to `%APPDATA%/Local/lbry`
+  *
 
 ### Fixed
   * Fixed some log messages throwing exceptions
   * Fix shutdown of the blob tracker by Session
+  * Fixed claim_new_support docstrings
   * Fix default directories to comply to XDG
+  *
 
 ### Deprecated
   *
   *
 
 ### Removed
-  *
+  * Removed unused settings from conf.py and `settings_set`
   *
 
 ## [0.13.1] - 2017-06-15
diff --git a/lbrynet/__init__.py b/lbrynet/__init__.py
index 57c79dbf5..39c5f0f88 100644
--- a/lbrynet/__init__.py
+++ b/lbrynet/__init__.py
@@ -1,6 +1,6 @@
 import logging
 
-__version__ = "0.13.1"
+__version__ = "0.14.1rc1"
 version = tuple(__version__.split('.'))
 
 logging.getLogger(__name__).addHandler(logging.NullHandler())
diff --git a/lbrynet/conf.py b/lbrynet/conf.py
index 2d422ae00..faf7585a7 100644
--- a/lbrynet/conf.py
+++ b/lbrynet/conf.py
@@ -161,7 +161,6 @@ FIXED_SETTINGS = {
     'PROTOCOL_PREFIX': PROTOCOL_PREFIX,
     'SLACK_WEBHOOK': ('nUE0pUZ6Yl9bo29epl5moTSwnl5wo20ip2IlqzywMKZiIQSFZR5'
                       'AHx4mY0VmF0WQZ1ESEP9kMHZlp1WzJwWOoKN3ImR1M2yUAaMyqGZ='),
-    'SOURCE_TYPES': ['lbry_sd_hash', 'url', 'btih'],
     'WALLET_TYPES': [LBRYUM_WALLET, LBRYCRD_WALLET],
 }
 
@@ -178,40 +177,21 @@ ADJUSTABLE_SETTINGS = {
     # give an attacker access to your wallet and you could lose
     # all of your credits.
     'api_host': (str, 'localhost'),
-    'api_port': (int, 5279),
     'cache_time': (int, 150),
-    'check_ui_requirements': (bool, True),
     'data_dir': (str, default_data_dir),
     'data_rate': (float, .0001),  # points/megabyte
-    'default_ui_branch': (str, 'master'),
     'delete_blobs_on_remove': (bool, True),
     'dht_node_port': (int, 4444),
     'download_directory': (str, default_download_dir),
     'download_timeout': (int, 180),
-    'host_ui': (bool, True),
     'is_generous_host': (bool, True),
     'known_dht_nodes': (list, DEFAULT_DHT_NODES, server_port),
-
-    # TODO: this should not be configured; move it elsewhere
-    'last_version': (dict, {'lbrynet': '0.0.1', 'lbryum': '0.0.1'}),
-
-    'lbryum_wallet_dir': (str, default_lbryum_dir),
-    'local_ui_path': (str, ''),
     'max_connections_per_stream': (int, 5),
-    'max_download': (float, 0.0),
-
-    # TODO: this field is more complicated than it needs to be because
-    # it goes through a Fee validator when loaded by the exchange rate
-    # manager. Look into refactoring the exchange rate conversion to
-    # take in a simpler form.
-    #
     # TODO: writing json on the cmd line is a pain, come up with a nicer
-    # parser for this data structure. (maybe MAX_KEY_FEE': USD:25
-    'max_key_fee': (json.loads, {'currency': 'USD', 'amount': 25.0, 'address': ''}),
-
-    'max_search_results': (int, 25),
-    'max_upload': (float, 0.0),
+    # parser for this data structure. maybe 'USD:25'
+    'max_key_fee': (json.loads, {'currency': 'USD', 'amount': 25.0}),
     'min_info_rate': (float, .02),  # points/1000 infos
     'min_valuable_hash_rate': (float, .05),  # points/1000 infos
     'min_valuable_info_rate': (float, .05),  # points/1000 infos
@@ -220,15 +200,10 @@ ADJUSTABLE_SETTINGS = {
     'reflector_port': (int, 5566),
     'reflect_uploads': (bool, True),
     'reflector_servers': (list, [('reflector.lbry.io', 5566)], server_port),
-    'run_on_startup': (bool, False),
     'run_reflector_server': (bool, False),
     'sd_download_timeout': (int, 3),
     'share_usage_data': (bool, True),  # whether to share usage stats and diagnostic info with LBRY
     'peer_search_timeout': (int, 3),
-    'search_servers': (list, ['lighthouse1.lbry.io:50005']),
-    'search_timeout': (float, 5.0),
-    'startup_scripts': (list, []),
-    'ui_branch': (str, 'master'),
     'use_auth_http': (bool, False),
     'use_upnp': (bool, True),
     'wallet': (str, LBRYUM_WALLET),
diff --git a/lbrynet/core/Wallet.py b/lbrynet/core/Wallet.py
index 90ba050c2..23b03d1e4 100644
--- a/lbrynet/core/Wallet.py
+++ b/lbrynet/core/Wallet.py
@@ -22,6 +22,7 @@ from lbryschema.claim import ClaimDict
 from lbryschema.error import DecodeError
 from lbryschema.decode import smart_decode
 
+from lbrynet import conf
 from lbrynet.core.sqlite_helpers import rerun_if_locked
 from lbrynet.interfaces import IRequestCreator, IQueryHandlerFactory, IQueryHandler, IWallet
 from lbrynet.core.client.ClientRequest import ClientRequest
@@ -30,8 +31,6 @@ from lbrynet.core.Error import UnknownClaimID, UnknownURI
 
 log = logging.getLogger(__name__)
 
-CLAIM_CACHE_TIME = 600
-
 
 class ReservedPoints(object):
     def __init__(self, identifier, amount):
@@ -84,8 +83,9 @@ class CachedClaim(object):
         self.nout = nout
 
     def response_dict(self, check_expires=True):
-        if check_expires and (time.time() - int(self.cache_timestamp)) > CLAIM_CACHE_TIME:
-            return
+        if check_expires:
+            if (time.time() - int(self.cache_timestamp)) > conf.settings['cache_time']:
+                return
         claim = {
             "height": self.height,
             "address": self.address,
@@ -370,7 +370,7 @@ class SqliteStorage(MetaDataStorage):
         if result:
             claim_id, certificate_id, last_modified = result[0]
             last_modified = int(last_modified)
-            if check_expire and time.time() - last_modified > CLAIM_CACHE_TIME:
+            if check_expire and time.time() - last_modified > conf.settings['cache_time']:
                 defer.returnValue(None)
             claim = yield self.get_cached_claim(claim_id)
             if claim:
diff --git a/lbrynet/lbrynet_daemon/Daemon.py b/lbrynet/lbrynet_daemon/Daemon.py
index ab684cec2..b79646cf9 100644
--- a/lbrynet/lbrynet_daemon/Daemon.py
+++ b/lbrynet/lbrynet_daemon/Daemon.py
@@ -17,10 +17,8 @@ from twisted.python.failure import Failure
 from lbryschema.claim import ClaimDict
 from lbryschema.uri import parse_lbry_uri
 from lbryschema.error import URIParseError
-from lbryschema.fee import Fee
 
 # TODO: importing this when internet is disabled raises a socket.gaierror
-from lbryum.version import LBRYUM_VERSION
 from lbrynet.core.system_info import get_lbrynet_version
 from lbrynet import conf, analytics
 from lbrynet.conf import LBRYCRD_WALLET, LBRYUM_WALLET, PTC_WALLET
@@ -170,12 +168,7 @@ class Daemon(AuthJSONRPCServer):
         AuthJSONRPCServer.__init__(self, conf.settings['use_auth_http'])
         self.allowed_during_startup = [
             'stop', 'status', 'version',
-            # delete these once they are fully removed:
-            'is_running', 'is_first_run', 'get_time_behind_blockchain', 'daemon_status',
-            'get_start_notice',
         ]
-        conf.settings.set('last_version',
-                          {'lbrynet': get_lbrynet_version(), 'lbryum': LBRYUM_VERSION})
         self.db_dir = conf.settings['data_dir']
         self.download_directory = conf.settings['download_directory']
         if conf.settings['BLOBFILES_DIR'] == "blobfiles":
@@ -183,14 +176,9 @@
         else:
             log.info("Using non-default blobfiles directory: %s", conf.settings['BLOBFILES_DIR'])
         self.blobfile_dir = conf.settings['BLOBFILES_DIR']
-        self.run_on_startup = conf.settings['run_on_startup']
         self.data_rate = conf.settings['data_rate']
         self.max_key_fee = conf.settings['max_key_fee']
-        self.max_upload = conf.settings['max_upload']
-        self.max_download = conf.settings['max_download']
-        self.search_timeout = conf.settings['search_timeout']
         self.download_timeout = conf.settings['download_timeout']
-        self.max_search_results = conf.settings['max_search_results']
         self.run_reflector_server = conf.settings['run_reflector_server']
         self.wallet_type = conf.settings['wallet']
         self.delete_blobs_on_remove = conf.settings['delete_blobs_on_remove']
@@ -198,14 +186,11 @@
         self.reflector_port = conf.settings['reflector_port']
         self.dht_node_port = conf.settings['dht_node_port']
         self.use_upnp = conf.settings['use_upnp']
-        self.cache_time = conf.settings['cache_time']
         self.startup_status = STARTUP_STAGES[0]
         self.connected_to_internet = True
         self.connection_status_code = None
         self.platform = None
-        self.first_run = None
-        self.log_file = conf.settings.get_log_filename()
 
         self.current_db_revision = 3
         self.db_revision_file = conf.settings.get_db_revision_filename()
         self.session = None
@@ -235,7 +220,7 @@
         self.lbry_file_manager = None
 
     @defer.inlineCallbacks
-    def setup(self, launch_ui):
+    def setup(self):
         reactor.addSystemEventTrigger('before', 'shutdown', self._shutdown)
 
         self._modify_loggly_formatter()
@@ -430,32 +415,28 @@
 
     def _update_settings(self, settings):
         setting_types = {
-            'run_on_startup': bool,
-            'data_rate': float,
-            'max_key_fee': float,
             'download_directory': str,
-            'max_upload': float,
-            'max_download': float,
+            'data_rate': float,
             'download_timeout': int,
-            'search_timeout': float,
+            'max_key_fee': dict,
+            'use_upnp': bool,
+            'run_reflector_server': bool,
             'cache_time': int,
+            'reflect_uploads': bool,
             'share_usage_data': bool,
+            'peer_search_timeout': int,
+            'sd_download_timeout': int,
         }
 
-        def can_update_key(settings, key, setting_type):
-            return (
-                isinstance(settings[key], setting_type) or
-                (
-                    key == "max_key_fee" and
-                    isinstance(Fee(settings[key]).amount, setting_type)
-                )
-            )
-
         for key, setting_type in setting_types.iteritems():
             if key in settings:
-                if can_update_key(settings, key, setting_type):
+                if isinstance(settings[key], setting_type):
                     conf.settings.update({key: settings[key]},
                                          data_types=(conf.TYPE_RUNTIME, conf.TYPE_PERSISTED))
+                elif setting_type is dict and isinstance(settings[key], (unicode, str)):
+                    decoded = json.loads(str(settings[key]))
+                    conf.settings.update({key: decoded},
+                                         data_types=(conf.TYPE_RUNTIME, conf.TYPE_PERSISTED))
                 else:
                     try:
                         converted = setting_type(settings[key])
@@ -463,18 +444,14 @@
                                              data_types=(conf.TYPE_RUNTIME, conf.TYPE_PERSISTED))
                     except Exception as err:
                         log.warning(err.message)
-                        log.warning("error converting setting '%s' to type %s", key, setting_type)
+                        log.warning("error converting setting '%s' to type %s from type %s", key,
+                                    setting_type, str(type(settings[key])))
 
         conf.settings.save_conf_file_settings()
 
-        self.run_on_startup = conf.settings['run_on_startup']
         self.data_rate = conf.settings['data_rate']
         self.max_key_fee = conf.settings['max_key_fee']
         self.download_directory = conf.settings['download_directory']
-        self.max_upload = conf.settings['max_upload']
-        self.max_download = conf.settings['max_download']
         self.download_timeout = conf.settings['download_timeout']
-        self.search_timeout = conf.settings['search_timeout']
-        self.cache_time = conf.settings['cache_time']
 
         return defer.succeed(True)
@@ -1142,16 +1119,41 @@
         """
         Set daemon settings
 
-        Args:
-            'run_on_startup': (bool) currently not supported
-            'data_rate': (float) data rate,
-            'max_key_fee': (float) maximum key fee,
-            'download_directory': (str) path of where files are downloaded,
-            'max_upload': (float), currently not supported
-            'max_download': (float), currently not supported
-            'download_timeout': (int) download timeout in seconds
-            'search_timeout': (float) search timeout in seconds
-            'cache_time': (int) cache timeout in seconds
+        Usage:
+            settings_set [<download_directory> | --download_directory=<download_directory>]
+                         [<data_rate> | --data_rate=<data_rate>]
+                         [<download_timeout> | --download_timeout=<download_timeout>]
+                         [<max_key_fee> | --max_key_fee=<max_key_fee>]
+                         [<use_upnp> | --use_upnp=<use_upnp>]
+                         [<run_reflector_server> | --run_reflector_server=<run_reflector_server>]
+                         [<cache_time> | --cache_time=<cache_time>]
+                         [<reflect_uploads> | --reflect_uploads=<reflect_uploads>]
+                         [<share_usage_data> | --share_usage_data=<share_usage_data>]
+                         [<peer_search_timeout> | --peer_search_timeout=<peer_search_timeout>]
+                         [<sd_download_timeout> | --sd_download_timeout=<sd_download_timeout>]
+
+        Options:
+            <download_directory>, --download_directory=<download_directory>   : (str)
+            <data_rate>, --data_rate=<data_rate>                               : (float), 0.0001
+            <download_timeout>, --download_timeout=<download_timeout>         : (int), 180
+            <max_key_fee>, --max_key_fee=<max_key_fee>                         : (dict) maximum key fee for downloads,
+                                                                                 in the format: {
+                                                                                   "currency": <currency_symbol>,
+                                                                                   "amount": <amount>
+                                                                                 }. In the CLI, it must be an escaped
+                                                                                 JSON string
+                                                                                 Supported currency symbols:
+                                                                                   LBC
+                                                                                   BTC
+                                                                                   USD
+            <use_upnp>, --use_upnp=<use_upnp>                                  : (bool), True
+            <run_reflector_server>, --run_reflector_server=<run_reflector_server> : (bool), False
+            <cache_time>, --cache_time=<cache_time>                            : (int), 150
+            <reflect_uploads>, --reflect_uploads=<reflect_uploads>             : (bool), True
+            <share_usage_data>, --share_usage_data=<share_usage_data>          : (bool), True
+            <peer_search_timeout>, --peer_search_timeout=<peer_search_timeout> : (int), 3
+            <sd_download_timeout>, --sd_download_timeout=<sd_download_timeout> : (int), 3
+
         Returns:
             (dict) Updated dictionary of daemon settings
         """
@@ -1917,7 +1919,7 @@
 
         Usage:
            claim_new_support (<name> | --name=<name>) (<claim_id> | --claim_id=<claim_id>)
-                             (<amount> | --amount<amount>)
+                             (<amount> | --amount=<amount>)
 
         Return:
             (dict) Dictionary containing result of the claim
 
@@ -2402,19 +2404,26 @@
         defer.returnValue("Reflect success")
 
     @defer.inlineCallbacks
+    @AuthJSONRPCServer.flags(needed="-n", finished="-f")
     def jsonrpc_blob_list(self, uri=None, stream_hash=None, sd_hash=None, needed=None,
                           finished=None, page_size=None, page=None):
         """
         Returns blob hashes. If not given filters, returns all blobs known by the blob manager
 
-        Args:
-            'uri' (optional): (str) filter by blobs in stream for winning claim
-            'stream_hash' (optional): (str) filter by blobs in given stream hash
-            'sd_hash' (optional): (str) filter by blobs in given sd hash
-            'needed' (optional): (bool) only return needed blobs
-            'finished' (optional): (bool) only return finished blobs
-            'page_size' (optional): (int) limit number of results returned
-            'page' (optional): (int) filter to page x of [page_size] results
+        Usage:
+            blob_list [-n] [-f] [<uri> | --uri=<uri>] [<stream_hash> | --stream_hash=<stream_hash>]
+                      [<sd_hash> | --sd_hash=<sd_hash>] [<page_size> | --page_size=<page_size>]
+                      [<page> | --page=<page>]
+
+        Options:
+            -n                                          : only return needed blobs
+            -f                                          : only return finished blobs
+            <uri>, --uri=<uri>                          : filter blobs by stream in a uri
+            <stream_hash>, --stream_hash=<stream_hash>  : filter blobs by stream hash
+            <sd_hash>, --sd_hash=<sd_hash>              : filter blobs by sd hash
+            <page_size>, --page_size=<page_size>        : results page size
+            <page>, --page=<page>                       : page of results to return
+
         Returns:
             (list) List of blob hashes
         """
diff --git a/lbrynet/lbrynet_daemon/DaemonControl.py b/lbrynet/lbrynet_daemon/DaemonControl.py
index adff304cd..e9ffec5bb 100644
--- a/lbrynet/lbrynet_daemon/DaemonControl.py
+++ b/lbrynet/lbrynet_daemon/DaemonControl.py
@@ -40,17 +40,6 @@ def start():
         type=str,
         default=conf.settings['wallet']
     )
-    parser.add_argument(
-        "--ui", help="path to custom UI folder", default=None
-    )
-    parser.add_argument(
-        "--branch",
-        help='Branch of lbry-web-ui repo to use, defaults to {}'.format(conf.settings['ui_branch']),
-        default=conf.settings['ui_branch']
-    )
-    parser.add_argument(
-        '--launch-ui', dest='launchui', action="store_true"
-    )
     parser.add_argument(
         "--http-auth", dest="useauth", action="store_true", default=conf.settings['use_auth_http']
    )
@@ -93,7 +82,7 @@
 
     if test_internet_connection():
         analytics_manager = analytics.Manager.new_instance()
-        start_server_and_listen(args.launchui, args.useauth, analytics_manager)
+        start_server_and_listen(args.useauth, analytics_manager)
         reactor.run()
     else:
         log.info("Not connected to internet, unable to start")
@@ -101,28 +90,23 @@
 
 def update_settings_from_args(args):
     cli_settings = {}
-    if args.ui:
-        cli_settings['local_ui_path'] = args.ui
-    if args.branch:
-        cli_settings['ui_branch'] = args.branch
     cli_settings['use_auth_http'] = args.useauth
     cli_settings['wallet'] = args.wallet
     conf.settings.update(cli_settings, data_types=(conf.TYPE_CLI,))
 
 
 @defer.inlineCallbacks
-def start_server_and_listen(launchui, use_auth, analytics_manager, max_tries=5):
+def start_server_and_listen(use_auth, analytics_manager, max_tries=5):
     """The primary entry point for launching the daemon.
 
     Args:
-        launchui: set to true to open a browser window
         use_auth: set to true to enable http authentication
         analytics_manager: to send analytics
     """
     analytics_manager.send_server_startup()
     daemon_server = DaemonServer(analytics_manager)
     try:
-        yield daemon_server.start(use_auth, launchui)
+        yield daemon_server.start(use_auth)
         analytics_manager.send_server_startup_success()
     except Exception as e:
         log.exception('Failed to startup')
diff --git a/lbrynet/lbrynet_daemon/DaemonServer.py b/lbrynet/lbrynet_daemon/DaemonServer.py
index c246287cb..abec2f04e 100644
--- a/lbrynet/lbrynet_daemon/DaemonServer.py
+++ b/lbrynet/lbrynet_daemon/DaemonServer.py
@@ -44,9 +44,9 @@ class DaemonServer(object):
         return defer.succeed(True)
 
     @defer.inlineCallbacks
-    def start(self, use_auth, launch_ui=False):
+    def start(self, use_auth):
         yield self._setup_server(use_auth)
-        yield self._api.setup(launch_ui)
+        yield self._api.setup()
 
     @defer.inlineCallbacks
     def stop(self):
diff --git a/lbrynet/lbrynet_daemon/auth/server.py b/lbrynet/lbrynet_daemon/auth/server.py
index 9fa1b92be..7452e084c 100644
--- a/lbrynet/lbrynet_daemon/auth/server.py
+++ b/lbrynet/lbrynet_daemon/auth/server.py
@@ -331,8 +331,8 @@ class AuthJSONRPCServer(AuthorizedBase):
                 request, id_
             )
             return server.NOT_DONE_YET
-        except NotAllowedDuringStartupError as err:
-            log.warning('Function not allowed during startup %s: %s', function_name, err)
+        except NotAllowedDuringStartupError:
+            log.warning('Function not allowed during startup: %s', function_name)
             self._render_error(
                 JSONRPCError("This method is unavailable until the daemon is fully started",
                              code=JSONRPCError.CODE_INVALID_REQUEST),
diff --git a/requirements.txt b/requirements.txt
index 8f8126c58..ff431058e 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -14,7 +14,7 @@ jsonrpc==1.2
 jsonrpclib==0.1.7
 jsonschema==2.5.1
 git+https://github.com/lbryio/lbryschema.git@v0.0.7#egg=lbryschema
-git+https://github.com/lbryio/lbryum.git@v2.8.4#egg=lbryum
+git+https://github.com/lbryio/lbryum.git@v3.0.1#egg=lbryum
 miniupnpc==1.9
 pbkdf2==1.3
 pycrypto==2.6.1
diff --git a/setup.py b/setup.py
index b1c96323e..094ed90c1 100644
--- a/setup.py
+++ b/setup.py
@@ -21,7 +21,7 @@ requires = [
     'envparse',
     'jsonrpc',
     'jsonschema',
-    'lbryum==2.8.4',
+    'lbryum==3.0.1',
     'lbryschema==0.0.7',
     'miniupnpc',
     'pycrypto',
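
Note on the `max_key_fee` change above: the setting is now a dictionary with `currency` and `amount` keys, and `_update_settings` accepts either a dict or a JSON-encoded string (the CLI passes an escaped JSON string, per the new `settings_set` docstring). The following standalone sketch mirrors that dict-or-JSON coercion; `coerce_setting` is an illustrative helper, not the daemon's actual API.

```python
import json

def coerce_setting(value, setting_type):
    """Illustrative sketch: return value as setting_type, decoding JSON strings
    for dict-typed settings such as max_key_fee (mirrors Daemon._update_settings)."""
    if isinstance(value, setting_type):
        return value
    if setting_type is dict and isinstance(value, str):
        # dict settings arrive from the CLI as escaped JSON strings
        return json.loads(value)
    # fall back to a plain type conversion, e.g. int("180") -> 180
    return setting_type(value)

# e.g. a max_key_fee sent from the CLI as a JSON string
print(coerce_setting('{"currency": "USD", "amount": 50.0}', dict))
# {'currency': 'USD', 'amount': 50.0}
print(coerce_setting("180", int))
# 180
```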
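
Similarly, the `Wallet.py` hunks drop the hard-coded `CLAIM_CACHE_TIME = 600` constant and expire cached claims after the adjustable `cache_time` setting (default 150 seconds in `conf.py`). A minimal sketch of the expiry test, using an illustrative `is_claim_expired` helper rather than the real `CachedClaim` method:

```python
import time

DEFAULT_CACHE_TIME = 150  # seconds; the 'cache_time' default in conf.py

def is_claim_expired(cache_timestamp, cache_time=DEFAULT_CACHE_TIME):
    """True when a cached claim is older than the configured cache_time."""
    return (time.time() - int(cache_timestamp)) > cache_time

# a claim cached five minutes ago is stale under the 150-second default,
# though it was still fresh under the old 600-second constant
print(is_claim_expired(time.time() - 300))  # True
```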
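
The rewritten `Usage:`/`Options:` docstrings (for `settings_set`, `claim_new_support`, and `blob_list`) follow docopt-style syntax, which is what makes flags like `-n`/`-f` and options like `--uri=<uri>` parseable from a command line. A quick check of the new `blob_list` usage line with the third-party `docopt` package, as an illustration only; lbrynet's own CLI wiring is not shown in this diff:

```python
from docopt import docopt  # pip install docopt

USAGE = """
Usage:
    blob_list [-n] [-f] [<uri> | --uri=<uri>] [<stream_hash> | --stream_hash=<stream_hash>]
              [<sd_hash> | --sd_hash=<sd_hash>] [<page_size> | --page_size=<page_size>]
              [<page> | --page=<page>]
"""

# "-n" asks for needed blobs and "--uri" filters by stream, as documented above
args = docopt(USAGE, argv=['-n', '--uri=lbry://example'])
print(args['-n'], args['--uri'])  # True lbry://example
```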