diff --git a/CHANGELOG.md b/CHANGELOG.md index 1f513039e..ca2f70285 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,35 +8,36 @@ can and probably will change functionality and break backwards compatability at anytime. ## [Unreleased] - -## [0.20.3] - 2018-07-20 -### Changed -* Additional information added to the balance error message when editing a claim. -(https://github.com/lbryio/lbry/pull/1309) - ### Security * * ### Fixed - * + * loggly error reporting not following `share_usage_data` * ### Deprecated - * + * automatic claim renew, this is no longer needed * ### Changed - * - * + * api server class to use components, and for all JSONRPC API commands to be callable so long as the required components are available. + * return error messages when required conditions on components are not met for API calls + * `status` to no longer return a base58 encoded `lbry_id`, instead return this as the hex encoded `node_id` in a new `dht_node_status` field. + * `startup_status` field in the response to `status` to be a dict of component names to status booleans + * moved wallet, upnp and dht startup code from `Session` to `Components` ### Added + * `skipped_components` list to the response from `status` + * `skipped_components` config setting, accepts a list of names of components to not run + * `ComponentManager` for managing the lifecycles of dependencies + * `requires` decorator to register the components required by a `jsonrpc_` command, to facilitate commands registering asynchronously + * unittests for `ComponentManager` * script to generate docs/api.json file (https://github.com/lbryio/lbry.tech/issues/42) - * + * additional information to the balance error message when editing a claim (https://github.com/lbryio/lbry/pull/1309) ### Removed - * - * + * most of the internal attributes from `Daemon` ## [0.20.4] - 2018-07-18 diff --git a/lbrynet/conf.py b/lbrynet/conf.py index 14fa45b53..1d0020f89 100644 --- a/lbrynet/conf.py +++ b/lbrynet/conf.py @@ -168,9 +168,11 
@@ def server_port(server_and_port): def server_list(servers): return [server_port(server) for server in servers] + def server_list_reverse(servers): return ["%s:%s" % (server, port) for server, port in servers] + class Env(envparse.Env): """An Env parser that automatically namespaces the variables with LBRY""" @@ -288,7 +290,7 @@ ADJUSTABLE_SETTINGS = { 'reflect_uploads': (bool, True), 'auto_re_reflect_interval': (int, 86400), # set to 0 to disable 'reflector_servers': (list, [('reflector2.lbry.io', 5566)], server_list, server_list_reverse), - 'run_reflector_server': (bool, False), + 'run_reflector_server': (bool, False), # adds `reflector` to components_to_skip unless True 'sd_download_timeout': (int, 3), 'share_usage_data': (bool, True), # whether to share usage stats and diagnostic info with LBRY 'peer_search_timeout': (int, 30), @@ -299,7 +301,8 @@ ADJUSTABLE_SETTINGS = { 'blockchain_name': (str, 'lbrycrd_main'), 'lbryum_servers': (list, [('lbryumx1.lbry.io', 50001), ('lbryumx2.lbry.io', 50001)], server_list, server_list_reverse), - 's3_headers_depth': (int, 96 * 10) # download headers from s3 when the local height is more than 10 chunks behind + 's3_headers_depth': (int, 96 * 10), # download headers from s3 when the local height is more than 10 chunks behind + 'components_to_skip': (list, []) # components which will be skipped during start-up of daemon } diff --git a/lbrynet/core/BlobManager.py b/lbrynet/core/BlobManager.py index 370a3ddeb..4a86ed581 100644 --- a/lbrynet/core/BlobManager.py +++ b/lbrynet/core/BlobManager.py @@ -27,7 +27,8 @@ class DiskBlobManager(object): self.blob_hashes_to_delete = {} # {blob_hash: being_deleted (True/False)} self.check_should_announce_lc = None - if conf.settings['run_reflector_server']: # TODO: move this looping call to SQLiteStorage + # TODO: move this looping call to SQLiteStorage + if 'reflector' not in conf.settings['components_to_skip']: self.check_should_announce_lc = 
task.LoopingCall(self.storage.verify_will_announce_all_head_and_sd_blobs) @defer.inlineCallbacks diff --git a/lbrynet/core/Error.py b/lbrynet/core/Error.py index 729ceab76..68a6df78e 100644 --- a/lbrynet/core/Error.py +++ b/lbrynet/core/Error.py @@ -155,13 +155,23 @@ class InvalidAuthenticationToken(Exception): class NegotiationError(Exception): pass + class InvalidCurrencyError(Exception): def __init__(self, currency): self.currency = currency Exception.__init__( self, 'Invalid currency: {} is not a supported currency.'.format(currency)) + class NoSuchDirectoryError(Exception): def __init__(self, directory): self.directory = directory Exception.__init__(self, 'No such directory {}'.format(directory)) + + +class ComponentStartConditionNotMet(Exception): + pass + + +class ComponentsNotStarted(Exception): + pass diff --git a/lbrynet/core/Session.py b/lbrynet/core/Session.py index d3a1febbc..83519ae66 100644 --- a/lbrynet/core/Session.py +++ b/lbrynet/core/Session.py @@ -1,11 +1,8 @@ import logging -import miniupnpc -from twisted.internet import threads, defer +from twisted.internet import defer from lbrynet.core.BlobManager import DiskBlobManager -from lbrynet.dht import node, hashannouncer from lbrynet.database.storage import SQLiteStorage from lbrynet.core.RateLimiter import RateLimiter -from lbrynet.core.utils import generate_id from lbrynet.core.PaymentRateManager import BasePaymentRateManager, OnlyFreePaymentsManager log = logging.getLogger(__name__) @@ -32,11 +29,10 @@ class Session(object): peers can connect to this peer. 
""" - def __init__(self, blob_data_payment_rate, db_dir=None, node_id=None, peer_manager=None, dht_node_port=None, + def __init__(self, blob_data_payment_rate, db_dir=None, node_id=None, dht_node_port=None, known_dht_nodes=None, peer_finder=None, hash_announcer=None, blob_dir=None, blob_manager=None, - peer_port=None, use_upnp=True, rate_limiter=None, wallet=None, dht_node_class=node.Node, - blob_tracker_class=None, payment_rate_manager_class=None, is_generous=True, external_ip=None, - storage=None): + peer_port=None, rate_limiter=None, wallet=None, external_ip=None, storage=None, + dht_node=None, peer_manager=None): """@param blob_data_payment_rate: The default payment rate for blob data @param db_dir: The directory in which levelDB files should be stored @@ -78,10 +74,6 @@ class Session(object): @param peer_port: The port on which other peers should connect to this peer - @param use_upnp: Whether or not to try to open a hole in the - firewall so that outside peers can connect to this peer's - peer_port and dht_node_port - @param rate_limiter: An object which keeps track of the amount of data transferred to and from this peer, and can limit that rate if desired @@ -103,20 +95,14 @@ class Session(object): self.known_dht_nodes = [] self.blob_dir = blob_dir self.blob_manager = blob_manager - # self.blob_tracker = None - # self.blob_tracker_class = blob_tracker_class or BlobAvailabilityTracker self.peer_port = peer_port - self.use_upnp = use_upnp self.rate_limiter = rate_limiter self.external_ip = external_ip self.upnp_redirects = [] self.wallet = wallet - self.dht_node_class = dht_node_class - self.dht_node = None + self.dht_node = dht_node self.base_payment_rate_manager = BasePaymentRateManager(blob_data_payment_rate) self.payment_rate_manager = OnlyFreePaymentsManager() - # self.payment_rate_manager_class = payment_rate_manager_class or NegotiatedPaymentRateManager - # self.is_generous = is_generous self.storage = storage or SQLiteStorage(self.db_dir) def 
setup(self): @@ -124,15 +110,14 @@ class Session(object): log.debug("Starting session.") - if self.node_id is None: - self.node_id = generate_id() + if self.dht_node is not None: + if self.peer_manager is None: + self.peer_manager = self.dht_node.peer_manager - if self.use_upnp is True: - d = self._try_upnp() - else: - d = defer.succeed(True) - d.addCallback(lambda _: self.storage.setup()) - d.addCallback(lambda _: self._setup_dht()) + if self.peer_finder is None: + self.peer_finder = self.dht_node.peer_finder + + d = self.storage.setup() d.addCallback(lambda _: self._setup_other_components()) return d @@ -140,97 +125,12 @@ class Session(object): """Stop all services""" log.info('Stopping session.') ds = [] - if self.hash_announcer: - self.hash_announcer.stop() - # if self.blob_tracker is not None: - # ds.append(defer.maybeDeferred(self.blob_tracker.stop)) - if self.dht_node is not None: - ds.append(defer.maybeDeferred(self.dht_node.stop)) if self.rate_limiter is not None: ds.append(defer.maybeDeferred(self.rate_limiter.stop)) - if self.wallet is not None: - ds.append(defer.maybeDeferred(self.wallet.stop)) if self.blob_manager is not None: ds.append(defer.maybeDeferred(self.blob_manager.stop)) - if self.use_upnp is True: - ds.append(defer.maybeDeferred(self._unset_upnp)) return defer.DeferredList(ds) - def _try_upnp(self): - - log.debug("In _try_upnp") - - def get_free_port(upnp, port, protocol): - # returns an existing mapping if it exists - mapping = upnp.getspecificportmapping(port, protocol) - if not mapping: - return port - if upnp.lanaddr == mapping[0]: - return mapping[1] - return get_free_port(upnp, port + 1, protocol) - - def get_port_mapping(upnp, port, protocol, description): - # try to map to the requested port, if there is already a mapping use the next external - # port available - if protocol not in ['UDP', 'TCP']: - raise Exception("invalid protocol") - port = get_free_port(upnp, port, protocol) - if isinstance(port, tuple): - log.info("Found 
existing UPnP redirect %s:%i (%s) to %s:%i, using it", - self.external_ip, port, protocol, upnp.lanaddr, port) - return port - upnp.addportmapping(port, protocol, upnp.lanaddr, port, - description, '') - log.info("Set UPnP redirect %s:%i (%s) to %s:%i", self.external_ip, port, - protocol, upnp.lanaddr, port) - return port - - def threaded_try_upnp(): - if self.use_upnp is False: - log.debug("Not using upnp") - return False - u = miniupnpc.UPnP() - num_devices_found = u.discover() - if num_devices_found > 0: - u.selectigd() - external_ip = u.externalipaddress() - if external_ip != '0.0.0.0' and not self.external_ip: - # best not to rely on this external ip, the router can be behind layers of NATs - self.external_ip = external_ip - if self.peer_port: - self.peer_port = get_port_mapping(u, self.peer_port, 'TCP', 'LBRY peer port') - self.upnp_redirects.append((self.peer_port, 'TCP')) - if self.dht_node_port: - self.dht_node_port = get_port_mapping(u, self.dht_node_port, 'UDP', 'LBRY DHT port') - self.upnp_redirects.append((self.dht_node_port, 'UDP')) - return True - return False - - def upnp_failed(err): - log.warning("UPnP failed. 
Reason: %s", err.getErrorMessage()) - return False - - d = threads.deferToThread(threaded_try_upnp) - d.addErrback(upnp_failed) - return d - - def _setup_dht(self): # does not block startup, the dht will re-attempt if necessary - self.dht_node = self.dht_node_class( - node_id=self.node_id, - udpPort=self.dht_node_port, - externalIP=self.external_ip, - peerPort=self.peer_port, - peer_manager=self.peer_manager, - peer_finder=self.peer_finder, - ) - if not self.hash_announcer: - self.hash_announcer = hashannouncer.DHTHashAnnouncer(self.dht_node, self.storage) - self.peer_manager = self.dht_node.peer_manager - self.peer_finder = self.dht_node.peer_finder - d = self.dht_node.start(self.known_dht_nodes) - d.addCallback(lambda _: log.info("Joined the dht")) - d.addCallback(lambda _: self.hash_announcer.start()) - def _setup_other_components(self): log.debug("Setting up the rest of the components") @@ -244,39 +144,6 @@ class Session(object): else: self.blob_manager = DiskBlobManager(self.blob_dir, self.storage, self.dht_node._dataStore) - # if self.blob_tracker is None: - # self.blob_tracker = self.blob_tracker_class( - # self.blob_manager, self.dht_node.peer_finder, self.dht_node - # ) - # if self.payment_rate_manager is None: - # self.payment_rate_manager = self.payment_rate_manager_class( - # self.base_payment_rate_manager, self.blob_tracker, self.is_generous - # ) - self.rate_limiter.start() d = self.blob_manager.setup() - d.addCallback(lambda _: self.wallet.start()) - # d.addCallback(lambda _: self.blob_tracker.start()) - return d - - def _unset_upnp(self): - log.info("Unsetting upnp for session") - - def threaded_unset_upnp(): - u = miniupnpc.UPnP() - num_devices_found = u.discover() - if num_devices_found > 0: - u.selectigd() - for port, protocol in self.upnp_redirects: - if u.getspecificportmapping(port, protocol) is None: - log.warning( - "UPnP redirect for %s %d was removed by something else.", - protocol, port) - else: - u.deleteportmapping(port, protocol) - 
log.info("Removed UPnP redirect for %s %d.", protocol, port) - self.upnp_redirects = [] - - d = threads.deferToThread(threaded_unset_upnp) - d.addErrback(lambda err: str(err)) return d diff --git a/lbrynet/core/Wallet.py b/lbrynet/core/Wallet.py index 0b71ed59d..3052fdce8 100644 --- a/lbrynet/core/Wallet.py +++ b/lbrynet/core/Wallet.py @@ -938,9 +938,7 @@ class LBRYumWallet(Wallet): self._lag_counter = 0 self.blocks_behind = 0 self.catchup_progress = 0 - - # fired when the wallet actually unlocks (wallet_unlocked_d can be called multiple times) - self.wallet_unlock_success = defer.Deferred() + self.is_wallet_unlocked = None def _is_first_run(self): return (not self.printed_retrieving_headers and @@ -953,21 +951,23 @@ class LBRYumWallet(Wallet): return self._cmd_runner def check_locked(self): - if not self.wallet.use_encryption: - log.info("Wallet is not encrypted") - self.wallet_unlock_success.callback(True) - elif not self._cmd_runner: + """ + Checks if the wallet is encrypted(locked) or not + + :return: (boolean) indicating whether the wallet is locked or not + """ + if not self._cmd_runner: raise Exception("Command runner hasn't been initialized yet") elif self._cmd_runner.locked: log.info("Waiting for wallet password") self.wallet_unlocked_d.addCallback(self.unlock) - return self.wallet_unlock_success + return self.is_wallet_unlocked def unlock(self, password): if self._cmd_runner and self._cmd_runner.locked: try: self._cmd_runner.unlock_wallet(password) - self.wallet_unlock_success.callback(True) + self.is_wallet_unlocked = True log.info("Unlocked the wallet!") except InvalidPassword: log.warning("Incorrect password, try again") @@ -1054,6 +1054,7 @@ class LBRYumWallet(Wallet): wallet.create_main_account() wallet.synchronize() self.wallet = wallet + self.is_wallet_unlocked = not self.wallet.use_encryption self._check_large_wallet() return defer.succeed(True) diff --git a/lbrynet/core/__init__.py b/lbrynet/core/__init__.py index 6ac1f3432..df7d37558 100644 --- 
a/lbrynet/core/__init__.py +++ b/lbrynet/core/__init__.py @@ -5,3 +5,5 @@ This includes classes for connecting to other peers and downloading blobs from t connections from peers and responding to their requests, managing locally stored blobs, sending and receiving payments, and locating peers in the DHT. """ + +from lbrynet import custom_logger diff --git a/lbrynet/core/log_support.py b/lbrynet/core/log_support.py index 9e0a635d1..add93ea84 100644 --- a/lbrynet/core/log_support.py +++ b/lbrynet/core/log_support.py @@ -1,8 +1,6 @@ -import inspect import json import logging import logging.handlers -import os import sys import traceback @@ -13,25 +11,6 @@ import twisted.python.log from lbrynet import __version__ as lbrynet_version, build_type, conf from lbrynet.core import utils -#### -# This code is copied from logging/__init__.py in the python source code -#### -# -# _srcfile is used when walking the stack to check when we've got the first -# caller stack frame. -# -if hasattr(sys, 'frozen'): # support for py2exe - _srcfile = "logging%s__init__%s" % (os.sep, __file__[-4:]) -elif __file__[-4:].lower() in ['.pyc', '.pyo']: - _srcfile = __file__[:-4] + '.py' -else: - _srcfile = __file__ -_srcfile = os.path.normcase(_srcfile) -##### - - -TRACE = 5 - class HTTPSHandler(logging.Handler): def __init__(self, url, fqdn=False, localname=None, facility=None, cookies=None): @@ -139,6 +118,8 @@ def get_loggly_url(token=None, version=None): def configure_loggly_handler(): if build_type.BUILD == 'dev': return + if not conf.settings['share_usage_data']: + return level = logging.ERROR handler = get_loggly_handler(level=level, installation_id=conf.settings.installation_id, session_id=conf.settings.get_session_id()) @@ -185,33 +166,6 @@ class JsonFormatter(logging.Formatter): return json.dumps(data) -#### -# This code is copied from logging/__init__.py in the python source code -#### -def findCaller(srcfile=None): - """Returns the filename, line number and function name of the 
caller""" - srcfile = srcfile or _srcfile - f = inspect.currentframe() - # On some versions of IronPython, currentframe() returns None if - # IronPython isn't run with -X:Frames. - if f is not None: - f = f.f_back - rv = "(unknown file)", 0, "(unknown function)" - while hasattr(f, "f_code"): - co = f.f_code - filename = os.path.normcase(co.co_filename) - # ignore any function calls that are in this file - if filename == srcfile: - f = f.f_back - continue - rv = (filename, f.f_lineno, co.co_name) - break - return rv - - -### - - def failure(failure, log, msg, *args): """Log a failure message from a deferred. @@ -316,65 +270,3 @@ def get_parent(logger_name): return '' names = names[:-1] return '.'.join(names) - - -class Logger(logging.Logger): - """A logger that has an extra `fail` method useful for handling twisted failures.""" - - def fail(self, callback=None, *args, **kwargs): - """Returns a function to log a failure from an errback. - - The returned function appends the error message and extracts - the traceback from `err`. - - Example usage: - d.addErrback(log.fail(), 'This is an error message') - - Although odd, making the method call is necessary to extract - out useful filename and line number information; otherwise the - reported values are from inside twisted's deferred handling - code. - - Args: - callback: callable to call after making the log. The first argument - will be the `err` from the deferred - args: extra arguments to pass into `callback` - - Returns: a function that takes the following arguments: - err: twisted.python.failure.Failure - msg: the message to log, using normal logging string iterpolation. - msg_args: the values to subtitute into `msg` - msg_kwargs: set `level` to change from the default ERROR severity. Other - keywoards are treated as normal log kwargs. 
- """ - fn, lno, func = findCaller() - - def _fail(err, msg, *msg_args, **msg_kwargs): - level = msg_kwargs.pop('level', logging.ERROR) - msg += ": %s" - msg_args += (err.getErrorMessage(),) - exc_info = (err.type, err.value, err.getTracebackObject()) - record = self.makeRecord( - self.name, level, fn, lno, msg, msg_args, exc_info, func, msg_kwargs) - self.handle(record) - if callback: - try: - return callback(err, *args, **kwargs) - except Exception: - # log.fail is almost always called within an - # errback. If callback fails and we didn't catch - # the exception we would need to attach a second - # errback to deal with that, which we will almost - # never do and then we end up with an unhandled - # error that will get swallowed by twisted - self.exception('Failed to run callback') - - return _fail - - def trace(self, msg, *args, **kwargs): - if self.isEnabledFor(TRACE): - self._log(TRACE, msg, args, **kwargs) - - -logging.setLoggerClass(Logger) -logging.addLevelName(TRACE, 'TRACE') diff --git a/lbrynet/custom_logger.py b/lbrynet/custom_logger.py new file mode 100644 index 000000000..860f0b3c2 --- /dev/null +++ b/lbrynet/custom_logger.py @@ -0,0 +1,106 @@ +import os +import sys +import inspect +import logging +TRACE = 5 + + +#### +# This code is copied from logging/__init__.py in the python source code +#### +# +# _srcfile is used when walking the stack to check when we've got the first +# caller stack frame. +# +if hasattr(sys, 'frozen'): # support for py2exe + _srcfile = "logging%s__init__%s" % (os.sep, __file__[-4:]) +elif __file__[-4:].lower() in ['.pyc', '.pyo']: + _srcfile = __file__[:-4] + '.py' +else: + _srcfile = __file__ +_srcfile = os.path.normcase(_srcfile) + + +def findCaller(srcfile=None): + """Returns the filename, line number and function name of the caller""" + srcfile = srcfile or _srcfile + f = inspect.currentframe() + # On some versions of IronPython, currentframe() returns None if + # IronPython isn't run with -X:Frames. 
+ if f is not None: + f = f.f_back + rv = "(unknown file)", 0, "(unknown function)" + while hasattr(f, "f_code"): + co = f.f_code + filename = os.path.normcase(co.co_filename) + # ignore any function calls that are in this file + if filename == srcfile: + f = f.f_back + continue + rv = (filename, f.f_lineno, co.co_name) + break + return rv + + +### + +class Logger(logging.Logger): + """A logger that has an extra `fail` method useful for handling twisted failures.""" + + def fail(self, callback=None, *args, **kwargs): + """Returns a function to log a failure from an errback. + + The returned function appends the error message and extracts + the traceback from `err`. + + Example usage: + d.addErrback(log.fail(), 'This is an error message') + + Although odd, making the method call is necessary to extract + out useful filename and line number information; otherwise the + reported values are from inside twisted's deferred handling + code. + + Args: + callback: callable to call after making the log. The first argument + will be the `err` from the deferred + args: extra arguments to pass into `callback` + + Returns: a function that takes the following arguments: + err: twisted.python.failure.Failure + msg: the message to log, using normal logging string iterpolation. + msg_args: the values to subtitute into `msg` + msg_kwargs: set `level` to change from the default ERROR severity. Other + keywoards are treated as normal log kwargs. + """ + fn, lno, func = findCaller() + + def _fail(err, msg, *msg_args, **msg_kwargs): + level = msg_kwargs.pop('level', logging.ERROR) + msg += ": %s" + msg_args += (err.getErrorMessage(),) + exc_info = (err.type, err.value, err.getTracebackObject()) + record = self.makeRecord( + self.name, level, fn, lno, msg, msg_args, exc_info, func, msg_kwargs) + self.handle(record) + if callback: + try: + return callback(err, *args, **kwargs) + except Exception: + # log.fail is almost always called within an + # errback. 
If callback fails and we didn't catch + # the exception we would need to attach a second + # errback to deal with that, which we will almost + # never do and then we end up with an unhandled + # error that will get swallowed by twisted + self.exception('Failed to run callback') + + return _fail + + def trace(self, msg, *args, **kwargs): + if self.isEnabledFor(TRACE): + self._log(TRACE, msg, args, **kwargs) + + +logging.setLoggerClass(Logger) +logging.addLevelName(TRACE, 'TRACE') diff --git a/lbrynet/daemon/Component.py b/lbrynet/daemon/Component.py new file mode 100644 index 000000000..8909df65e --- /dev/null +++ b/lbrynet/daemon/Component.py @@ -0,0 +1,72 @@ +import logging +from twisted.internet import defer +from twisted._threads import AlreadyQuit +from ComponentManager import ComponentManager + +log = logging.getLogger(__name__) + + +class ComponentType(type): + def __new__(mcs, name, bases, newattrs): + klass = type.__new__(mcs, name, bases, newattrs) + if name != "Component": + ComponentManager.default_component_classes[klass.component_name] = klass + return klass + + +class Component(object): + """ + lbrynet-daemon component helper + + Inheriting classes will be automatically registered with the ComponentManager and must implement setup and stop + methods + """ + + __metaclass__ = ComponentType + depends_on = [] + component_name = None + + def __init__(self, component_manager): + self.component_manager = component_manager + self._running = False + + def __lt__(self, other): + return self.component_name < other.component_name + + @property + def running(self): + return self._running + + def start(self): + raise NotImplementedError() + + def stop(self): + raise NotImplementedError() + + @property + def component(self): + raise NotImplementedError() + + @defer.inlineCallbacks + def _setup(self): + try: + result = yield defer.maybeDeferred(self.start) + self._running = True + defer.returnValue(result) + except (defer.CancelledError, AlreadyQuit): + pass + 
except Exception as err: + log.exception("Error setting up %s", self.component_name or self.__class__.__name__) + raise err + + @defer.inlineCallbacks + def _stop(self): + try: + result = yield defer.maybeDeferred(self.stop) + self._running = False + defer.returnValue(result) + except (defer.CancelledError, AlreadyQuit): + pass + except Exception as err: + log.exception("Error stopping %s", self.__class__.__name__) + raise err diff --git a/lbrynet/daemon/ComponentManager.py b/lbrynet/daemon/ComponentManager.py new file mode 100644 index 000000000..cd4bb84fe --- /dev/null +++ b/lbrynet/daemon/ComponentManager.py @@ -0,0 +1,177 @@ +import logging +from twisted.internet import defer + +from lbrynet.core.Error import ComponentStartConditionNotMet + +log = logging.getLogger(__name__) + + +class RegisteredConditions(object): + conditions = {} + + +class RequiredConditionType(type): + def __new__(mcs, name, bases, newattrs): + klass = type.__new__(mcs, name, bases, newattrs) + if name != "RequiredCondition": + if klass.name in RegisteredConditions.conditions: + raise SyntaxError("already have a component registered for \"%s\"" % klass.name) + RegisteredConditions.conditions[klass.name] = klass + return klass + + +class RequiredCondition(object): + name = "" + component = "" + message = "" + + @staticmethod + def evaluate(component): + raise NotImplementedError() + + __metaclass__ = RequiredConditionType + + +class ComponentManager(object): + default_component_classes = {} + + def __init__(self, reactor=None, analytics_manager=None, skip_components=None, **override_components): + self.skip_components = skip_components or [] + + self.reactor = reactor + self.component_classes = {} + self.components = set() + self.analytics_manager = analytics_manager + + for component_name, component_class in self.default_component_classes.iteritems(): + if component_name in override_components: + component_class = override_components.pop(component_name) + if component_name not in 
self.skip_components: + self.component_classes[component_name] = component_class + + if override_components: + raise SyntaxError("unexpected components: %s" % override_components) + + for component_class in self.component_classes.itervalues(): + self.components.add(component_class(self)) + + @defer.inlineCallbacks + def evaluate_condition(self, condition_name): + if condition_name not in RegisteredConditions.conditions: + raise NameError(condition_name) + condition = RegisteredConditions.conditions[condition_name] + try: + component = self.get_component(condition.component) + result = yield defer.maybeDeferred(condition.evaluate, component) + except Exception as err: + result = False + defer.returnValue((result, "" if result else condition.message)) + + def sort_components(self, reverse=False): + """ + Sort components by requirements + """ + steps = [] + staged = set() + components = set(self.components) + + # components with no requirements + step = [] + for component in set(components): + if not component.depends_on: + step.append(component) + staged.add(component.component_name) + components.remove(component) + + if step: + step.sort() + steps.append(step) + + while components: + step = [] + to_stage = set() + for component in set(components): + reqs_met = 0 + for needed in component.depends_on: + if needed in staged: + reqs_met += 1 + if reqs_met == len(component.depends_on): + step.append(component) + to_stage.add(component.component_name) + components.remove(component) + if step: + step.sort() + staged.update(to_stage) + steps.append(step) + elif components: + raise ComponentStartConditionNotMet("Unresolved dependencies for: %s" % components) + if reverse: + steps.reverse() + return steps + + @defer.inlineCallbacks + def setup(self, **callbacks): + """ + Start Components in sequence sorted by requirements + + :return: (defer.Deferred) + """ + + for component_name, cb in callbacks.iteritems(): + if component_name not in self.component_classes: + raise 
NameError("unknown component: %s" % component_name) + if not callable(cb): + raise ValueError("%s is not callable" % cb) + + def _setup(component): + if component.component_name in callbacks: + d = component._setup() + d.addCallback(callbacks[component.component_name], component) + return d + return component._setup() + + stages = self.sort_components() + for stage in stages: + yield defer.DeferredList([_setup(component) for component in stage]) + + @defer.inlineCallbacks + def stop(self): + """ + Stop Components in reversed startup order + + :return: (defer.Deferred) + """ + stages = self.sort_components(reverse=True) + for stage in stages: + yield defer.DeferredList([component._stop() for component in stage if component.running]) + + def all_components_running(self, *component_names): + """ + Check if components are running + + :return: (bool) True if all specified components are running + """ + components = {component.component_name: component for component in self.components} + for component in component_names: + if component not in components: + raise NameError("%s is not a known Component" % component) + if not components[component].running: + return False + return True + + def get_components_status(self): + """ + List status of all the components, whether they are running or not + + :return: (dict) {(str) component_name: (bool) True is running else False} + """ + return { + component.component_name: component.running + for component in self.components + } + + def get_component(self, component_name): + for component in self.components: + if component.component_name == component_name: + return component.component + raise NameError(component_name) diff --git a/lbrynet/daemon/Components.py b/lbrynet/daemon/Components.py new file mode 100644 index 000000000..acc216567 --- /dev/null +++ b/lbrynet/daemon/Components.py @@ -0,0 +1,552 @@ +import os +import logging +import miniupnpc +from twisted.internet import defer, threads, reactor, error + +from lbrynet import 
conf +from lbrynet.core.Session import Session +from lbrynet.core.StreamDescriptor import StreamDescriptorIdentifier, EncryptedFileStreamType +from lbrynet.core.Wallet import LBRYumWallet +from lbrynet.core.server.BlobRequestHandler import BlobRequestHandlerFactory +from lbrynet.core.server.ServerProtocol import ServerProtocolFactory +from lbrynet.daemon.Component import Component +from lbrynet.daemon.ExchangeRateManager import ExchangeRateManager +from lbrynet.database.storage import SQLiteStorage +from lbrynet.dht import node, hashannouncer +from lbrynet.file_manager.EncryptedFileManager import EncryptedFileManager +from lbrynet.lbry_file.client.EncryptedFileDownloader import EncryptedFileSaverFactory +from lbrynet.lbry_file.client.EncryptedFileOptions import add_lbry_file_to_sd_identifier +from lbrynet.reflector import ServerFactory as reflector_server_factory + +from lbrynet.core.utils import generate_id + +log = logging.getLogger(__name__) + +# settings must be initialized before this file is imported + +DATABASE_COMPONENT = "database" +WALLET_COMPONENT = "wallet" +SESSION_COMPONENT = "session" +DHT_COMPONENT = "dht" +HASH_ANNOUNCER_COMPONENT = "hash_announcer" +STREAM_IDENTIFIER_COMPONENT = "stream_identifier" +FILE_MANAGER_COMPONENT = "file_manager" +PEER_PROTOCOL_SERVER_COMPONENT = "peer_protocol_server" +REFLECTOR_COMPONENT = "reflector" +UPNP_COMPONENT = "upnp" +EXCHANGE_RATE_MANAGER_COMPONENT = "exchange_rate_manager" + + +class ConfigSettings(object): + @staticmethod + def get_conf_setting(setting_name): + return conf.settings[setting_name] + + @staticmethod + def get_blobfiles_dir(): + if conf.settings['BLOBFILES_DIR'] == "blobfiles": + return os.path.join(GCS("data_dir"), "blobfiles") + else: + log.info("Using non-default blobfiles directory: %s", conf.settings['BLOBFILES_DIR']) + return conf.settings['BLOBFILES_DIR'] + + @staticmethod + def get_node_id(): + return conf.settings.node_id + + @staticmethod + def get_external_ip(): + from 
lbrynet.core.system_info import get_platform + platform = get_platform(get_ip=True) + return platform['ip'] + + +# Shorthand for common ConfigSettings methods +CS = ConfigSettings +GCS = ConfigSettings.get_conf_setting + + +class DatabaseComponent(Component): + component_name = DATABASE_COMPONENT + + def __init__(self, component_manager): + Component.__init__(self, component_manager) + self.storage = None + + @property + def component(self): + return self.storage + + @staticmethod + def get_current_db_revision(): + return 9 + + @staticmethod + def get_revision_filename(): + return conf.settings.get_db_revision_filename() + + @staticmethod + def _write_db_revision_file(version_num): + with open(conf.settings.get_db_revision_filename(), mode='w') as db_revision: + db_revision.write(str(version_num)) + + @defer.inlineCallbacks + def start(self): + # check directories exist, create them if they don't + log.info("Loading databases") + + if not os.path.exists(GCS('download_directory')): + os.mkdir(GCS('download_directory')) + + if not os.path.exists(GCS('data_dir')): + os.mkdir(GCS('data_dir')) + self._write_db_revision_file(self.get_current_db_revision()) + log.debug("Created the db revision file: %s", self.get_revision_filename()) + + if not os.path.exists(CS.get_blobfiles_dir()): + os.mkdir(CS.get_blobfiles_dir()) + log.debug("Created the blobfile directory: %s", str(CS.get_blobfiles_dir())) + + if not os.path.exists(self.get_revision_filename()): + log.warning("db_revision file not found. 
Creating it") + self._write_db_revision_file(self.get_current_db_revision()) + + # check the db migration and run any needed migrations + with open(self.get_revision_filename(), "r") as revision_read_handle: + old_revision = int(revision_read_handle.read().strip()) + + if old_revision > self.get_current_db_revision(): + raise Exception('This version of lbrynet is not compatible with the database\n' + 'Your database is revision %i, expected %i' % + (old_revision, self.get_current_db_revision())) + if old_revision < self.get_current_db_revision(): + from lbrynet.database.migrator import dbmigrator + log.info("Upgrading your databases (revision %i to %i)", old_revision, self.get_current_db_revision()) + yield threads.deferToThread( + dbmigrator.migrate_db, GCS('data_dir'), old_revision, self.get_current_db_revision() + ) + self._write_db_revision_file(self.get_current_db_revision()) + log.info("Finished upgrading the databases.") + + # start SQLiteStorage + self.storage = SQLiteStorage(GCS('data_dir')) + yield self.storage.setup() + + @defer.inlineCallbacks + def stop(self): + yield self.storage.stop() + self.storage = None + + +class WalletComponent(Component): + component_name = WALLET_COMPONENT + depends_on = [DATABASE_COMPONENT] + + def __init__(self, component_manager): + Component.__init__(self, component_manager) + self.wallet = None + + @property + def component(self): + return self.wallet + + @defer.inlineCallbacks + def start(self): + storage = self.component_manager.get_component(DATABASE_COMPONENT) + wallet_type = GCS('wallet') + + if wallet_type == conf.LBRYCRD_WALLET: + raise ValueError('LBRYcrd Wallet is no longer supported') + elif wallet_type == conf.LBRYUM_WALLET: + + log.info("Using lbryum wallet") + + lbryum_servers = {address: {'t': str(port)} + for address, port in GCS('lbryum_servers')} + + config = { + 'auto_connect': True, + 'chain': GCS('blockchain_name'), + 'default_servers': lbryum_servers + } + + if 'use_keyring' in conf.settings: + 
config['use_keyring'] = GCS('use_keyring') + if conf.settings['lbryum_wallet_dir']: + config['lbryum_path'] = GCS('lbryum_wallet_dir') + self.wallet = LBRYumWallet(storage, config) + yield self.wallet.start() + else: + raise ValueError('Wallet Type {} is not valid'.format(wallet_type)) + + @defer.inlineCallbacks + def stop(self): + yield self.wallet.stop() + self.wallet = None + + +class SessionComponent(Component): + component_name = SESSION_COMPONENT + depends_on = [DATABASE_COMPONENT, WALLET_COMPONENT, DHT_COMPONENT, HASH_ANNOUNCER_COMPONENT] + + def __init__(self, component_manager): + Component.__init__(self, component_manager) + self.session = None + + @property + def component(self): + return self.session + + @defer.inlineCallbacks + def start(self): + self.session = Session( + GCS('data_rate'), + db_dir=GCS('data_dir'), + node_id=CS.get_node_id(), + blob_dir=CS.get_blobfiles_dir(), + dht_node=self.component_manager.get_component(DHT_COMPONENT), + hash_announcer=self.component_manager.get_component(HASH_ANNOUNCER_COMPONENT), + dht_node_port=GCS('dht_node_port'), + known_dht_nodes=GCS('known_dht_nodes'), + peer_port=GCS('peer_port'), + wallet=self.component_manager.get_component(WALLET_COMPONENT), + external_ip=CS.get_external_ip(), + storage=self.component_manager.get_component(DATABASE_COMPONENT) + ) + yield self.session.setup() + + @defer.inlineCallbacks + def stop(self): + yield self.session.shut_down() + + +class DHTComponent(Component): + component_name = DHT_COMPONENT + depends_on = [UPNP_COMPONENT] + + def __init__(self, component_manager): + Component.__init__(self, component_manager) + self.dht_node = None + self.upnp_component = None + self.udp_port, self.peer_port = None, None + + @property + def component(self): + return self.dht_node + + @defer.inlineCallbacks + def start(self): + self.upnp_component = self.component_manager.get_component(UPNP_COMPONENT) + self.peer_port, self.udp_port = self.upnp_component.get_redirects() + node_id = 
CS.get_node_id() + if node_id is None: + node_id = generate_id() + + self.dht_node = node.Node( + node_id=node_id, + udpPort=self.udp_port, + externalIP=CS.get_external_ip(), + peerPort=self.peer_port + ) + + self.dht_node.start_listening() + yield self.dht_node._protocol._listening + d = self.dht_node.joinNetwork(GCS('known_dht_nodes')) + d.addCallback(lambda _: self.dht_node.start_looping_calls()) + d.addCallback(lambda _: log.info("Joined the dht")) + log.info("Started the dht") + + @defer.inlineCallbacks + def stop(self): + yield self.dht_node.stop() + + +class HashAnnouncerComponent(Component): + component_name = HASH_ANNOUNCER_COMPONENT + depends_on = [DHT_COMPONENT, DATABASE_COMPONENT] + + def __init__(self, component_manager): + Component.__init__(self, component_manager) + self.hash_announcer = None + + @property + def component(self): + return self.hash_announcer + + @defer.inlineCallbacks + def start(self): + storage = self.component_manager.get_component(DATABASE_COMPONENT) + dht_node = self.component_manager.get_component(DHT_COMPONENT) + self.hash_announcer = hashannouncer.DHTHashAnnouncer(dht_node, storage) + yield self.hash_announcer.start() + + @defer.inlineCallbacks + def stop(self): + yield self.hash_announcer.stop() + + +class StreamIdentifierComponent(Component): + component_name = STREAM_IDENTIFIER_COMPONENT + depends_on = [SESSION_COMPONENT] + + def __init__(self, component_manager): + Component.__init__(self, component_manager) + self.sd_identifier = StreamDescriptorIdentifier() + + @property + def component(self): + return self.sd_identifier + + @defer.inlineCallbacks + def start(self): + session = self.component_manager.get_component(SESSION_COMPONENT) + add_lbry_file_to_sd_identifier(self.sd_identifier) + file_saver_factory = EncryptedFileSaverFactory( + session.peer_finder, + session.rate_limiter, + session.blob_manager, + session.storage, + session.wallet, + GCS('download_directory') + ) + yield 
self.sd_identifier.add_stream_downloader_factory(EncryptedFileStreamType, file_saver_factory) + + def stop(self): + pass + + +class FileManagerComponent(Component): + component_name = FILE_MANAGER_COMPONENT + depends_on = [SESSION_COMPONENT, STREAM_IDENTIFIER_COMPONENT] + + def __init__(self, component_manager): + Component.__init__(self, component_manager) + self.file_manager = None + + @property + def component(self): + return self.file_manager + + @defer.inlineCallbacks + def start(self): + session = self.component_manager.get_component(SESSION_COMPONENT) + sd_identifier = self.component_manager.get_component(STREAM_IDENTIFIER_COMPONENT) + log.info('Starting the file manager') + self.file_manager = EncryptedFileManager(session, sd_identifier) + yield self.file_manager.setup() + log.info('Done setting up file manager') + + @defer.inlineCallbacks + def stop(self): + yield self.file_manager.stop() + + +class PeerProtocolServerComponent(Component): + component_name = PEER_PROTOCOL_SERVER_COMPONENT + depends_on = [SESSION_COMPONENT, UPNP_COMPONENT] + + def __init__(self, component_manager): + Component.__init__(self, component_manager) + self.lbry_server_port = None + + @property + def component(self): + return self.lbry_server_port + + @defer.inlineCallbacks + def start(self): + query_handlers = {} + upnp_component = self.component_manager.get_component(UPNP_COMPONENT) + peer_port, udp_port = upnp_component.get_redirects() + session = self.component_manager.get_component(SESSION_COMPONENT) + + handlers = [ + BlobRequestHandlerFactory( + session.blob_manager, + session.wallet, + session.payment_rate_manager, + self.component_manager.analytics_manager + ), + session.wallet.get_wallet_info_query_handler_factory(), + ] + + for handler in handlers: + query_id = handler.get_primary_query_identifier() + query_handlers[query_id] = handler + + if peer_port is not None: + server_factory = ServerProtocolFactory(session.rate_limiter, query_handlers, session.peer_manager) + + 
try: + log.info("Peer protocol listening on TCP %d", peer_port) + self.lbry_server_port = yield reactor.listenTCP(peer_port, server_factory) + except error.CannotListenError as e: + import traceback + log.error("Couldn't bind to port %d. Visit lbry.io/faq/how-to-change-port for" + " more details.", peer_port) + log.error("%s", traceback.format_exc()) + raise ValueError("%s lbrynet may already be running on your computer." % str(e)) + + @defer.inlineCallbacks + def stop(self): + if self.lbry_server_port is not None: + self.lbry_server_port, old_port = None, self.lbry_server_port + log.info('Stop listening on port %s', old_port.port) + yield old_port.stopListening() + + +class ReflectorComponent(Component): + component_name = REFLECTOR_COMPONENT + depends_on = [SESSION_COMPONENT, FILE_MANAGER_COMPONENT] + + def __init__(self, component_manager): + Component.__init__(self, component_manager) + self.reflector_server_port = GCS('reflector_port') + self.reflector_server = None + + @property + def component(self): + return self.reflector_server + + @defer.inlineCallbacks + def start(self): + log.info("Starting reflector server") + + session = self.component_manager.get_component(SESSION_COMPONENT) + file_manager = self.component_manager.get_component(FILE_MANAGER_COMPONENT) + reflector_factory = reflector_server_factory(session.peer_manager, session.blob_manager, file_manager) + + try: + self.reflector_server = yield reactor.listenTCP(self.reflector_server_port, reflector_factory) + log.info('Started reflector on port %s', self.reflector_server_port) + except error.CannotListenError as e: + log.exception("Couldn't bind reflector to port %d", self.reflector_server_port) + raise ValueError("{} lbrynet may already be running on your computer.".format(e)) + + @defer.inlineCallbacks + def stop(self): + if self.reflector_server is not None: + log.info("Stopping reflector server") + self.reflector_server, p = None, self.reflector_server + yield p.stopListening() + + +class 
UPnPComponent(Component): + component_name = UPNP_COMPONENT + + def __init__(self, component_manager): + Component.__init__(self, component_manager) + self.peer_port = GCS('peer_port') + self.dht_node_port = GCS('dht_node_port') + self.use_upnp = GCS('use_upnp') + self.external_ip = CS.get_external_ip() + self.upnp_redirects = [] + + @property + def component(self): + return self + + def get_redirects(self): + return self.peer_port, self.dht_node_port + + def start(self): + log.debug("In _try_upnp") + + def get_free_port(upnp, port, protocol): + # returns an existing mapping if it exists + mapping = upnp.getspecificportmapping(port, protocol) + if not mapping: + return port + if upnp.lanaddr == mapping[0]: + return mapping[1] + return get_free_port(upnp, port + 1, protocol) + + def get_port_mapping(upnp, port, protocol, description): + # try to map to the requested port, if there is already a mapping use the next external + # port available + if protocol not in ['UDP', 'TCP']: + raise Exception("invalid protocol") + port = get_free_port(upnp, port, protocol) + if isinstance(port, tuple): + log.info("Found existing UPnP redirect %s:%i (%s) to %s:%i, using it", + self.external_ip, port, protocol, upnp.lanaddr, port) + return port + upnp.addportmapping(port, protocol, upnp.lanaddr, port, + description, '') + log.info("Set UPnP redirect %s:%i (%s) to %s:%i", self.external_ip, port, + protocol, upnp.lanaddr, port) + return port + + def threaded_try_upnp(): + if self.use_upnp is False: + log.debug("Not using upnp") + return False + u = miniupnpc.UPnP() + num_devices_found = u.discover() + if num_devices_found > 0: + u.selectigd() + external_ip = u.externalipaddress() + if external_ip != '0.0.0.0' and not self.external_ip: + # best not to rely on this external ip, the router can be behind layers of NATs + self.external_ip = external_ip + if self.peer_port: + self.peer_port = get_port_mapping(u, self.peer_port, 'TCP', 'LBRY peer port') + 
self.upnp_redirects.append((self.peer_port, 'TCP')) + if self.dht_node_port: + self.dht_node_port = get_port_mapping(u, self.dht_node_port, 'UDP', 'LBRY DHT port') + self.upnp_redirects.append((self.dht_node_port, 'UDP')) + return True + return False + + def upnp_failed(err): + log.warning("UPnP failed. Reason: %s", err.getErrorMessage()) + return False + + d = threads.deferToThread(threaded_try_upnp) + d.addErrback(upnp_failed) + return d + + def stop(self): + log.info("Unsetting upnp for session") + + def threaded_unset_upnp(): + if self.use_upnp is False: + log.debug("Not using upnp") + return False + u = miniupnpc.UPnP() + num_devices_found = u.discover() + if num_devices_found > 0: + u.selectigd() + for port, protocol in self.upnp_redirects: + if u.getspecificportmapping(port, protocol) is None: + log.warning( + "UPnP redirect for %s %d was removed by something else.", + protocol, port) + else: + u.deleteportmapping(port, protocol) + log.info("Removed UPnP redirect for %s %d.", protocol, port) + self.upnp_redirects = [] + + d = threads.deferToThread(threaded_unset_upnp) + d.addErrback(lambda err: str(err)) + return d + + +class ExchangeRateManagerComponent(Component): + component_name = EXCHANGE_RATE_MANAGER_COMPONENT + + def __init__(self, component_manager): + Component.__init__(self, component_manager) + self.exchange_rate_manager = ExchangeRateManager() + + @property + def component(self): + return self.exchange_rate_manager + + @defer.inlineCallbacks + def start(self): + yield self.exchange_rate_manager.start() + + @defer.inlineCallbacks + def stop(self): + yield self.exchange_rate_manager.stop() diff --git a/lbrynet/daemon/Daemon.py b/lbrynet/daemon/Daemon.py index 85969e07c..b80ba3581 100644 --- a/lbrynet/daemon/Daemon.py +++ b/lbrynet/daemon/Daemon.py @@ -1,18 +1,16 @@ +# coding=utf-8 import binascii import logging.handlers import mimetypes import os -import base58 import requests import urllib import json import textwrap -import signal -import six 
from copy import deepcopy from decimal import Decimal, InvalidOperation from twisted.web import server -from twisted.internet import defer, threads, error, reactor +from twisted.internet import defer, reactor from twisted.internet.task import LoopingCall from twisted.python.failure import Failure @@ -25,28 +23,18 @@ from lbryschema.decode import smart_decode # TODO: importing this when internet is disabled raises a socket.gaierror from lbrynet.core.system_info import get_lbrynet_version -from lbrynet.database.storage import SQLiteStorage from lbrynet import conf -from lbrynet.conf import LBRYCRD_WALLET, LBRYUM_WALLET from lbrynet.reflector import reupload -from lbrynet.reflector import ServerFactory as reflector_server_factory -from lbrynet.core.log_support import configure_loggly_handler -from lbrynet.lbry_file.client.EncryptedFileDownloader import EncryptedFileSaverFactory -from lbrynet.lbry_file.client.EncryptedFileOptions import add_lbry_file_to_sd_identifier -from lbrynet.file_manager.EncryptedFileManager import EncryptedFileManager +from lbrynet.daemon.Components import WALLET_COMPONENT, DATABASE_COMPONENT, SESSION_COMPONENT, DHT_COMPONENT +from lbrynet.daemon.Components import STREAM_IDENTIFIER_COMPONENT, FILE_MANAGER_COMPONENT +from lbrynet.daemon.Components import EXCHANGE_RATE_MANAGER_COMPONENT +from lbrynet.daemon.ComponentManager import RequiredCondition from lbrynet.daemon.Downloader import GetStream from lbrynet.daemon.Publisher import Publisher -from lbrynet.daemon.ExchangeRateManager import ExchangeRateManager from lbrynet.daemon.auth.server import AuthJSONRPCServer from lbrynet.core.PaymentRateManager import OnlyFreePaymentsManager from lbrynet.core import utils, system_info -from lbrynet.core.StreamDescriptor import StreamDescriptorIdentifier, download_sd_blob -from lbrynet.core.StreamDescriptor import EncryptedFileStreamType -from lbrynet.core.Session import Session -from lbrynet.core.Wallet import LBRYumWallet -from 
lbrynet.core.looping_call_manager import LoopingCallManager -from lbrynet.core.server.BlobRequestHandler import BlobRequestHandlerFactory -from lbrynet.core.server.ServerProtocol import ServerProtocolFactory +from lbrynet.core.StreamDescriptor import download_sd_blob from lbrynet.core.Error import InsufficientFundsError, UnknownNameError from lbrynet.core.Error import DownloadDataTimeout, DownloadSDTimeout from lbrynet.core.Error import NullFundsError, NegativeFundsError @@ -56,25 +44,9 @@ from lbrynet.core.SinglePeerDownloader import SinglePeerDownloader from lbrynet.core.client.StandaloneBlobDownloader import StandaloneBlobDownloader log = logging.getLogger(__name__) +requires = AuthJSONRPCServer.requires INITIALIZING_CODE = 'initializing' -LOADING_DB_CODE = 'loading_db' -LOADING_WALLET_CODE = 'loading_wallet' -LOADING_FILE_MANAGER_CODE = 'loading_file_manager' -LOADING_SERVER_CODE = 'loading_server' -STARTED_CODE = 'started' -WAITING_FOR_FIRST_RUN_CREDITS = 'waiting_for_credits' -WAITING_FOR_UNLOCK = 'waiting_for_wallet_unlock' -STARTUP_STAGES = [ - (INITIALIZING_CODE, 'Initializing'), - (LOADING_DB_CODE, 'Loading databases'), - (LOADING_WALLET_CODE, 'Catching up with the blockchain'), - (LOADING_FILE_MANAGER_CODE, 'Setting up file manager'), - (LOADING_SERVER_CODE, 'Starting lbrynet'), - (STARTED_CODE, 'Started lbrynet'), - (WAITING_FOR_FIRST_RUN_CREDITS, 'Waiting for first run credits'), - (WAITING_FOR_UNLOCK, 'Waiting for user to unlock the wallet using the wallet_unlock command') -] # TODO: make this consistent with the stages in Downloader.py DOWNLOAD_METADATA_CODE = 'downloading_metadata' @@ -103,6 +75,7 @@ DIRECTION_ASCENDING = 'asc' DIRECTION_DESCENDING = 'desc' DIRECTIONS = DIRECTION_ASCENDING, DIRECTION_DESCENDING + class IterableContainer(object): def __iter__(self): for attr in dir(self): @@ -118,8 +91,8 @@ class IterableContainer(object): class Checker(object): """The looping calls the daemon runs""" - INTERNET_CONNECTION = 
'internet_connection_checker' - CONNECTION_STATUS = 'connection_status_checker' + INTERNET_CONNECTION = 'internet_connection_checker', 3600 + # CONNECTION_STATUS = 'connection_status_checker' class _FileID(IterableContainer): @@ -173,435 +146,100 @@ def sort_claim_results(claims): return claims +def is_first_run(): + if os.path.isfile(conf.settings.get_db_revision_filename()): + return False + if os.path.isfile(os.path.join(conf.settings['data_dir'], 'lbrynet.sqlite')): + return False + if os.path.isfile(os.path.join(conf.settings['lbryum_wallet_dir'], 'blockchain_headers')): + return False + return True + + +DHT_HAS_CONTACTS = "dht_has_contacts" +WALLET_IS_UNLOCKED = "wallet_is_unlocked" + + +class DHTHasContacts(RequiredCondition): + name = DHT_HAS_CONTACTS + component = DHT_COMPONENT + message = "your node is not connected to the dht" + + @staticmethod + def evaluate(component): + return len(component.contacts) > 0 + + +class WalletIsLocked(RequiredCondition): + name = WALLET_IS_UNLOCKED + component = WALLET_COMPONENT + message = "your wallet is locked" + + @staticmethod + def evaluate(component): + return component.check_locked() + + class Daemon(AuthJSONRPCServer): """ LBRYnet daemon, a jsonrpc interface to lbry functions """ - allowed_during_startup = [ - 'daemon_stop', 'status', 'version', 'wallet_unlock' - ] + component_attributes = { + EXCHANGE_RATE_MANAGER_COMPONENT: "exchange_rate_manager", + DATABASE_COMPONENT: "storage", + SESSION_COMPONENT: "session", + WALLET_COMPONENT: "wallet", + DHT_COMPONENT: "dht_node", + STREAM_IDENTIFIER_COMPONENT: "sd_identifier", + FILE_MANAGER_COMPONENT: "file_manager", + } - def __init__(self, analytics_manager): - AuthJSONRPCServer.__init__(self, conf.settings['use_auth_http']) - self.db_dir = conf.settings['data_dir'] - self.storage = SQLiteStorage(self.db_dir) - self.download_directory = conf.settings['download_directory'] - if conf.settings['BLOBFILES_DIR'] == "blobfiles": - self.blobfile_dir = 
os.path.join(self.db_dir, "blobfiles") - else: - log.info("Using non-default blobfiles directory: %s", conf.settings['BLOBFILES_DIR']) - self.blobfile_dir = conf.settings['BLOBFILES_DIR'] - self.data_rate = conf.settings['data_rate'] - self.max_key_fee = conf.settings['max_key_fee'] - self.disable_max_key_fee = conf.settings['disable_max_key_fee'] - self.download_timeout = conf.settings['download_timeout'] - self.run_reflector_server = conf.settings['run_reflector_server'] - self.wallet_type = conf.settings['wallet'] - self.delete_blobs_on_remove = conf.settings['delete_blobs_on_remove'] - self.peer_port = conf.settings['peer_port'] - self.reflector_port = conf.settings['reflector_port'] - self.dht_node_port = conf.settings['dht_node_port'] - self.use_upnp = conf.settings['use_upnp'] - self.auto_renew_claim_height_delta = conf.settings['auto_renew_claim_height_delta'] + def __init__(self, analytics_manager=None, component_manager=None): + to_skip = list(conf.settings['components_to_skip']) + if 'reflector' not in to_skip and not conf.settings['run_reflector_server']: + to_skip.append('reflector') + looping_calls = { + Checker.INTERNET_CONNECTION[0]: (LoopingCall(CheckInternetConnection(self)), + Checker.INTERNET_CONNECTION[1]) + } + AuthJSONRPCServer.__init__(self, analytics_manager=analytics_manager, component_manager=component_manager, + use_authentication=conf.settings['use_auth_http'], to_skip=to_skip, + looping_calls=looping_calls) + self.is_first_run = is_first_run() - self.startup_status = STARTUP_STAGES[0] + # TODO: move this to a component self.connected_to_internet = True self.connection_status_code = None - self.platform = None - self.current_db_revision = 9 - self.db_revision_file = conf.settings.get_db_revision_filename() + + # components + # TODO: delete these, get the components where needed + self.storage = None + self.dht_node = None + self.wallet = None + self.sd_identifier = None self.session = None - self._session_id = 
conf.settings.get_session_id() - # TODO: this should probably be passed into the daemon, or - # possibly have the entire log upload functionality taken out - # of the daemon, but I don't want to deal with that now + self.file_manager = None + self.exchange_rate_manager = None - self.analytics_manager = analytics_manager - self.node_id = conf.settings.node_id - - self.wallet_user = None - self.wallet_password = None - self.query_handlers = {} - self.waiting_on = {} + # TODO: delete this self.streams = {} - self.exchange_rate_manager = ExchangeRateManager() - calls = { - Checker.INTERNET_CONNECTION: LoopingCall(CheckInternetConnection(self)), - Checker.CONNECTION_STATUS: LoopingCall(self._update_connection_status), - } - self.looping_call_manager = LoopingCallManager(calls) - self.sd_identifier = StreamDescriptorIdentifier() - self.lbry_file_manager = None @defer.inlineCallbacks def setup(self): - reactor.addSystemEventTrigger('before', 'shutdown', self._shutdown) - configure_loggly_handler() - log.info("Starting lbrynet-daemon") - - self.looping_call_manager.start(Checker.INTERNET_CONNECTION, 3600) - self.looping_call_manager.start(Checker.CONNECTION_STATUS, 30) - self.exchange_rate_manager.start() - - yield self._initial_setup() - yield threads.deferToThread(self._setup_data_directory) - migrated = yield self._check_db_migration() - yield self.storage.setup() - yield self._get_session() - yield self._check_wallet_locked() - yield self._start_analytics() - yield add_lbry_file_to_sd_identifier(self.sd_identifier) - yield self._setup_stream_identifier() - yield self._setup_lbry_file_manager() - yield self._setup_query_handlers() - yield self._setup_server() - log.info("Starting balance: " + str(self.session.wallet.get_balance())) - self.announced_startup = True - self.startup_status = STARTUP_STAGES[5] + log.info("Platform: %s", json.dumps(system_info.get_platform())) + yield super(Daemon, self).setup() log.info("Started lbrynet-daemon") - ### - # this should be 
removed with the next db revision - if migrated: - missing_channel_claim_ids = yield self.storage.get_unknown_certificate_ids() - while missing_channel_claim_ids: # in case there are a crazy amount lets batch to be safe - batch = missing_channel_claim_ids[:100] - _ = yield self.session.wallet.get_claims_by_ids(*batch) - missing_channel_claim_ids = missing_channel_claim_ids[100:] - ### - - self._auto_renew() - - def _get_platform(self): - if self.platform is None: - self.platform = system_info.get_platform() - return self.platform - - def _initial_setup(self): - def _log_platform(): - log.info("Platform: %s", json.dumps(self._get_platform())) - return defer.succeed(None) - - d = _log_platform() - return d - - def _check_network_connection(self): - self.connected_to_internet = utils.check_connection() - - def _update_connection_status(self): - self.connection_status_code = CONNECTION_STATUS_CONNECTED - - if not self.connected_to_internet: - self.connection_status_code = CONNECTION_STATUS_NETWORK - - @defer.inlineCallbacks - def _auto_renew(self): - # automatically renew claims - # auto renew is turned off if 0 or some negative number - if self.auto_renew_claim_height_delta < 1: - defer.returnValue(None) - if not self.session.wallet.network.get_remote_height(): - log.warning("Failed to get remote height, aborting auto renew") - defer.returnValue(None) - log.debug("Renewing claim") - h = self.session.wallet.network.get_remote_height() + self.auto_renew_claim_height_delta - results = yield self.session.wallet.claim_renew_all_before_expiration(h) - for outpoint, result in results.iteritems(): - if result['success']: - log.info("Renewed claim at outpoint:%s claim ID:%s, paid fee:%s", - outpoint, result['claim_id'], result['fee']) - else: - log.info("Failed to renew claim at outpoint:%s, reason:%s", - outpoint, result['reason']) - - def _start_server(self): - if self.peer_port is not None: - server_factory = ServerProtocolFactory(self.session.rate_limiter, - 
self.query_handlers, - self.session.peer_manager) - - try: - log.info("Peer protocol listening on TCP %d", self.peer_port) - self.lbry_server_port = reactor.listenTCP(self.peer_port, server_factory) - except error.CannotListenError as e: - import traceback - log.error("Couldn't bind to port %d. Visit lbry.io/faq/how-to-change-port for" - " more details.", self.peer_port) - log.error("%s", traceback.format_exc()) - raise ValueError("%s lbrynet may already be running on your computer." % str(e)) - return defer.succeed(True) - - def _start_reflector(self): - if self.run_reflector_server: - log.info("Starting reflector server") - if self.reflector_port is not None: - reflector_factory = reflector_server_factory( - self.session.peer_manager, - self.session.blob_manager, - self.lbry_file_manager - ) - try: - self.reflector_server_port = reactor.listenTCP(self.reflector_port, - reflector_factory) - log.info('Started reflector on port %s', self.reflector_port) - except error.CannotListenError as e: - log.exception("Couldn't bind reflector to port %d", self.reflector_port) - raise ValueError( - "{} lbrynet may already be running on your computer.".format(e)) - return defer.succeed(True) - - def _stop_reflector(self): - if self.run_reflector_server: - log.info("Stopping reflector server") - try: - if self.reflector_server_port is not None: - self.reflector_server_port, p = None, self.reflector_server_port - return defer.maybeDeferred(p.stopListening) - except AttributeError: - return defer.succeed(True) - return defer.succeed(True) - - def _stop_file_manager(self): - if self.lbry_file_manager: - self.lbry_file_manager.stop() - return defer.succeed(True) - - def _stop_server(self): - try: - if self.lbry_server_port is not None: - self.lbry_server_port, old_port = None, self.lbry_server_port - log.info('Stop listening on port %s', old_port.port) - return defer.maybeDeferred(old_port.stopListening) - else: - return defer.succeed(True) - except AttributeError: - return 
defer.succeed(True) - - def _setup_server(self): - self.startup_status = STARTUP_STAGES[4] - d = self._start_server() - d.addCallback(lambda _: self._start_reflector()) - return d - - def _setup_query_handlers(self): - handlers = [ - BlobRequestHandlerFactory( - self.session.blob_manager, - self.session.wallet, - self.session.payment_rate_manager, - self.analytics_manager - ), - self.session.wallet.get_wallet_info_query_handler_factory(), - ] - return self._add_query_handlers(handlers) - - def _add_query_handlers(self, query_handlers): - for handler in query_handlers: - query_id = handler.get_primary_query_identifier() - self.query_handlers[query_id] = handler - return defer.succeed(None) - - @staticmethod - def _already_shutting_down(sig_num, frame): - log.info("Already shutting down") - def _stop_streams(self): """stop pending GetStream downloads""" for sd_hash, stream in self.streams.iteritems(): stream.cancel(reason="daemon shutdown") def _shutdown(self): - # ignore INT/TERM signals once shutdown has started - signal.signal(signal.SIGINT, self._already_shutting_down) - signal.signal(signal.SIGTERM, self._already_shutting_down) - - log.info("Closing lbrynet session") - log.info("Status at time of shutdown: " + self.startup_status[0]) - self._stop_streams() - self.looping_call_manager.shutdown() - if self.analytics_manager: - self.analytics_manager.shutdown() - - d = self._stop_server() - d.addErrback(log.fail(), 'Failure while shutting down') - d.addCallback(lambda _: self._stop_reflector()) - d.addErrback(log.fail(), 'Failure while shutting down') - d.addCallback(lambda _: self._stop_file_manager()) - d.addErrback(log.fail(), 'Failure while shutting down') - if self.session is not None: - d.addCallback(lambda _: self.session.shut_down()) - d.addErrback(log.fail(), 'Failure while shutting down') - return d - - def _update_settings(self, settings): - setting_types = { - 'download_directory': str, - 'data_rate': float, - 'download_timeout': int, - 'peer_port': 
int, - 'max_key_fee': dict, - 'use_upnp': bool, - 'run_reflector_server': bool, - 'cache_time': int, - 'reflect_uploads': bool, - 'share_usage_data': bool, - 'disable_max_key_fee': bool, - 'peer_search_timeout': int, - 'sd_download_timeout': int, - 'auto_renew_claim_height_delta': int - } - - for key, setting_type in setting_types.iteritems(): - if key in settings: - if isinstance(settings[key], setting_type): - conf.settings.update({key: settings[key]}, - data_types=(conf.TYPE_RUNTIME, conf.TYPE_PERSISTED)) - elif setting_type is dict and isinstance(settings[key], six.string_types): - decoded = json.loads(str(settings[key])) - conf.settings.update({key: decoded}, - data_types=(conf.TYPE_RUNTIME, conf.TYPE_PERSISTED)) - else: - converted = setting_type(settings[key]) - conf.settings.update({key: converted}, - data_types=(conf.TYPE_RUNTIME, conf.TYPE_PERSISTED)) - conf.settings.save_conf_file_settings() - - self.data_rate = conf.settings['data_rate'] - self.max_key_fee = conf.settings['max_key_fee'] - self.disable_max_key_fee = conf.settings['disable_max_key_fee'] - self.download_directory = conf.settings['download_directory'] - self.download_timeout = conf.settings['download_timeout'] - - return defer.succeed(True) - - def _write_db_revision_file(self, version_num): - with open(self.db_revision_file, mode='w') as db_revision: - db_revision.write(str(version_num)) - - def _setup_data_directory(self): - old_revision = 1 - self.startup_status = STARTUP_STAGES[1] - log.info("Loading databases") - if not os.path.exists(self.download_directory): - os.mkdir(self.download_directory) - if not os.path.exists(self.db_dir): - os.mkdir(self.db_dir) - self._write_db_revision_file(self.current_db_revision) - log.debug("Created the db revision file: %s", self.db_revision_file) - if not os.path.exists(self.blobfile_dir): - os.mkdir(self.blobfile_dir) - log.debug("Created the blobfile directory: %s", str(self.blobfile_dir)) - if not os.path.exists(self.db_revision_file): - 
log.warning("db_revision file not found. Creating it") - self._write_db_revision_file(self.current_db_revision) - - @defer.inlineCallbacks - def _check_db_migration(self): - old_revision = 1 - migrated = False - if os.path.exists(self.db_revision_file): - with open(self.db_revision_file, "r") as revision_read_handle: - old_revision = int(revision_read_handle.read().strip()) - - if old_revision > self.current_db_revision: - raise Exception('This version of lbrynet is not compatible with the database\n' - 'Your database is revision %i, expected %i' % - (old_revision, self.current_db_revision)) - if old_revision < self.current_db_revision: - from lbrynet.database.migrator import dbmigrator - log.info("Upgrading your databases (revision %i to %i)", old_revision, self.current_db_revision) - yield threads.deferToThread( - dbmigrator.migrate_db, self.db_dir, old_revision, self.current_db_revision - ) - self._write_db_revision_file(self.current_db_revision) - log.info("Finished upgrading the databases.") - migrated = True - defer.returnValue(migrated) - - @defer.inlineCallbacks - def _setup_lbry_file_manager(self): - log.info('Starting the file manager') - self.startup_status = STARTUP_STAGES[3] - self.lbry_file_manager = EncryptedFileManager(self.session, self.sd_identifier) - yield self.lbry_file_manager.setup() - log.info('Done setting up file manager') - - def _start_analytics(self): - if not self.analytics_manager.is_started: - self.analytics_manager.start() - - def _get_session(self): - def get_wallet(): - if self.wallet_type == LBRYCRD_WALLET: - raise ValueError('LBRYcrd Wallet is no longer supported') - elif self.wallet_type == LBRYUM_WALLET: - - log.info("Using lbryum wallet") - - lbryum_servers = {address: {'t': str(port)} - for address, port in conf.settings['lbryum_servers']} - - config = { - 'auto_connect': True, - 'chain': conf.settings['blockchain_name'], - 'default_servers': lbryum_servers - } - - if 'use_keyring' in conf.settings: - config['use_keyring'] = 
conf.settings['use_keyring'] - if conf.settings['lbryum_wallet_dir']: - config['lbryum_path'] = conf.settings['lbryum_wallet_dir'] - wallet = LBRYumWallet(self.storage, config) - return defer.succeed(wallet) - else: - raise ValueError('Wallet Type {} is not valid'.format(self.wallet_type)) - - d = get_wallet() - - def create_session(wallet): - self.session = Session( - conf.settings['data_rate'], - db_dir=self.db_dir, - node_id=self.node_id, - blob_dir=self.blobfile_dir, - dht_node_port=self.dht_node_port, - known_dht_nodes=conf.settings['known_dht_nodes'], - peer_port=self.peer_port, - use_upnp=self.use_upnp, - wallet=wallet, - is_generous=conf.settings['is_generous_host'], - external_ip=self.platform['ip'], - storage=self.storage - ) - self.startup_status = STARTUP_STAGES[2] - - d.addCallback(create_session) - d.addCallback(lambda _: self.session.setup()) - return d - - @defer.inlineCallbacks - def _check_wallet_locked(self): - wallet = self.session.wallet - if wallet.wallet.use_encryption: - self.startup_status = STARTUP_STAGES[7] - - yield wallet.check_locked() - - def _setup_stream_identifier(self): - file_saver_factory = EncryptedFileSaverFactory( - self.session.peer_finder, - self.session.rate_limiter, - self.session.blob_manager, - self.session.storage, - self.session.wallet, - self.download_directory - ) - self.sd_identifier.add_stream_downloader_factory(EncryptedFileStreamType, - file_saver_factory) - return defer.succeed(None) + return super(Daemon, self)._shutdown() def _download_blob(self, blob_hash, rate_manager=None, timeout=None): """ @@ -620,7 +258,7 @@ class Daemon(AuthJSONRPCServer): timeout = timeout or 30 downloader = StandaloneBlobDownloader( blob_hash, self.session.blob_manager, self.session.peer_finder, self.session.rate_limiter, - rate_manager, self.session.wallet, timeout + rate_manager, self.wallet, timeout ) return downloader.download() @@ -628,7 +266,7 @@ class Daemon(AuthJSONRPCServer): def _get_stream_analytics_report(self, 
claim_dict): sd_hash = claim_dict.source_hash try: - stream_hash = yield self.session.storage.get_stream_hash_for_sd_hash(sd_hash) + stream_hash = yield self.storage.get_stream_hash_for_sd_hash(sd_hash) except Exception: stream_hash = None report = { @@ -642,7 +280,7 @@ class Daemon(AuthJSONRPCServer): sd_host = None report["sd_blob"] = sd_host if stream_hash: - blob_infos = yield self.session.storage.get_blobs_for_stream(stream_hash) + blob_infos = yield self.storage.get_blobs_for_stream(stream_hash) report["known_blobs"] = len(blob_infos) else: blob_infos = [] @@ -684,8 +322,8 @@ class Daemon(AuthJSONRPCServer): self.analytics_manager.send_download_started(download_id, name, claim_dict) self.streams[sd_hash] = GetStream(self.sd_identifier, self.session, - self.exchange_rate_manager, self.max_key_fee, - self.disable_max_key_fee, + self.exchange_rate_manager, conf.settings['max_key_fee'], + conf.settings['disable_max_key_fee'], conf.settings['data_rate'], timeout) try: lbry_file, finished_deferred = yield self.streams[sd_hash].start( @@ -713,11 +351,12 @@ class Daemon(AuthJSONRPCServer): def _publish_stream(self, name, bid, claim_dict, file_path=None, certificate_id=None, claim_address=None, change_address=None): - publisher = Publisher(self.session, self.lbry_file_manager, self.session.wallet, + publisher = Publisher(self.session, self.file_manager, self.wallet, certificate_id) parse_lbry_uri(name) if not file_path: - stream_hash = yield self.storage.get_stream_hash_for_sd_hash(claim_dict['stream']['source']['source']) + stream_hash = yield self.storage.get_stream_hash_for_sd_hash( + claim_dict['stream']['source']['source']) claim_out = yield publisher.publish_stream(name, bid, claim_dict, stream_hash, claim_address, change_address) else: @@ -742,7 +381,7 @@ class Daemon(AuthJSONRPCServer): """ parsed = parse_lbry_uri(name) - resolution = yield self.session.wallet.resolve(parsed.name, check_cache=not force_refresh) + resolution = yield 
self.wallet.resolve(parsed.name, check_cache=not force_refresh) if parsed.name in resolution: result = resolution[parsed.name] defer.returnValue(result) @@ -750,17 +389,9 @@ class Daemon(AuthJSONRPCServer): def _get_or_download_sd_blob(self, blob, sd_hash): if blob: return self.session.blob_manager.get_blob(blob[0]) - - def _check_est(downloader): - if downloader.result is not None: - downloader.cancel() - - d = defer.succeed(None) - reactor.callLater(conf.settings['search_timeout'], _check_est, d) - d.addCallback( - lambda _: download_sd_blob( - self.session, sd_hash, self.session.payment_rate_manager)) - return d + return download_sd_blob( + self.session, sd_hash, self.session.payment_rate_manager, conf.settings['search_timeout'] + ) def get_or_download_sd_blob(self, sd_hash): """Return previously downloaded sd blob if already in the blob @@ -797,7 +428,7 @@ class Daemon(AuthJSONRPCServer): cost = self._get_est_cost_from_stream_size(size) - resolved = yield self.session.wallet.resolve(uri) + resolved = yield self.wallet.resolve(uri) if uri in resolved and 'claim' in resolved[uri]: claim = ClaimDict.load_dict(resolved[uri]['claim']['value']) @@ -844,7 +475,7 @@ class Daemon(AuthJSONRPCServer): Resolve a name and return the estimated stream cost """ - resolved = yield self.session.wallet.resolve(uri) + resolved = yield self.wallet.resolve(uri) if resolved: claim_response = resolved[uri] else: @@ -924,7 +555,7 @@ class Daemon(AuthJSONRPCServer): def _get_lbry_file(self, search_by, val, return_json=False, full_status=False): lbry_file = None if search_by in FileID: - for l_f in self.lbry_file_manager.lbry_files: + for l_f in self.file_manager.lbry_files: if l_f.__dict__.get(search_by) == val: lbry_file = l_f break @@ -936,7 +567,7 @@ class Daemon(AuthJSONRPCServer): @defer.inlineCallbacks def _get_lbry_files(self, return_json=False, full_status=True, **kwargs): - lbry_files = list(self.lbry_file_manager.lbry_files) + lbry_files = list(self.file_manager.lbry_files) if 
kwargs: for search_type, value in iter_lbry_file_search_values(kwargs): lbry_files = [l_f for l_f in lbry_files if l_f.__dict__[search_type] == value] @@ -970,10 +601,9 @@ class Daemon(AuthJSONRPCServer): direction = pieces[0] return field, direction - def _get_single_peer_downloader(self): downloader = SinglePeerDownloader() - downloader.setup(self.session.wallet) + downloader.setup(self.wallet) return downloader @defer.inlineCallbacks @@ -1037,59 +667,65 @@ class Daemon(AuthJSONRPCServer): Returns: (dict) lbrynet-daemon status { - 'lbry_id': lbry peer id, base58, - 'installation_id': installation id, base58, - 'is_running': bool, + 'installation_id': (str) installation id - base58, + 'is_running': (bool), 'is_first_run': bool, - 'startup_status': { - 'code': status code, - 'message': status message + 'skipped_components': (list) [names of skipped components (str)], + 'startup_status': { Does not include components which have been skipped + 'database': (bool), + 'wallet': (bool), + 'session': (bool), + 'dht': (bool), + 'hash_announcer': (bool), + 'stream_identifier': (bool), + 'file_manager': (bool), + 'peer_protocol_server': (bool), + 'reflector': (bool), + 'upnp': (bool), + 'exchange_rate_manager': (bool), }, 'connection_status': { - 'code': connection status code, - 'message': connection status message + 'code': (str) connection status code, + 'message': (str) connection status message }, 'blockchain_status': { - 'blocks': local blockchain height, - 'blocks_behind': remote_height - local_height, - 'best_blockhash': block hash of most recent block, + 'blocks': (int) local blockchain height, + 'blocks_behind': (int) remote_height - local_height, + 'best_blockhash': (str) block hash of most recent block, }, - 'wallet_is_encrypted': bool, - + 'dht_node_status': { + 'node_id': (str) lbry dht node id - hex encoded, + 'peers_in_routing_table': (int) the number of peers in the routing table, + }, + 'wallet_is_encrypted': (bool), If given the session status option: 
'session_status': { - 'managed_blobs': count of blobs in the blob manager, - 'managed_streams': count of streams in the file manager - 'announce_queue_size': number of blobs currently queued to be announced - 'should_announce_blobs': number of blobs that should be announced + 'managed_blobs': (int) count of blobs in the blob manager, + 'managed_streams': (int) count of streams in the file manager, + 'announce_queue_size': (int) number of blobs currently queued to be announced, + 'should_announce_blobs': (int) number of blobs that should be announced, } } """ # on startup, the wallet or network won't be available but we still need this call to work - has_wallet = self.session and self.session.wallet and self.session.wallet.network - local_height = self.session.wallet.network.get_local_height() if has_wallet else 0 - remote_height = self.session.wallet.network.get_server_height() if has_wallet else 0 - best_hash = (yield self.session.wallet.get_best_blockhash()) if has_wallet else None - wallet_is_encrypted = has_wallet and self.session.wallet.wallet and \ - self.session.wallet.wallet.use_encryption + has_wallet = self.session and self.wallet and self.wallet.network + local_height = self.wallet.network.get_local_height() if has_wallet else 0 + remote_height = self.wallet.network.get_server_height() if has_wallet else 0 + best_hash = (yield self.wallet.get_best_blockhash()) if has_wallet else None + wallet_is_encrypted = has_wallet and self.wallet.wallet and \ + self.wallet.wallet.use_encryption + connection_code = CONNECTION_STATUS_CONNECTED if utils.check_connection() else CONNECTION_STATUS_NETWORK response = { - 'lbry_id': base58.b58encode(self.node_id), 'installation_id': conf.settings.installation_id, - 'is_running': self.announced_startup, - 'is_first_run': self.session.wallet.is_first_run if has_wallet else None, - 'startup_status': { - 'code': self.startup_status[0], - 'message': self.startup_status[1], - }, + 'is_running': 
all(self.component_manager.get_components_status().values()), + 'is_first_run': self.is_first_run, + 'skipped_components': self.component_manager.skip_components, + 'startup_status': self.component_manager.get_components_status(), 'connection_status': { - 'code': self.connection_status_code, - 'message': ( - CONNECTION_MESSAGES[self.connection_status_code] - if self.connection_status_code is not None - else '' - ), + 'code': connection_code, + 'message': CONNECTION_MESSAGES[connection_code], }, 'wallet_is_encrypted': wallet_is_encrypted, 'blocks_behind': remote_height - local_height, # deprecated. remove from UI, then here @@ -1097,6 +733,11 @@ class Daemon(AuthJSONRPCServer): 'blocks': local_height, 'blocks_behind': remote_height - local_height, 'best_blockhash': best_hash, + }, + 'dht_node_status': { + 'node_id': conf.settings.node_id.encode('hex'), + 'peers_in_routing_table': 0 if not self.component_manager.all_components_running(DHT_COMPONENT) else + len(self.dht_node.contacts) } } if session_status: @@ -1105,7 +746,7 @@ class Daemon(AuthJSONRPCServer): should_announce_blobs = yield self.session.blob_manager.count_should_announce_blobs() response['session_status'] = { 'managed_blobs': len(blobs), - 'managed_streams': len(self.lbry_file_manager.lbry_files), + 'managed_streams': len(self.file_manager.lbry_files), 'announce_queue_size': announce_queue_size, 'should_announce_blobs': should_announce_blobs, } @@ -1137,7 +778,7 @@ class Daemon(AuthJSONRPCServer): } """ - platform_info = self._get_platform() + platform_info = system_info.get_platform() log.info("Get version info: " + json.dumps(platform_info)) return self._render_response(platform_info) @@ -1156,7 +797,7 @@ class Daemon(AuthJSONRPCServer): (bool) true if successful """ - platform_name = self._get_platform()['platform'] + platform_name = system_info.get_platform()['platform'] report_bug_to_slack( message, conf.settings.installation_id, @@ -1181,7 +822,6 @@ class Daemon(AuthJSONRPCServer): """ return 
self._render_response(conf.settings.get_adjustable_settings_dict()) - @defer.inlineCallbacks def jsonrpc_settings_set(self, **kwargs): """ Set daemon settings @@ -1233,8 +873,41 @@ class Daemon(AuthJSONRPCServer): (dict) Updated dictionary of daemon settings """ - yield self._update_settings(kwargs) - defer.returnValue(conf.settings.get_adjustable_settings_dict()) + # TODO: improve upon the current logic, it could be made better + new_settings = kwargs + + setting_types = { + 'download_directory': str, + 'data_rate': float, + 'download_timeout': int, + 'peer_port': int, + 'max_key_fee': dict, + 'use_upnp': bool, + 'run_reflector_server': bool, + 'cache_time': int, + 'reflect_uploads': bool, + 'share_usage_data': bool, + 'disable_max_key_fee': bool, + 'peer_search_timeout': int, + 'sd_download_timeout': int, + 'auto_renew_claim_height_delta': int + } + + for key, setting_type in setting_types.iteritems(): + if key in new_settings: + if isinstance(new_settings[key], setting_type): + conf.settings.update({key: new_settings[key]}, + data_types=(conf.TYPE_RUNTIME, conf.TYPE_PERSISTED)) + elif setting_type is dict and isinstance(new_settings[key], (unicode, str)): + decoded = json.loads(str(new_settings[key])) + conf.settings.update({key: decoded}, + data_types=(conf.TYPE_RUNTIME, conf.TYPE_PERSISTED)) + else: + converted = setting_type(new_settings[key]) + conf.settings.update({key: converted}, + data_types=(conf.TYPE_RUNTIME, conf.TYPE_PERSISTED)) + conf.settings.save_conf_file_settings() + return self._render_response(conf.settings.get_adjustable_settings_dict()) def jsonrpc_help(self, command=None): """ @@ -1284,6 +957,7 @@ class Daemon(AuthJSONRPCServer): """ return self._render_response(sorted([command for command in self.callable_methods.keys()])) + @requires(WALLET_COMPONENT) def jsonrpc_wallet_balance(self, address=None, include_unconfirmed=False): """ Return the balance of the wallet @@ -1300,11 +974,12 @@ class Daemon(AuthJSONRPCServer): (float) amount of lbry 
credits in wallet """ if address is None: - return self._render_response(float(self.session.wallet.get_balance())) + return self._render_response(float(self.wallet.get_balance())) else: return self._render_response(float( - self.session.wallet.get_address_balance(address, include_unconfirmed))) + self.wallet.get_address_balance(address, include_unconfirmed))) + @requires(WALLET_COMPONENT) @defer.inlineCallbacks def jsonrpc_wallet_unlock(self, password): """ @@ -1320,9 +995,10 @@ class Daemon(AuthJSONRPCServer): (bool) true if wallet is unlocked, otherwise false """ - cmd_runner = self.session.wallet.get_cmd_runner() - if cmd_runner.locked: - d = self.session.wallet.wallet_unlocked_d + # the check_locked() in the if statement is needed because that is what sets + # the wallet_unlocked_d deferred ¯\_(ツ)_/¯ + if not self.wallet.check_locked(): + d = self.wallet.wallet_unlocked_d d.callback(password) result = yield d else: @@ -1330,6 +1006,7 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response(result) defer.returnValue(response) + @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED]) @defer.inlineCallbacks def jsonrpc_wallet_decrypt(self): """ @@ -1345,10 +1022,11 @@ class Daemon(AuthJSONRPCServer): (bool) true if wallet is decrypted, otherwise false """ - result = self.session.wallet.decrypt_wallet() + result = self.wallet.decrypt_wallet() response = yield self._render_response(result) defer.returnValue(response) + @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED]) @defer.inlineCallbacks def jsonrpc_wallet_encrypt(self, new_password): """ @@ -1365,8 +1043,8 @@ class Daemon(AuthJSONRPCServer): (bool) true if wallet is decrypted, otherwise false """ - self.session.wallet.encrypt_wallet(new_password) - response = yield self._render_response(self.session.wallet.wallet.use_encryption) + self.wallet.encrypt_wallet(new_password) + response = yield self._render_response(self.wallet.wallet.use_encryption) defer.returnValue(response) 
@defer.inlineCallbacks @@ -1389,6 +1067,7 @@ class Daemon(AuthJSONRPCServer): reactor.callLater(0.1, reactor.fireSystemEvent, "shutdown") defer.returnValue(response) + @requires(FILE_MANAGER_COMPONENT) @defer.inlineCallbacks def jsonrpc_file_list(self, sort=None, **kwargs): """ @@ -1460,6 +1139,7 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response(result) defer.returnValue(response) + @requires(WALLET_COMPONENT) @defer.inlineCallbacks def jsonrpc_resolve_name(self, name, force=False): """ @@ -1485,6 +1165,7 @@ class Daemon(AuthJSONRPCServer): else: defer.returnValue(metadata) + @requires(WALLET_COMPONENT) @defer.inlineCallbacks def jsonrpc_claim_show(self, txid=None, nout=None, claim_id=None): """ @@ -1522,14 +1203,15 @@ class Daemon(AuthJSONRPCServer): """ if claim_id is not None and txid is None and nout is None: - claim_results = yield self.session.wallet.get_claim_by_claim_id(claim_id) + claim_results = yield self.wallet.get_claim_by_claim_id(claim_id) elif txid is not None and nout is not None and claim_id is None: - claim_results = yield self.session.wallet.get_claim_by_outpoint(txid, int(nout)) + claim_results = yield self.wallet.get_claim_by_outpoint(txid, int(nout)) else: raise Exception("Must specify either txid/nout, or claim_id") response = yield self._render_response(claim_results) defer.returnValue(response) + @requires(WALLET_COMPONENT) @defer.inlineCallbacks def jsonrpc_resolve(self, force=False, uri=None, uris=[]): """ @@ -1613,13 +1295,14 @@ class Daemon(AuthJSONRPCServer): except URIParseError: results[u] = {"error": "%s is not a valid uri" % u} - resolved = yield self.session.wallet.resolve(*valid_uris, check_cache=not force) + resolved = yield self.wallet.resolve(*valid_uris, check_cache=not force) for resolved_uri in resolved: results[resolved_uri] = resolved[resolved_uri] response = yield self._render_response(results) defer.returnValue(response) + @requires(WALLET_COMPONENT, FILE_MANAGER_COMPONENT, SESSION_COMPONENT, 
conditions=[WALLET_IS_UNLOCKED]) @defer.inlineCallbacks def jsonrpc_get(self, uri, file_name=None, timeout=None): """ @@ -1665,13 +1348,13 @@ class Daemon(AuthJSONRPCServer): } """ - timeout = timeout if timeout is not None else self.download_timeout + timeout = timeout if timeout is not None else conf.settings['download_timeout'] parsed_uri = parse_lbry_uri(uri) if parsed_uri.is_channel and not parsed_uri.path: raise Exception("cannot download a channel claim, specify a /path") - resolved_result = yield self.session.wallet.resolve(uri) + resolved_result = yield self.wallet.resolve(uri) if resolved_result and uri in resolved_result: resolved = resolved_result[uri] else: @@ -1708,6 +1391,7 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response(result) defer.returnValue(response) + @requires(FILE_MANAGER_COMPONENT) @defer.inlineCallbacks def jsonrpc_file_set_status(self, status, **kwargs): """ @@ -1738,7 +1422,7 @@ class Daemon(AuthJSONRPCServer): raise Exception('Unable to find a file for {}:{}'.format(search_type, value)) if status == 'start' and lbry_file.stopped or status == 'stop' and not lbry_file.stopped: - yield self.lbry_file_manager.toggle_lbry_file_running(lbry_file) + yield self.file_manager.toggle_lbry_file_running(lbry_file) msg = "Started downloading file" if status == 'start' else "Stopped downloading file" else: msg = ( @@ -1748,6 +1432,7 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response(msg) defer.returnValue(response) + @requires(FILE_MANAGER_COMPONENT) @defer.inlineCallbacks def jsonrpc_file_delete(self, delete_from_download_dir=False, delete_all=False, **kwargs): """ @@ -1800,14 +1485,15 @@ class Daemon(AuthJSONRPCServer): file_name, stream_hash = lbry_file.file_name, lbry_file.stream_hash if lbry_file.sd_hash in self.streams: del self.streams[lbry_file.sd_hash] - yield self.lbry_file_manager.delete_lbry_file(lbry_file, - delete_file=delete_from_download_dir) + yield 
self.file_manager.delete_lbry_file(lbry_file, + delete_file=delete_from_download_dir) log.info("Deleted file: %s", file_name) result = True response = yield self._render_response(result) defer.returnValue(response) + @requires(WALLET_COMPONENT, SESSION_COMPONENT, conditions=[WALLET_IS_UNLOCKED]) @defer.inlineCallbacks def jsonrpc_stream_cost_estimate(self, uri, size=None): """ @@ -1828,6 +1514,7 @@ class Daemon(AuthJSONRPCServer): cost = yield self.get_est_cost(uri, size) defer.returnValue(cost) + @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED]) @defer.inlineCallbacks def jsonrpc_channel_new(self, channel_name, amount): """ @@ -1863,14 +1550,14 @@ class Daemon(AuthJSONRPCServer): if amount <= 0: raise Exception("Invalid amount") - yield self.session.wallet.update_balance() - if amount >= self.session.wallet.get_balance(): - balance = yield self.session.wallet.get_max_usable_balance_for_claim(channel_name) + yield self.wallet.update_balance() + if amount >= self.wallet.get_balance(): + balance = yield self.wallet.get_max_usable_balance_for_claim(channel_name) max_bid_amount = balance - MAX_UPDATE_FEE_ESTIMATE if balance <= MAX_UPDATE_FEE_ESTIMATE: raise InsufficientFundsError( "Insufficient funds, please deposit additional LBC. Minimum additional LBC needed {}" - . format(MAX_UPDATE_FEE_ESTIMATE - balance)) + .format(MAX_UPDATE_FEE_ESTIMATE - balance)) elif amount > max_bid_amount: raise InsufficientFundsError( "Please wait for any pending bids to resolve or lower the bid value. " @@ -1878,12 +1565,13 @@ class Daemon(AuthJSONRPCServer): .format(max_bid_amount) ) - result = yield self.session.wallet.claim_new_channel(channel_name, amount) + result = yield self.wallet.claim_new_channel(channel_name, amount) self.analytics_manager.send_new_channel() log.info("Claimed a new channel! 
Result: %s", result) response = yield self._render_response(result) defer.returnValue(response) + @requires(WALLET_COMPONENT) @defer.inlineCallbacks def jsonrpc_channel_list(self): """ @@ -1900,10 +1588,11 @@ class Daemon(AuthJSONRPCServer): is in the wallet. """ - result = yield self.session.wallet.channel_list() + result = yield self.wallet.channel_list() response = yield self._render_response(result) defer.returnValue(response) + @requires(WALLET_COMPONENT) @AuthJSONRPCServer.deprecated("channel_list") def jsonrpc_channel_list_mine(self): """ @@ -1921,6 +1610,7 @@ class Daemon(AuthJSONRPCServer): return self.jsonrpc_channel_list() + @requires(WALLET_COMPONENT) @defer.inlineCallbacks def jsonrpc_channel_export(self, claim_id): """ @@ -1936,9 +1626,10 @@ class Daemon(AuthJSONRPCServer): (str) Serialized certificate information """ - result = yield self.session.wallet.export_certificate_info(claim_id) + result = yield self.wallet.export_certificate_info(claim_id) defer.returnValue(result) + @requires(WALLET_COMPONENT) @defer.inlineCallbacks def jsonrpc_channel_import(self, serialized_certificate_info): """ @@ -1954,9 +1645,10 @@ class Daemon(AuthJSONRPCServer): (dict) Result dictionary """ - result = yield self.session.wallet.import_certificate_info(serialized_certificate_info) + result = yield self.wallet.import_certificate_info(serialized_certificate_info) defer.returnValue(result) + @requires(WALLET_COMPONENT, FILE_MANAGER_COMPONENT, SESSION_COMPONENT, conditions=[WALLET_IS_UNLOCKED]) @defer.inlineCallbacks def jsonrpc_publish(self, name, bid, metadata=None, file_path=None, fee=None, title=None, description=None, author=None, language=None, license=None, @@ -2048,9 +1740,9 @@ class Daemon(AuthJSONRPCServer): if bid <= 0.0: raise ValueError("Bid value must be greater than 0.0") - yield self.session.wallet.update_balance() - if bid >= self.session.wallet.get_balance(): - balance = yield self.session.wallet.get_max_usable_balance_for_claim(name) + yield 
self.wallet.update_balance() + if bid >= self.wallet.get_balance(): + balance = yield self.wallet.get_max_usable_balance_for_claim(name) max_bid_amount = balance - MAX_UPDATE_FEE_ESTIMATE if balance <= MAX_UPDATE_FEE_ESTIMATE: raise InsufficientFundsError( @@ -2097,7 +1789,7 @@ class Daemon(AuthJSONRPCServer): log.warning("Stripping empty fee from published metadata") del metadata['fee'] elif 'address' not in metadata['fee']: - address = yield self.session.wallet.get_least_used_address() + address = yield self.wallet.get_least_used_address() metadata['fee']['address'] = address if 'fee' in metadata and 'version' not in metadata['fee']: metadata['fee']['version'] = '_0_0_1' @@ -2153,7 +1845,7 @@ class Daemon(AuthJSONRPCServer): certificate_id = channel_id elif channel_name: certificate_id = None - my_certificates = yield self.session.wallet.channel_list() + my_certificates = yield self.wallet.channel_list() for certificate in my_certificates: if channel_name == certificate['name']: certificate_id = certificate['claim_id'] @@ -2168,6 +1860,7 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response(result) defer.returnValue(response) + @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED]) @defer.inlineCallbacks def jsonrpc_claim_abandon(self, claim_id=None, txid=None, nout=None): """ @@ -2196,10 +1889,11 @@ class Daemon(AuthJSONRPCServer): if nout is None and txid is not None: raise Exception('Must specify nout') - result = yield self.session.wallet.abandon_claim(claim_id, txid, nout) + result = yield self.wallet.abandon_claim(claim_id, txid, nout) self.analytics_manager.send_claim_action('abandon') defer.returnValue(result) + @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED]) @defer.inlineCallbacks def jsonrpc_claim_new_support(self, name, claim_id, amount): """ @@ -2223,10 +1917,11 @@ class Daemon(AuthJSONRPCServer): } """ - result = yield self.session.wallet.support_claim(name, claim_id, amount) + result = yield 
self.wallet.support_claim(name, claim_id, amount) self.analytics_manager.send_claim_action('new_support') defer.returnValue(result) + @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED]) @defer.inlineCallbacks def jsonrpc_claim_renew(self, outpoint=None, height=None): """ @@ -2262,13 +1957,14 @@ class Daemon(AuthJSONRPCServer): nout = int(nout) else: raise Exception("invalid outpoint") - result = yield self.session.wallet.claim_renew(txid, nout) + result = yield self.wallet.claim_renew(txid, nout) result = {outpoint: result} else: height = int(height) - result = yield self.session.wallet.claim_renew_all_before_expiration(height) + result = yield self.wallet.claim_renew_all_before_expiration(height) defer.returnValue(result) + @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED]) @defer.inlineCallbacks def jsonrpc_claim_send_to_address(self, claim_id, address, amount=None): """ @@ -2296,11 +1992,12 @@ class Daemon(AuthJSONRPCServer): } """ - result = yield self.session.wallet.send_claim_to_address(claim_id, address, amount) + result = yield self.wallet.send_claim_to_address(claim_id, address, amount) response = yield self._render_response(result) defer.returnValue(response) # TODO: claim_list_mine should be merged into claim_list, but idk how to authenticate it -Grin + @requires(WALLET_COMPONENT) def jsonrpc_claim_list_mine(self): """ List my name claims @@ -2334,10 +2031,11 @@ class Daemon(AuthJSONRPCServer): ] """ - d = self.session.wallet.get_name_claims() + d = self.wallet.get_name_claims() d.addCallback(lambda claims: self._render_response(claims)) return d + @requires(WALLET_COMPONENT) @defer.inlineCallbacks def jsonrpc_claim_list(self, name): """ @@ -2372,10 +2070,11 @@ class Daemon(AuthJSONRPCServer): } """ - claims = yield self.session.wallet.get_claims_for_name(name) # type: dict + claims = yield self.wallet.get_claims_for_name(name) # type: dict sort_claim_results(claims['claims']) defer.returnValue(claims) + @requires(WALLET_COMPONENT) 
@defer.inlineCallbacks def jsonrpc_claim_list_by_channel(self, page=0, page_size=10, uri=None, uris=[]): """ @@ -2449,8 +2148,8 @@ class Daemon(AuthJSONRPCServer): except URIParseError: results[chan_uri] = {"error": "%s is not a valid uri" % chan_uri} - resolved = yield self.session.wallet.resolve(*valid_uris, check_cache=False, page=page, - page_size=page_size) + resolved = yield self.wallet.resolve(*valid_uris, check_cache=False, page=page, + page_size=page_size) for u in resolved: if 'error' in resolved[u]: results[u] = resolved[u] @@ -2465,6 +2164,7 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response(results) defer.returnValue(response) + @requires(WALLET_COMPONENT) def jsonrpc_transaction_list(self): """ List transactions belonging to wallet @@ -2522,10 +2222,11 @@ class Daemon(AuthJSONRPCServer): """ - d = self.session.wallet.get_history() + d = self.wallet.get_history() d.addCallback(lambda r: self._render_response(r)) return d + @requires(WALLET_COMPONENT) def jsonrpc_transaction_show(self, txid): """ Get a decoded transaction from a txid @@ -2540,10 +2241,11 @@ class Daemon(AuthJSONRPCServer): (dict) JSON formatted transaction """ - d = self.session.wallet.get_transaction(txid) + d = self.wallet.get_transaction(txid) d.addCallback(lambda r: self._render_response(r)) return d + @requires(WALLET_COMPONENT) def jsonrpc_wallet_is_address_mine(self, address): """ Checks if an address is associated with the current wallet. @@ -2558,10 +2260,11 @@ class Daemon(AuthJSONRPCServer): (bool) true, if address is associated with current wallet """ - d = self.session.wallet.address_is_mine(address) + d = self.wallet.address_is_mine(address) d.addCallback(lambda is_mine: self._render_response(is_mine)) return d + @requires(WALLET_COMPONENT) def jsonrpc_wallet_public_key(self, address): """ Get public key from wallet address @@ -2577,10 +2280,11 @@ class Daemon(AuthJSONRPCServer): Could contain more than one public key if multisig. 
""" - d = self.session.wallet.get_pub_keys(address) + d = self.wallet.get_pub_keys(address) d.addCallback(lambda r: self._render_response(r)) return d + @requires(WALLET_COMPONENT) @defer.inlineCallbacks def jsonrpc_wallet_list(self): """ @@ -2596,10 +2300,11 @@ class Daemon(AuthJSONRPCServer): List of wallet addresses """ - addresses = yield self.session.wallet.list_addresses() + addresses = yield self.wallet.list_addresses() response = yield self._render_response(addresses) defer.returnValue(response) + @requires(WALLET_COMPONENT) def jsonrpc_wallet_new_address(self): """ Generate a new wallet address @@ -2618,11 +2323,12 @@ class Daemon(AuthJSONRPCServer): log.info("Got new wallet address: " + address) return defer.succeed(address) - d = self.session.wallet.get_new_address() + d = self.wallet.get_new_address() d.addCallback(_disp) d.addCallback(lambda address: self._render_response(address)) return d + @requires(WALLET_COMPONENT) def jsonrpc_wallet_unused_address(self): """ Return an address containing no balance, will create @@ -2642,11 +2348,12 @@ class Daemon(AuthJSONRPCServer): log.info("Got unused wallet address: " + address) return defer.succeed(address) - d = self.session.wallet.get_unused_address() + d = self.wallet.get_unused_address() d.addCallback(_disp) d.addCallback(lambda address: self._render_response(address)) return d + @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED]) @AuthJSONRPCServer.deprecated("wallet_send") @defer.inlineCallbacks def jsonrpc_send_amount_to_address(self, amount, address): @@ -2669,13 +2376,14 @@ class Daemon(AuthJSONRPCServer): elif not amount: raise NullFundsError() - reserved_points = self.session.wallet.reserve_points(address, amount) + reserved_points = self.wallet.reserve_points(address, amount) if reserved_points is None: raise InsufficientFundsError() - yield self.session.wallet.send_points_to_address(reserved_points, amount) + yield self.wallet.send_points_to_address(reserved_points, amount) 
self.analytics_manager.send_credits_sent() defer.returnValue(True) + @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED]) @defer.inlineCallbacks def jsonrpc_wallet_send(self, amount, address=None, claim_id=None): """ @@ -2720,10 +2428,11 @@ class Daemon(AuthJSONRPCServer): result = yield self.jsonrpc_send_amount_to_address(amount, address) else: validate_claim_id(claim_id) - result = yield self.session.wallet.tip_claim(claim_id, amount) + result = yield self.wallet.tip_claim(claim_id, amount) self.analytics_manager.send_claim_action('new_support') defer.returnValue(result) + @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED]) @defer.inlineCallbacks def jsonrpc_wallet_prefill_addresses(self, num_addresses, amount, no_broadcast=False): """ @@ -2749,11 +2458,12 @@ class Daemon(AuthJSONRPCServer): raise NullFundsError() broadcast = not no_broadcast - tx = yield self.session.wallet.create_addresses_with_balance( + tx = yield self.wallet.create_addresses_with_balance( num_addresses, amount, broadcast=broadcast) tx['broadcast'] = broadcast defer.returnValue(tx) + @requires(WALLET_COMPONENT) @defer.inlineCallbacks def jsonrpc_utxo_list(self): """ @@ -2783,7 +2493,7 @@ class Daemon(AuthJSONRPCServer): ] """ - unspent = yield self.session.wallet.list_unspent() + unspent = yield self.wallet.list_unspent() for i, utxo in enumerate(unspent): utxo['txid'] = utxo.pop('prevout_hash') utxo['nout'] = utxo.pop('prevout_n') @@ -2793,6 +2503,7 @@ class Daemon(AuthJSONRPCServer): defer.returnValue(unspent) + @requires(WALLET_COMPONENT) def jsonrpc_block_show(self, blockhash=None, height=None): """ Get contents of a block @@ -2809,10 +2520,10 @@ class Daemon(AuthJSONRPCServer): """ if blockhash is not None: - d = self.session.wallet.get_block(blockhash) + d = self.wallet.get_block(blockhash) elif height is not None: - d = self.session.wallet.get_block_info(height) - d.addCallback(lambda b: self.session.wallet.get_block(b)) + d = self.wallet.get_block_info(height) + 
d.addCallback(lambda b: self.wallet.get_block(b)) else: # TODO: return a useful error message return server.failure @@ -2820,6 +2531,7 @@ class Daemon(AuthJSONRPCServer): d.addCallback(lambda r: self._render_response(r)) return d + @requires(WALLET_COMPONENT, SESSION_COMPONENT, conditions=[WALLET_IS_UNLOCKED]) @defer.inlineCallbacks def jsonrpc_blob_get(self, blob_hash, timeout=None, encoding=None, payment_rate_manager=None): """ @@ -2863,6 +2575,7 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response(result) defer.returnValue(response) + @requires(SESSION_COMPONENT) @defer.inlineCallbacks def jsonrpc_blob_delete(self, blob_hash): """ @@ -2882,14 +2595,15 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response("Don't have that blob") defer.returnValue(response) try: - stream_hash = yield self.session.storage.get_stream_hash_for_sd_hash(blob_hash) - yield self.session.storage.delete_stream(stream_hash) + stream_hash = yield self.storage.get_stream_hash_for_sd_hash(blob_hash) + yield self.storage.delete_stream(stream_hash) except Exception as err: pass yield self.session.blob_manager.delete_blobs([blob_hash]) response = yield self._render_response("Deleted %s" % blob_hash) defer.returnValue(response) + @requires(DHT_COMPONENT) @defer.inlineCallbacks def jsonrpc_peer_list(self, blob_hash, timeout=None): """ @@ -2909,7 +2623,7 @@ class Daemon(AuthJSONRPCServer): if not utils.is_valid_blobhash(blob_hash): raise Exception("invalid blob hash") - finished_deferred = self.session.dht_node.iterativeFindValue(binascii.unhexlify(blob_hash)) + finished_deferred = self.dht_node.iterativeFindValue(binascii.unhexlify(blob_hash)) def trap_timeout(err): err.trap(defer.TimeoutError) @@ -2928,6 +2642,7 @@ class Daemon(AuthJSONRPCServer): ] defer.returnValue(results) + @requires(SESSION_COMPONENT, DHT_COMPONENT, conditions=[DHT_HAS_CONTACTS]) @defer.inlineCallbacks def jsonrpc_blob_announce(self, blob_hash=None, stream_hash=None, sd_hash=None): 
""" @@ -2964,6 +2679,7 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response(True) defer.returnValue(response) + @requires(FILE_MANAGER_COMPONENT) @defer.inlineCallbacks def jsonrpc_file_reflect(self, **kwargs): """ @@ -2999,6 +2715,7 @@ class Daemon(AuthJSONRPCServer): results = yield reupload.reflect_file(lbry_file, reflector_server=reflector_server) defer.returnValue(results) + @requires(SESSION_COMPONENT, WALLET_COMPONENT) @defer.inlineCallbacks def jsonrpc_blob_list(self, uri=None, stream_hash=None, sd_hash=None, needed=None, finished=None, page_size=None, page=None): @@ -3028,14 +2745,14 @@ class Daemon(AuthJSONRPCServer): if uri: metadata = yield self._resolve_name(uri) sd_hash = utils.get_sd_hash(metadata) - stream_hash = yield self.session.storage.get_stream_hash_for_sd_hash(sd_hash) + stream_hash = yield self.storage.get_stream_hash_for_sd_hash(sd_hash) elif stream_hash: - sd_hash = yield self.session.storage.get_sd_blob_hash_for_stream(stream_hash) + sd_hash = yield self.storage.get_sd_blob_hash_for_stream(stream_hash) elif sd_hash: - stream_hash = yield self.session.storage.get_stream_hash_for_sd_hash(sd_hash) - sd_hash = yield self.session.storage.get_sd_blob_hash_for_stream(stream_hash) + stream_hash = yield self.storage.get_stream_hash_for_sd_hash(sd_hash) + sd_hash = yield self.storage.get_sd_blob_hash_for_stream(stream_hash) if stream_hash: - crypt_blobs = yield self.session.storage.get_blobs_for_stream(stream_hash) + crypt_blobs = yield self.storage.get_blobs_for_stream(stream_hash) blobs = yield defer.gatherResults([ self.session.blob_manager.get_blob(crypt_blob.blob_hash, crypt_blob.length) for crypt_blob in crypt_blobs if crypt_blob.blob_hash is not None @@ -3062,6 +2779,7 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response(blob_hashes_for_return) defer.returnValue(response) + @requires(SESSION_COMPONENT) def jsonrpc_blob_reflect(self, blob_hashes, reflector_server=None): """ Reflects specified blobs 
@@ -3080,6 +2798,7 @@ class Daemon(AuthJSONRPCServer): d.addCallback(lambda r: self._render_response(r)) return d + @requires(SESSION_COMPONENT) def jsonrpc_blob_reflect_all(self): """ Reflects all saved blobs @@ -3099,6 +2818,7 @@ class Daemon(AuthJSONRPCServer): d.addCallback(lambda r: self._render_response(r)) return d + @requires(DHT_COMPONENT) @defer.inlineCallbacks def jsonrpc_peer_ping(self, node_id): """ @@ -3116,7 +2836,7 @@ class Daemon(AuthJSONRPCServer): contact = None try: - contact = yield self.session.dht_node.findContact(node_id.decode('hex')) + contact = yield self.dht_node.findContact(node_id.decode('hex')) except TimeoutError: result = {'error': 'timeout finding peer'} defer.returnValue(result) @@ -3128,6 +2848,7 @@ class Daemon(AuthJSONRPCServer): result = {'error': 'ping timeout'} defer.returnValue(result) + @requires(DHT_COMPONENT) def jsonrpc_routing_table_get(self): """ Get DHT routing information @@ -3158,7 +2879,7 @@ class Daemon(AuthJSONRPCServer): """ result = {} - data_store = self.session.dht_node._dataStore._dict + data_store = self.dht_node._dataStore._dict datastore_len = len(data_store) hosts = {} @@ -3176,8 +2897,8 @@ class Daemon(AuthJSONRPCServer): blob_hashes = [] result['buckets'] = {} - for i in range(len(self.session.dht_node._routingTable._buckets)): - for contact in self.session.dht_node._routingTable._buckets[i]._contacts: + for i in range(len(self.dht_node._routingTable._buckets)): + for contact in self.dht_node._routingTable._buckets[i]._contacts: contacts = result['buckets'].get(i, []) if contact in hosts: blobs = hosts[contact] @@ -3200,9 +2921,11 @@ class Daemon(AuthJSONRPCServer): result['contacts'] = contact_set result['blob_hashes'] = blob_hashes - result['node_id'] = self.session.dht_node.node_id.encode('hex') + result['node_id'] = self.dht_node.node_id.encode('hex') return self._render_response(result) + # the single peer downloader needs wallet access + @requires(DHT_COMPONENT, WALLET_COMPONENT, 
conditions=[WALLET_IS_UNLOCKED]) def jsonrpc_blob_availability(self, blob_hash, search_timeout=None, blob_timeout=None): """ Get blob availability @@ -3227,6 +2950,7 @@ class Daemon(AuthJSONRPCServer): return self._blob_availability(blob_hash, search_timeout, blob_timeout) + @requires(SESSION_COMPONENT, WALLET_COMPONENT, DHT_COMPONENT, conditions=[WALLET_IS_UNLOCKED]) @AuthJSONRPCServer.deprecated("stream_availability") def jsonrpc_get_availability(self, uri, sd_timeout=None, peer_timeout=None): """ @@ -3247,6 +2971,7 @@ class Daemon(AuthJSONRPCServer): return self.jsonrpc_stream_availability(uri, peer_timeout, sd_timeout) + @requires(SESSION_COMPONENT, WALLET_COMPONENT, DHT_COMPONENT, conditions=[WALLET_IS_UNLOCKED]) @defer.inlineCallbacks def jsonrpc_stream_availability(self, uri, search_timeout=None, blob_timeout=None): """ @@ -3299,7 +3024,7 @@ class Daemon(AuthJSONRPCServer): } try: - resolved_result = yield self.session.wallet.resolve(uri) + resolved_result = yield self.wallet.resolve(uri) response['did_resolve'] = True except UnknownNameError: response['error'] = "Failed to resolve name" diff --git a/lbrynet/daemon/DaemonCLI.py b/lbrynet/daemon/DaemonCLI.py index 7ec03aa34..3cecc7c42 100644 --- a/lbrynet/daemon/DaemonCLI.py +++ b/lbrynet/daemon/DaemonCLI.py @@ -7,7 +7,7 @@ from collections import OrderedDict from lbrynet import conf from lbrynet.core import utils from lbrynet.daemon.auth.client import JSONRPCException, LBRYAPIClient, AuthAPIClient -from lbrynet.daemon.Daemon import LOADING_WALLET_CODE, Daemon +from lbrynet.daemon.Daemon import Daemon from lbrynet.core.system_info import get_platform from jsonrpc.common import RPCError from requests.exceptions import ConnectionError @@ -21,17 +21,13 @@ def remove_brackets(key): return key -def set_flag_vals(flag_names, parsed_args): +def set_kwargs(parsed_args): kwargs = OrderedDict() for key, arg in parsed_args.iteritems(): if arg is None: continue - elif key.startswith("--"): - if remove_brackets(key[2:]) 
not in kwargs: - k = remove_brackets(key[2:]) - elif key in flag_names: - if remove_brackets(flag_names[key]) not in kwargs: - k = remove_brackets(flag_names[key]) + elif key.startswith("--") and remove_brackets(key[2:]) not in kwargs: + k = remove_brackets(key[2:]) elif remove_brackets(key) not in kwargs: k = remove_brackets(key) kwargs[k] = guess_type(arg, k) @@ -79,26 +75,22 @@ def main(): method = new_method fn = Daemon.callable_methods[method] - if hasattr(fn, "_flags"): - flag_names = fn._flags - else: - flag_names = {} parsed = docopt(fn.__doc__, args) - kwargs = set_flag_vals(flag_names, parsed) + kwargs = set_kwargs(parsed) colorama.init() conf.initialize_settings() try: api = LBRYAPIClient.get_client() - status = api.status() + api.status() except (URLError, ConnectionError) as err: if isinstance(err, HTTPError) and err.code == UNAUTHORIZED: api = AuthAPIClient.config() # this can happen if the daemon is using auth with the --http-auth flag # when the config setting is to not use it try: - status = api.status() + api.status() except: print_error("Daemon requires authentication, but none was provided.", suggest_help=False) @@ -108,20 +100,6 @@ def main(): suggest_help=False) return 1 - status_code = status['startup_status']['code'] - - if status_code != "started" and method not in Daemon.allowed_during_startup: - print "Daemon is in the process of starting. Please try again in a bit." - message = status['startup_status']['message'] - if message: - if ( - status['startup_status']['code'] == LOADING_WALLET_CODE - and status['blockchain_status']['blocks_behind'] > 0 - ): - message += '. Blocks left: ' + str(status['blockchain_status']['blocks_behind']) - print " Status: " + message - return 1 - # TODO: check if port is bound. 
Error if its not try: diff --git a/lbrynet/daemon/DaemonConsole.py b/lbrynet/daemon/DaemonConsole.py index 6210dfc0e..65442e751 100644 --- a/lbrynet/daemon/DaemonConsole.py +++ b/lbrynet/daemon/DaemonConsole.py @@ -10,7 +10,6 @@ from lbrynet import analytics from lbrynet import conf from lbrynet.core import utils from lbrynet.core import log_support -from lbrynet.daemon.DaemonServer import DaemonServer from lbrynet.daemon.auth.client import LBRYAPIClient from lbrynet.daemon.Daemon import Daemon @@ -175,18 +174,7 @@ def start_server_and_listen(use_auth, analytics_manager, quiet): logging.getLogger("requests").setLevel(logging.CRITICAL) analytics_manager.send_server_startup() - daemon_server = DaemonServer(analytics_manager) - try: - yield daemon_server.start(use_auth) - analytics_manager.send_server_startup_success() - if not quiet: - print "Started lbrynet-daemon!" - defer.returnValue(True) - except Exception as e: - log.exception('Failed to start lbrynet-daemon') - analytics_manager.send_server_startup_error(str(e)) - daemon_server.stop() - raise + yield Daemon().start_listening() def threaded_terminal(started_daemon, quiet): diff --git a/lbrynet/daemon/DaemonControl.py b/lbrynet/daemon/DaemonControl.py index 8d73c9ce0..8db0511b9 100644 --- a/lbrynet/daemon/DaemonControl.py +++ b/lbrynet/daemon/DaemonControl.py @@ -12,13 +12,12 @@ from lbrynet.core import log_support import argparse import logging.handlers -from twisted.internet import defer, reactor +from twisted.internet import reactor from jsonrpc.proxy import JSONRPCProxy -from lbrynet import analytics from lbrynet import conf from lbrynet.core import utils, system_info -from lbrynet.daemon.DaemonServer import DaemonServer +from lbrynet.daemon.Daemon import Daemon log = logging.getLogger(__name__) @@ -71,6 +70,7 @@ def start(): lbrynet_log = conf.settings.get_log_filename() log_support.configure_logging(lbrynet_log, not args.quiet, args.verbose) + log_support.configure_loggly_handler() log.debug('Final 
Settings: %s', conf.settings.get_current_settings_dict()) try: @@ -84,8 +84,8 @@ def start(): log.info("Starting lbrynet-daemon from command line") if test_internet_connection(): - analytics_manager = analytics.Manager.new_instance() - start_server_and_listen(analytics_manager) + daemon = Daemon() + daemon.start_listening() reactor.run() else: log.info("Not connected to internet, unable to start") @@ -101,24 +101,5 @@ def update_settings_from_args(args): }, data_types=(conf.TYPE_CLI,)) - -@defer.inlineCallbacks -def start_server_and_listen(analytics_manager): - """ - Args: - use_auth: set to true to enable http authentication - analytics_manager: to send analytics - """ - analytics_manager.send_server_startup() - daemon_server = DaemonServer(analytics_manager) - try: - yield daemon_server.start(conf.settings['use_auth_http']) - analytics_manager.send_server_startup_success() - except Exception as e: - log.exception('Failed to start lbrynet-daemon') - analytics_manager.send_server_startup_error(str(e)) - daemon_server.stop() - - if __name__ == "__main__": start() diff --git a/lbrynet/daemon/DaemonServer.py b/lbrynet/daemon/DaemonServer.py deleted file mode 100644 index e8c00606b..000000000 --- a/lbrynet/daemon/DaemonServer.py +++ /dev/null @@ -1,77 +0,0 @@ -import logging -import os - -from twisted.web import server, guard, resource -from twisted.internet import defer, reactor, error -from twisted.cred import portal - -from lbrynet import conf -from lbrynet.daemon.Daemon import Daemon -from lbrynet.daemon.auth.auth import PasswordChecker, HttpPasswordRealm -from lbrynet.daemon.auth.util import initialize_api_key_file - -log = logging.getLogger(__name__) - - -class IndexResource(resource.Resource): - def getChild(self, name, request): - request.setHeader('cache-control', 'no-cache, no-store, must-revalidate') - request.setHeader('expires', '0') - return self if name == '' else resource.Resource.getChild(self, name, request) - - -class DaemonServer(object): - def 
__init__(self, analytics_manager=None): - self._daemon = None - self.root = None - self.server_port = None - self.analytics_manager = analytics_manager - - def _setup_server(self, use_auth): - self.root = IndexResource() - self._daemon = Daemon(self.analytics_manager) - self.root.putChild("", self._daemon) - # TODO: DEPRECATED, remove this and just serve the API at the root - self.root.putChild(conf.settings['API_ADDRESS'], self._daemon) - - lbrynet_server = get_site_base(use_auth, self.root) - - try: - self.server_port = reactor.listenTCP( - conf.settings['api_port'], lbrynet_server, interface=conf.settings['api_host']) - log.info("lbrynet API listening on TCP %s:%i", conf.settings['api_host'], conf.settings['api_port']) - except error.CannotListenError: - log.info('Daemon already running, exiting app') - raise - - return defer.succeed(True) - - @defer.inlineCallbacks - def start(self, use_auth): - yield self._setup_server(use_auth) - yield self._daemon.setup() - - def stop(self): - if reactor.running: - log.info("Stopping the reactor") - reactor.fireSystemEvent("shutdown") - - -def get_site_base(use_auth, root): - if use_auth: - log.info("Using authenticated API") - root = create_auth_session(root) - else: - log.info("Using non-authenticated API") - return server.Site(root) - - -def create_auth_session(root): - pw_path = os.path.join(conf.settings['data_dir'], ".api_keys") - initialize_api_key_file(pw_path) - checker = PasswordChecker.load_file(pw_path) - realm = HttpPasswordRealm(root) - portal_to_realm = portal.Portal(realm, [checker, ]) - factory = guard.BasicCredentialFactory('Login to lbrynet api') - _lbrynet_server = guard.HTTPAuthSessionWrapper(portal_to_realm, [factory, ]) - return _lbrynet_server diff --git a/lbrynet/daemon/ExchangeRateManager.py b/lbrynet/daemon/ExchangeRateManager.py index 486659a0e..acafe77d4 100644 --- a/lbrynet/daemon/ExchangeRateManager.py +++ b/lbrynet/daemon/ExchangeRateManager.py @@ -12,7 +12,7 @@ log = 
logging.getLogger(__name__) CURRENCY_PAIRS = ["USDBTC", "BTCLBC"] BITTREX_FEE = 0.0025 -COINBASE_FEE = 0.0 #add fee +COINBASE_FEE = 0.0 # add fee class ExchangeRate(object): @@ -37,6 +37,7 @@ class ExchangeRate(object): class MarketFeed(object): REQUESTS_TIMEOUT = 20 EXCHANGE_RATE_UPDATE_RATE_SEC = 300 + def __init__(self, market, name, url, params, fee): self.market = market self.name = name @@ -115,7 +116,7 @@ class BittrexFeed(MarketFeed): qtys = sum([i['Quantity'] for i in trades]) if totals <= 0 or qtys <= 0: raise InvalidExchangeRateResponse(self.market, 'quantities were not positive') - vwap = totals/qtys + vwap = totals / qtys return defer.succeed(float(1.0 / vwap)) @@ -175,12 +176,11 @@ class CryptonatorBTCFeed(MarketFeed): except ValueError: raise InvalidExchangeRateResponse(self.name, "invalid rate response") if 'ticker' not in json_response or len(json_response['ticker']) == 0 or \ - 'success' not in json_response or json_response['success'] is not True: + 'success' not in json_response or json_response['success'] is not True: raise InvalidExchangeRateResponse(self.name, 'result not found') return defer.succeed(float(json_response['ticker']['price'])) - class CryptonatorFeed(MarketFeed): def __init__(self): MarketFeed.__init__( @@ -198,7 +198,7 @@ class CryptonatorFeed(MarketFeed): except ValueError: raise InvalidExchangeRateResponse(self.name, "invalid rate response") if 'ticker' not in json_response or len(json_response['ticker']) == 0 or \ - 'success' not in json_response or json_response['success'] is not True: + 'success' not in json_response or json_response['success'] is not True: raise InvalidExchangeRateResponse(self.name, 'result not found') return defer.succeed(float(json_response['ticker']['price'])) @@ -231,11 +231,11 @@ class ExchangeRateManager(object): for market in self.market_feeds: if (market.rate_is_initialized() and market.is_online() and - market.rate.currency_pair == (from_currency, to_currency)): + market.rate.currency_pair == 
(from_currency, to_currency)): return amount * market.rate.spot for market in self.market_feeds: if (market.rate_is_initialized() and market.is_online() and - market.rate.currency_pair[0] == from_currency): + market.rate.currency_pair[0] == from_currency): return self.convert_currency( market.rate.currency_pair[1], to_currency, amount * market.rate.spot) raise Exception( diff --git a/lbrynet/daemon/__init__.py b/lbrynet/daemon/__init__.py index 7461e1c00..c428bbb3b 100644 --- a/lbrynet/daemon/__init__.py +++ b/lbrynet/daemon/__init__.py @@ -1,3 +1,4 @@ +from lbrynet import custom_logger +import Components # register Component classes from lbrynet.daemon.auth.client import LBRYAPIClient - get_client = LBRYAPIClient.get_client diff --git a/lbrynet/daemon/auth/factory.py b/lbrynet/daemon/auth/factory.py new file mode 100644 index 000000000..fed157cc0 --- /dev/null +++ b/lbrynet/daemon/auth/factory.py @@ -0,0 +1,38 @@ +import logging +import os + +from twisted.web import server, guard, resource +from twisted.cred import portal + +from lbrynet import conf +from .auth import PasswordChecker, HttpPasswordRealm +from .util import initialize_api_key_file + +log = logging.getLogger(__name__) + + +class AuthJSONRPCResource(resource.Resource): + def __init__(self, protocol): + resource.Resource.__init__(self) + self.putChild("", protocol) + self.putChild(conf.settings['API_ADDRESS'], protocol) + + def getChild(self, name, request): + request.setHeader('cache-control', 'no-cache, no-store, must-revalidate') + request.setHeader('expires', '0') + return self if name == '' else resource.Resource.getChild(self, name, request) + + def getServerFactory(self): + if conf.settings['use_auth_http']: + log.info("Using authenticated API") + pw_path = os.path.join(conf.settings['data_dir'], ".api_keys") + initialize_api_key_file(pw_path) + checker = PasswordChecker.load_file(pw_path) + realm = HttpPasswordRealm(self) + portal_to_realm = portal.Portal(realm, [checker, ]) + factory = 
guard.BasicCredentialFactory('Login to lbrynet api') + root = guard.HTTPAuthSessionWrapper(portal_to_realm, [factory, ]) + else: + log.info("Using non-authenticated API") + root = self + return server.Site(root) diff --git a/lbrynet/daemon/auth/server.py b/lbrynet/daemon/auth/server.py index a0d365a35..1190a58de 100644 --- a/lbrynet/daemon/auth/server.py +++ b/lbrynet/daemon/auth/server.py @@ -2,8 +2,10 @@ import logging import urlparse import json import inspect +import signal from decimal import Decimal +from functools import wraps from zope.interface import implements from twisted.web import server, resource from twisted.internet import defer @@ -12,13 +14,16 @@ from twisted.internet.error import ConnectionDone, ConnectionLost from txjsonrpc import jsonrpclib from traceback import format_exc -from lbrynet import conf +from lbrynet import conf, analytics from lbrynet.core.Error import InvalidAuthenticationToken from lbrynet.core import utils -from lbrynet.daemon.auth.util import APIKey, get_auth_message -from lbrynet.daemon.auth.client import LBRY_SECRET +from lbrynet.core.Error import ComponentsNotStarted, ComponentStartConditionNotMet +from lbrynet.core.looping_call_manager import LoopingCallManager +from lbrynet.daemon.ComponentManager import ComponentManager from lbrynet.undecorated import undecorated - +from .util import APIKey, get_auth_message +from .client import LBRY_SECRET +from .factory import AuthJSONRPCResource log = logging.getLogger(__name__) EMPTY_PARAMS = [{}] @@ -91,10 +96,6 @@ class UnknownAPIMethodError(Exception): pass -class NotAllowedDuringStartupError(Exception): - pass - - def trap(err, *to_trap): err.trap(*to_trap) @@ -141,6 +142,29 @@ class AuthorizedBase(object): return f return _deprecated_wrapper + @staticmethod + def requires(*components, **conditions): + if conditions and ["conditions"] != conditions.keys(): + raise SyntaxError("invalid conditions argument") + condition_names = conditions.get("conditions", []) + + def _wrap(fn): + 
@defer.inlineCallbacks + @wraps(fn) + def _inner(*args, **kwargs): + component_manager = args[0].component_manager + for condition_name in condition_names: + condition_result, err_msg = yield component_manager.evaluate_condition(condition_name) + if not condition_result: + raise ComponentStartConditionNotMet(err_msg) + if not component_manager.all_components_running(*components): + raise ComponentsNotStarted("the following required components have not yet started: " + "%s" % json.dumps(components)) + result = yield fn(*args, **kwargs) + defer.returnValue(result) + return _inner + return _wrap + class AuthJSONRPCServer(AuthorizedBase): """ @@ -149,7 +173,6 @@ class AuthJSONRPCServer(AuthorizedBase): API methods are named with a leading "jsonrpc_" Attributes: - allowed_during_startup (list): list of api methods that are callable before the server has finished startup sessions (dict): (dict): {: } callable_methods (dict): {: } @@ -170,14 +193,85 @@ class AuthJSONRPCServer(AuthorizedBase): isLeaf = True allowed_during_startup = [] + component_attributes = {} - def __init__(self, use_authentication=None): + def __init__(self, analytics_manager=None, component_manager=None, use_authentication=None, to_skip=None, + looping_calls=None): + self.analytics_manager = analytics_manager or analytics.Manager.new_instance() + self.component_manager = component_manager or ComponentManager( + analytics_manager=self.analytics_manager, + skip_components=to_skip or [] + ) + self.looping_call_manager = LoopingCallManager({n: lc for n, (lc, t) in (looping_calls or {}).iteritems()}) + self._looping_call_times = {n: t for n, (lc, t) in (looping_calls or {}).iteritems()} self._use_authentication = use_authentication or conf.settings['use_auth_http'] + self._component_setup_deferred = None self.announced_startup = False self.sessions = {} + @defer.inlineCallbacks + def start_listening(self): + from twisted.internet import reactor, error as tx_error + + try: + reactor.listenTCP( + 
conf.settings['api_port'], self.get_server_factory(), interface=conf.settings['api_host'] + ) + log.info("lbrynet API listening on TCP %s:%i", conf.settings['api_host'], conf.settings['api_port']) + yield self.setup() + self.analytics_manager.send_server_startup_success() + except tx_error.CannotListenError: + log.error('lbrynet API failed to bind TCP %s:%i for listening', conf.settings['api_host'], + conf.settings['api_port']) + reactor.fireSystemEvent("shutdown") + except defer.CancelledError: + log.info("shutting down before finished starting") + reactor.fireSystemEvent("shutdown") + except Exception as err: + self.analytics_manager.send_server_startup_error(str(err)) + log.exception('Failed to start lbrynet-daemon') + reactor.fireSystemEvent("shutdown") + def setup(self): - return NotImplementedError() + from twisted.internet import reactor + + reactor.addSystemEventTrigger('before', 'shutdown', self._shutdown) + if not self.analytics_manager.is_started: + self.analytics_manager.start() + for lc_name, lc_time in self._looping_call_times.iteritems(): + self.looping_call_manager.start(lc_name, lc_time) + + def update_attribute(setup_result, component): + setattr(self, self.component_attributes[component.component_name], component.component) + + kwargs = {component: update_attribute for component in self.component_attributes.keys()} + self._component_setup_deferred = self.component_manager.setup(**kwargs) + return self._component_setup_deferred + + @staticmethod + def _already_shutting_down(sig_num, frame): + log.info("Already shutting down") + + def _shutdown(self): + # ignore INT/TERM signals once shutdown has started + signal.signal(signal.SIGINT, self._already_shutting_down) + signal.signal(signal.SIGTERM, self._already_shutting_down) + self.looping_call_manager.shutdown() + if self.analytics_manager: + self.analytics_manager.shutdown() + try: + self._component_setup_deferred.cancel() + except (AttributeError, defer.CancelledError): + pass + if 
self.component_manager is not None: + d = self.component_manager.stop() + d.addErrback(log.fail(), 'Failure while shutting down') + else: + d = defer.succeed(None) + return d + + def get_server_factory(self): + return AuthJSONRPCResource(self).getServerFactory() def _set_headers(self, request, data, update_secret=False): if conf.settings['allowed_origin']: @@ -207,8 +301,9 @@ class AuthJSONRPCServer(AuthorizedBase): else: # last resort, just cast it as a string error = JSONRPCError(str(failure)) - log.warning("error processing api request: %s\ntraceback: %s", error.message, - "\n".join(error.traceback)) + if not failure.check(ComponentsNotStarted, ComponentStartConditionNotMet): + log.warning("error processing api request: %s\ntraceback: %s", error.message, + "\n".join(error.traceback)) response_content = jsonrpc_dumps_pretty(error, id=id_) self._set_headers(request, response_content) request.setResponseCode(200) @@ -304,14 +399,6 @@ class AuthJSONRPCServer(AuthorizedBase): request, request_id ) return server.NOT_DONE_YET - except NotAllowedDuringStartupError: - log.warning('Function not allowed during startup: %s', function_name) - self._render_error( - JSONRPCError("This method is unavailable until the daemon is fully started", - code=JSONRPCError.CODE_INVALID_REQUEST), - request, request_id - ) - return server.NOT_DONE_YET if args == EMPTY_PARAMS or args == []: _args, _kwargs = (), {} @@ -416,9 +503,6 @@ class AuthJSONRPCServer(AuthorizedBase): def _verify_method_is_callable(self, function_path): if function_path not in self.callable_methods: raise UnknownAPIMethodError(function_path) - if not self.announced_startup: - if function_path not in self.allowed_during_startup: - raise NotAllowedDuringStartupError(function_path) def _get_jsonrpc_method(self, function_path): if function_path in self.deprecated_methods: diff --git a/lbrynet/dht/node.py b/lbrynet/dht/node.py index 935ba1264..9c3b0a5a2 100644 --- a/lbrynet/dht/node.py +++ b/lbrynet/dht/node.py @@ -281,7 
+281,9 @@ class Node(MockKademliaHelper): yield self._protocol._listening # TODO: Refresh all k-buckets further away than this node's closest neighbour yield self.joinNetwork(known_node_addresses or []) + self.start_looping_calls() + def start_looping_calls(self): self.safe_start_looping_call(self._change_token_lc, constants.tokenSecretChangeInterval) # Start refreshing k-buckets periodically, if necessary self.safe_start_looping_call(self._refresh_node_lc, constants.checkRefreshInterval) diff --git a/lbrynet/tests/functional/test_misc.py b/lbrynet/tests/functional/test_misc.py index b134b6da2..01badedae 100644 --- a/lbrynet/tests/functional/test_misc.py +++ b/lbrynet/tests/functional/test_misc.py @@ -39,6 +39,7 @@ DummyBlobAvailabilityTracker = mocks.BlobAvailabilityTracker log_format = "%(funcName)s(): %(message)s" logging.basicConfig(level=logging.CRITICAL, format=log_format) +TEST_SKIP_STRING_ANDROID = "Test cannot pass on Android because multiprocessing is not supported at the OS level." 
def require_system(system): def wrapper(fn): @@ -103,13 +104,14 @@ class LbryUploader(object): rate_limiter = RateLimiter() self.sd_identifier = StreamDescriptorIdentifier() self.db_dir, self.blob_dir = mk_db_and_blob_dir() + dht_node = FakeNode(peer_finder=peer_finder, peer_manager=peer_manager, udpPort=4445, peerPort=5553, + node_id="abcd", externalIP="127.0.0.1") self.session = Session( conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=self.db_dir, blob_dir=self.blob_dir, node_id="abcd", peer_finder=peer_finder, hash_announcer=hash_announcer, - peer_port=5553, dht_node_port=4445, use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, - blob_tracker_class=DummyBlobAvailabilityTracker, - dht_node_class=FakeNode, is_generous=self.is_generous, external_ip="127.0.0.1") + peer_port=5553, dht_node_port=4445, rate_limiter=rate_limiter, wallet=wallet, + dht_node=dht_node, external_ip="127.0.0.1") self.lbry_file_manager = EncryptedFileManager(self.session, self.sd_identifier) if self.ul_rate_limit is not None: self.session.rate_limiter.set_ul_limit(self.ul_rate_limit) @@ -197,12 +199,10 @@ def start_lbry_reuploader(sd_hash, kill_event, dead_event, db_dir, blob_dir = mk_db_and_blob_dir() session = Session(conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, - node_id="abcd" + str(n), dht_node_port=4446, dht_node_class=FakeNode, + node_id="abcd" + str(n), dht_node_port=4446, peer_finder=peer_finder, hash_announcer=hash_announcer, blob_dir=blob_dir, peer_port=peer_port, - use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, - blob_tracker_class=DummyBlobAvailabilityTracker, - is_generous=conf.ADJUSTABLE_SETTINGS['is_generous_host'][1], + rate_limiter=rate_limiter, wallet=wallet, external_ip="127.0.0.1") lbry_file_manager = EncryptedFileManager(session, sd_identifier) @@ -303,13 +303,14 @@ def start_blob_uploader(blob_hash_queue, kill_event, dead_event, slow, is_genero db_dir, blob_dir = mk_db_and_blob_dir() + dht_node = FakeNode(peer_finder=peer_finder, 
peer_manager=peer_manager, udpPort=4445, peerPort=5553, + node_id="abcd", externalIP="127.0.0.1") + session = Session(conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, node_id="efgh", - peer_finder=peer_finder, hash_announcer=hash_announcer, dht_node_class=FakeNode, + peer_finder=peer_finder, hash_announcer=hash_announcer, blob_dir=blob_dir, peer_port=peer_port, dht_node_port=4446, - use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, - blob_tracker_class=DummyBlobAvailabilityTracker, - is_generous=conf.ADJUSTABLE_SETTINGS['is_generous_host'][1], - external_ip="127.0.0.1") + rate_limiter=rate_limiter, wallet=wallet, + external_ip="127.0.0.1", dht_node=dht_node) if slow is True: session.rate_limiter.set_ul_limit(2 ** 11) @@ -478,15 +479,16 @@ class TestTransfer(TestCase): hash_announcer = FakeAnnouncer() rate_limiter = DummyRateLimiter() sd_identifier = StreamDescriptorIdentifier() + dht_node = FakeNode(peer_finder=peer_finder, peer_manager=peer_manager, udpPort=4445, peerPort=5553, + node_id="abcd", externalIP="127.0.0.1") db_dir, blob_dir = mk_db_and_blob_dir() self.session = Session( conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, node_id="abcd", peer_finder=peer_finder, hash_announcer=hash_announcer, blob_dir=blob_dir, peer_port=5553, dht_node_port=4445, - use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, - blob_tracker_class=DummyBlobAvailabilityTracker, - dht_node_class=FakeNode, is_generous=self.is_generous, external_ip="127.0.0.1") + rate_limiter=rate_limiter, wallet=wallet, + dht_node=dht_node, external_ip="127.0.0.1") self.lbry_file_manager = EncryptedFileManager( self.session, sd_identifier) @@ -566,15 +568,16 @@ class TestTransfer(TestCase): peer_finder = FakePeerFinder(5553, peer_manager, 2) hash_announcer = FakeAnnouncer() rate_limiter = DummyRateLimiter() + dht_node = FakeNode(peer_finder=peer_finder, peer_manager=peer_manager, udpPort=4445, peerPort=5553, + node_id="abcd", externalIP="127.0.0.1") db_dir, blob_dir = 
mk_db_and_blob_dir() self.session = Session( conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, node_id="abcd", peer_finder=peer_finder, hash_announcer=hash_announcer, - blob_dir=blob_dir, peer_port=5553, dht_node_port=4445, dht_node_class=FakeNode, - use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, - blob_tracker_class=DummyBlobAvailabilityTracker, - is_generous=conf.ADJUSTABLE_SETTINGS['is_generous_host'][1], external_ip="127.0.0.1") + blob_dir=blob_dir, peer_port=5553, dht_node_port=4445, + rate_limiter=rate_limiter, wallet=wallet, + dht_node=dht_node, external_ip="127.0.0.1") d1 = self.wait_for_hash_from_queue(blob_hash_queue_1) d2 = self.wait_for_hash_from_queue(blob_hash_queue_2) @@ -646,17 +649,17 @@ class TestTransfer(TestCase): hash_announcer = FakeAnnouncer() rate_limiter = DummyRateLimiter() sd_identifier = StreamDescriptorIdentifier() + dht_node = FakeNode(peer_finder=peer_finder, peer_manager=peer_manager, udpPort=4445, peerPort=5553, + node_id="abcd", externalIP="127.0.0.1") downloaders = [] db_dir, blob_dir = mk_db_and_blob_dir() self.session = Session(conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, - node_id="abcd", peer_finder=peer_finder, dht_node_port=4445, dht_node_class=FakeNode, + node_id="abcd", peer_finder=peer_finder, dht_node_port=4445, hash_announcer=hash_announcer, blob_dir=blob_dir, peer_port=5553, - use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, - blob_tracker_class=DummyBlobAvailabilityTracker, - is_generous=conf.ADJUSTABLE_SETTINGS['is_generous_host'][1], - external_ip="127.0.0.1") + rate_limiter=rate_limiter, wallet=wallet, + external_ip="127.0.0.1", dht_node=dht_node) self.lbry_file_manager = EncryptedFileManager(self.session, sd_identifier) @@ -758,13 +761,11 @@ class TestTransfer(TestCase): sd_identifier = StreamDescriptorIdentifier() db_dir, blob_dir = mk_db_and_blob_dir() - self.session = Session(conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, dht_node_class=FakeNode, + self.session = 
Session(conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, node_id="abcd", peer_finder=peer_finder, dht_node_port=4445, hash_announcer=hash_announcer, blob_dir=blob_dir, - peer_port=5553, use_upnp=False, rate_limiter=rate_limiter, - wallet=wallet, blob_tracker_class=DummyBlobAvailabilityTracker, - is_generous=conf.ADJUSTABLE_SETTINGS['is_generous_host'][1], - external_ip="127.0.0.1") + peer_port=5553, rate_limiter=rate_limiter, + wallet=wallet, external_ip="127.0.0.1") self.lbry_file_manager = EncryptedFileManager( self.session, sd_identifier) @@ -842,3 +843,10 @@ class TestTransfer(TestCase): d.addBoth(stop) return d + + if is_android(): + test_lbry_transfer.skip = TEST_SKIP_STRING_ANDROID + test_last_blob_retrieval.skip = TEST_SKIP_STRING_ANDROID + test_double_download.skip = TEST_SKIP_STRING_ANDROID + test_multiple_uploaders.skip = TEST_SKIP_STRING_ANDROID + diff --git a/lbrynet/tests/functional/test_reflector.py b/lbrynet/tests/functional/test_reflector.py index cde45583b..082d9d74a 100644 --- a/lbrynet/tests/functional/test_reflector.py +++ b/lbrynet/tests/functional/test_reflector.py @@ -53,13 +53,13 @@ class TestReflector(unittest.TestCase): db_dir=self.db_dir, node_id="abcd", peer_finder=peer_finder, + peer_manager=peer_manager, blob_dir=self.blob_dir, peer_port=5553, dht_node_port=4444, - use_upnp=False, wallet=wallet, - blob_tracker_class=mocks.BlobAvailabilityTracker, external_ip="127.0.0.1", + dht_node=mocks.Node(), hash_announcer=mocks.Announcer(), ) @@ -73,13 +73,13 @@ class TestReflector(unittest.TestCase): db_dir=self.server_db_dir, node_id="abcd", peer_finder=peer_finder, + peer_manager=peer_manager, blob_dir=self.server_blob_dir, peer_port=5554, dht_node_port=4443, - use_upnp=False, wallet=wallet, - blob_tracker_class=mocks.BlobAvailabilityTracker, external_ip="127.0.0.1", + dht_node=mocks.Node(), hash_announcer=mocks.Announcer(), ) diff --git a/lbrynet/tests/functional/test_streamify.py b/lbrynet/tests/functional/test_streamify.py index 
cda06758b..566427bd3 100644 --- a/lbrynet/tests/functional/test_streamify.py +++ b/lbrynet/tests/functional/test_streamify.py @@ -30,6 +30,7 @@ DummyBlobAvailabilityTracker = mocks.BlobAvailabilityTracker class TestStreamify(TestCase): maxDiff = 5000 + def setUp(self): mocks.mock_conf_settings(self) self.session = None @@ -37,6 +38,12 @@ class TestStreamify(TestCase): self.is_generous = True self.db_dir = tempfile.mkdtemp() self.blob_dir = os.path.join(self.db_dir, "blobfiles") + self.dht_node = FakeNode() + self.wallet = FakeWallet() + self.peer_manager = PeerManager() + self.peer_finder = FakePeerFinder(5553, self.peer_manager, 2) + self.rate_limiter = DummyRateLimiter() + self.sd_identifier = StreamDescriptorIdentifier() os.mkdir(self.blob_dir) @defer.inlineCallbacks @@ -54,26 +61,17 @@ class TestStreamify(TestCase): os.remove("test_file") def test_create_stream(self): - wallet = FakeWallet() - peer_manager = PeerManager() - peer_finder = FakePeerFinder(5553, peer_manager, 2) - hash_announcer = FakeAnnouncer() - rate_limiter = DummyRateLimiter() - sd_identifier = StreamDescriptorIdentifier() self.session = Session( - conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=self.db_dir, node_id="abcd", - peer_finder=peer_finder, hash_announcer=hash_announcer, - blob_dir=self.blob_dir, peer_port=5553, - use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, - blob_tracker_class=DummyBlobAvailabilityTracker, - is_generous=self.is_generous, external_ip="127.0.0.1", dht_node_class=mocks.Node + conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=self.db_dir, node_id="abcd", peer_finder=self.peer_finder, + blob_dir=self.blob_dir, peer_port=5553, rate_limiter=self.rate_limiter, wallet=self.wallet, + external_ip="127.0.0.1", dht_node=self.dht_node ) - self.lbry_file_manager = EncryptedFileManager(self.session, sd_identifier) + self.lbry_file_manager = EncryptedFileManager(self.session, self.sd_identifier) d = self.session.setup() - d.addCallback(lambda _: 
add_lbry_file_to_sd_identifier(sd_identifier)) + d.addCallback(lambda _: add_lbry_file_to_sd_identifier(self.sd_identifier)) d.addCallback(lambda _: self.lbry_file_manager.setup()) def verify_equal(sd_info): @@ -102,22 +100,14 @@ class TestStreamify(TestCase): return d def test_create_and_combine_stream(self): - wallet = FakeWallet() - peer_manager = PeerManager() - peer_finder = FakePeerFinder(5553, peer_manager, 2) - hash_announcer = FakeAnnouncer() - rate_limiter = DummyRateLimiter() - sd_identifier = StreamDescriptorIdentifier() self.session = Session( - conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=self.db_dir, node_id="abcd", - peer_finder=peer_finder, hash_announcer=hash_announcer, - blob_dir=self.blob_dir, peer_port=5553, dht_node_class=mocks.Node, - use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, - blob_tracker_class=DummyBlobAvailabilityTracker, external_ip="127.0.0.1" + conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=self.db_dir, node_id="abcd", peer_finder=self.peer_finder, + blob_dir=self.blob_dir, peer_port=5553, rate_limiter=self.rate_limiter, wallet=self.wallet, + external_ip="127.0.0.1", dht_node=self.dht_node ) - self.lbry_file_manager = EncryptedFileManager(self.session, sd_identifier) + self.lbry_file_manager = EncryptedFileManager(self.session, self.sd_identifier) @defer.inlineCallbacks def create_stream(): @@ -132,7 +122,7 @@ class TestStreamify(TestCase): self.assertEqual(hashsum.hexdigest(), "68959747edc73df45e45db6379dd7b3b") d = self.session.setup() - d.addCallback(lambda _: add_lbry_file_to_sd_identifier(sd_identifier)) + d.addCallback(lambda _: add_lbry_file_to_sd_identifier(self.sd_identifier)) d.addCallback(lambda _: self.lbry_file_manager.setup()) d.addCallback(lambda _: create_stream()) return d diff --git a/lbrynet/tests/mocks.py b/lbrynet/tests/mocks.py index c8e131362..49114610d 100644 --- a/lbrynet/tests/mocks.py +++ b/lbrynet/tests/mocks.py @@ -1,5 +1,6 @@ import base64 import io +import mock from 
cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives.asymmetric import rsa @@ -10,6 +11,7 @@ from twisted.python.failure import Failure from lbrynet.core.client.ClientRequest import ClientRequest from lbrynet.core.Error import RequestCanceledError from lbrynet.core import BlobAvailability +from lbrynet.file_manager.EncryptedFileManager import EncryptedFileManager from lbrynet.dht.node import Node as RealNode from lbrynet.daemon import ExchangeRateManager as ERM from lbrynet import conf @@ -63,6 +65,7 @@ class BTCLBCFeed(ERM.MarketFeed): 0.0 ) + class USDBTCFeed(ERM.MarketFeed): def __init__(self): ERM.MarketFeed.__init__( @@ -74,6 +77,7 @@ class USDBTCFeed(ERM.MarketFeed): 0.0 ) + class ExchangeRateManager(ERM.ExchangeRateManager): def __init__(self, market_feeds, rates): self.market_feeds = market_feeds @@ -360,6 +364,96 @@ class BlobAvailabilityTracker(BlobAvailability.BlobAvailabilityTracker): pass + +# The fake components below (FakeComponent and its subclasses FakeDelayedWallet, FakeDelayedSession, +# FakeDelayedFileManager, FakeFileManager) exist only for testing ComponentManager startup and stop +class FakeComponent(object): + depends_on = [] + component_name = None + + def __init__(self, component_manager): + self.component_manager = component_manager + self._running = False + + @property + def running(self): + return self._running + + def start(self): + raise NotImplementedError # Override + + def stop(self): + return defer.succeed(None) + + @property + def component(self): + return self + + @defer.inlineCallbacks + def _setup(self): + result = yield defer.maybeDeferred(self.start) + self._running = True + defer.returnValue(result) + + @defer.inlineCallbacks + def _stop(self): + result = yield defer.maybeDeferred(self.stop) + self._running = False + defer.returnValue(result) + + +class FakeDelayedWallet(FakeComponent): + component_name = "wallet" + depends_on = [] + + def start(self): + return defer.succeed(True) + + def stop(self): + d = defer.Deferred() + 
self.component_manager.reactor.callLater(1, d.callback, True) + return d + + +class FakeDelayedSession(FakeComponent): + component_name = "session" + depends_on = [FakeDelayedWallet.component_name] + + def start(self): + d = defer.Deferred() + self.component_manager.reactor.callLater(1, d.callback, True) + return d + + def stop(self): + d = defer.Deferred() + self.component_manager.reactor.callLater(1, d.callback, True) + return d + + +class FakeDelayedFileManager(FakeComponent): + component_name = "file_manager" + depends_on = [FakeDelayedSession.component_name] + + def start(self): + d = defer.Deferred() + self.component_manager.reactor.callLater(1, d.callback, True) + return d + + def stop(self): + return defer.succeed(True) + +class FakeFileManager(FakeComponent): + component_name = "file_manager" + depends_on = [] + + @property + def component(self): + return mock.Mock(spec=EncryptedFileManager) + + def start(self): + return defer.succeed(True) + + def stop(self): + pass create_stream_sd_file = { 'stream_name': '746573745f66696c65', diff --git a/lbrynet/tests/unit/components/__init__.py b/lbrynet/tests/unit/components/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lbrynet/tests/unit/components/test_Component_Manager.py b/lbrynet/tests/unit/components/test_Component_Manager.py new file mode 100644 index 000000000..504b12ac8 --- /dev/null +++ b/lbrynet/tests/unit/components/test_Component_Manager.py @@ -0,0 +1,133 @@ +from twisted.internet.task import Clock +from twisted.trial import unittest + +from lbrynet.daemon.ComponentManager import ComponentManager +from lbrynet.daemon.Components import DATABASE_COMPONENT, DHT_COMPONENT, STREAM_IDENTIFIER_COMPONENT +from lbrynet.daemon.Components import HASH_ANNOUNCER_COMPONENT, REFLECTOR_COMPONENT, UPNP_COMPONENT +from lbrynet.daemon.Components import PEER_PROTOCOL_SERVER_COMPONENT, EXCHANGE_RATE_MANAGER_COMPONENT +from lbrynet.daemon import Components +from lbrynet.tests import mocks + + +class 
TestComponentManager(unittest.TestCase): + def setUp(self): + mocks.mock_conf_settings(self) + self.default_components_sort = [ + [Components.DatabaseComponent, + Components.ExchangeRateManagerComponent, + Components.UPnPComponent], + [Components.DHTComponent, + Components.WalletComponent], + [Components.HashAnnouncerComponent], + [Components.SessionComponent], + [Components.PeerProtocolServerComponent, + Components.StreamIdentifierComponent], + [Components.FileManagerComponent], + [Components.ReflectorComponent] + ] + self.component_manager = ComponentManager() + + def tearDown(self): + pass + + def test_sort_components(self): + stages = self.component_manager.sort_components() + + for stage_list, sorted_stage_list in zip(stages, self.default_components_sort): + self.assertEqual([type(stage) for stage in stage_list], sorted_stage_list) + + def test_sort_components_reverse(self): + rev_stages = self.component_manager.sort_components(reverse=True) + reverse_default_components_sort = reversed(self.default_components_sort) + + for stage_list, sorted_stage_list in zip(rev_stages, reverse_default_components_sort): + self.assertEqual([type(stage) for stage in stage_list], sorted_stage_list) + + def test_get_component_not_exists(self): + + with self.assertRaises(NameError): + self.component_manager.get_component("random_component") + + +class TestComponentManagerOverrides(unittest.TestCase): + def setUp(self): + mocks.mock_conf_settings(self) + + def test_init_with_overrides(self): + class FakeWallet(object): + component_name = "wallet" + depends_on = [] + + def __init__(self, component_manager): + self.component_manager = component_manager + + @property + def component(self): + return self + + new_component_manager = ComponentManager(wallet=FakeWallet) + fake_wallet = new_component_manager.get_component("wallet") + # wallet should be an instance of FakeWallet and not WalletComponent from Components.py + self.assertIsInstance(fake_wallet, FakeWallet) + 
self.assertNotIsInstance(fake_wallet, Components.WalletComponent) + + def test_init_with_wrong_overrides(self): + class FakeRandomComponent(object): + component_name = "someComponent" + depends_on = [] + + with self.assertRaises(SyntaxError): + ComponentManager(randomComponent=FakeRandomComponent) + + +class TestComponentManagerProperStart(unittest.TestCase): + def setUp(self): + self.reactor = Clock() + mocks.mock_conf_settings(self) + self.component_manager = ComponentManager( + skip_components=[DATABASE_COMPONENT, DHT_COMPONENT, HASH_ANNOUNCER_COMPONENT, STREAM_IDENTIFIER_COMPONENT, + PEER_PROTOCOL_SERVER_COMPONENT, REFLECTOR_COMPONENT, UPNP_COMPONENT, + EXCHANGE_RATE_MANAGER_COMPONENT], + reactor=self.reactor, + wallet=mocks.FakeDelayedWallet, + session=mocks.FakeDelayedSession, + file_manager=mocks.FakeDelayedFileManager + ) + + def tearDown(self): + pass + + def test_proper_starting_of_components(self): + self.component_manager.setup() + self.assertTrue(self.component_manager.get_component('wallet').running) + self.assertFalse(self.component_manager.get_component('session').running) + self.assertFalse(self.component_manager.get_component('file_manager').running) + + self.reactor.advance(1) + self.assertTrue(self.component_manager.get_component('wallet').running) + self.assertTrue(self.component_manager.get_component('session').running) + self.assertFalse(self.component_manager.get_component('file_manager').running) + + self.reactor.advance(1) + self.assertTrue(self.component_manager.get_component('wallet').running) + self.assertTrue(self.component_manager.get_component('session').running) + self.assertTrue(self.component_manager.get_component('file_manager').running) + + def test_proper_stopping_of_components(self): + self.component_manager.setup() + self.reactor.advance(1) + self.reactor.advance(1) + self.component_manager.stop() + self.assertFalse(self.component_manager.get_component('file_manager').running) + 
self.assertTrue(self.component_manager.get_component('session').running) + self.assertTrue(self.component_manager.get_component('wallet').running) + + self.reactor.advance(1) + self.assertFalse(self.component_manager.get_component('file_manager').running) + self.assertFalse(self.component_manager.get_component('session').running) + self.assertTrue(self.component_manager.get_component('wallet').running) + + self.reactor.advance(1) + self.assertFalse(self.component_manager.get_component('file_manager').running) + self.assertFalse(self.component_manager.get_component('session').running) + self.assertFalse(self.component_manager.get_component('wallet').running) diff --git a/lbrynet/tests/unit/lbrynet_daemon/auth/test_server.py b/lbrynet/tests/unit/lbrynet_daemon/auth/test_server.py index 80fa4aa7c..bd1d5399e 100644 --- a/lbrynet/tests/unit/lbrynet_daemon/auth/test_server.py +++ b/lbrynet/tests/unit/lbrynet_daemon/auth/test_server.py @@ -11,7 +11,7 @@ class AuthJSONRPCServerTest(unittest.TestCase): # onto it. 
def setUp(self): conf.initialize_settings(False) - self.server = server.AuthJSONRPCServer(use_authentication=False) + self.server = server.AuthJSONRPCServer(True, use_authentication=False) def test_get_server_port(self): self.assertSequenceEqual( diff --git a/lbrynet/tests/unit/lbrynet_daemon/test_Daemon.py b/lbrynet/tests/unit/lbrynet_daemon/test_Daemon.py index d47c36ba2..8722611a5 100644 --- a/lbrynet/tests/unit/lbrynet_daemon/test_Daemon.py +++ b/lbrynet/tests/unit/lbrynet_daemon/test_Daemon.py @@ -1,11 +1,10 @@ import mock import json -import unittest import random from os import path from twisted.internet import defer -from twisted import trial +from twisted.trial import unittest from faker import Faker @@ -14,12 +13,15 @@ from lbryum.wallet import NewWallet from lbrynet import conf from lbrynet.core import Session, PaymentRateManager, Wallet from lbrynet.database.storage import SQLiteStorage +from lbrynet.daemon.ComponentManager import ComponentManager +from lbrynet.daemon.Components import DATABASE_COMPONENT, DHT_COMPONENT, WALLET_COMPONENT, STREAM_IDENTIFIER_COMPONENT +from lbrynet.daemon.Components import HASH_ANNOUNCER_COMPONENT, REFLECTOR_COMPONENT, UPNP_COMPONENT, SESSION_COMPONENT +from lbrynet.daemon.Components import PEER_PROTOCOL_SERVER_COMPONENT, EXCHANGE_RATE_MANAGER_COMPONENT from lbrynet.daemon.Daemon import Daemon as LBRYDaemon -from lbrynet.file_manager.EncryptedFileManager import EncryptedFileManager from lbrynet.file_manager.EncryptedFileDownloader import ManagedEncryptedFileDownloader from lbrynet.tests import util -from lbrynet.tests.mocks import mock_conf_settings, FakeNetwork +from lbrynet.tests.mocks import mock_conf_settings, FakeNetwork, FakeFileManager from lbrynet.tests.mocks import BlobAvailabilityTracker as DummyBlobAvailabilityTracker from lbrynet.tests.mocks import ExchangeRateManager as DummyExchangeRateManager from lbrynet.tests.mocks import BTCLBCFeed, USDBTCFeed @@ -40,10 +42,10 @@ def get_test_daemon(data_rate=None, 
generous=True, with_fee=False): } daemon = LBRYDaemon(None) daemon.session = mock.Mock(spec=Session.Session) - daemon.session.wallet = mock.Mock(spec=Wallet.LBRYumWallet) - daemon.session.wallet.wallet = mock.Mock(spec=NewWallet) - daemon.session.wallet.wallet.use_encryption = False - daemon.session.wallet.network = FakeNetwork() + daemon.wallet = mock.Mock(spec=Wallet.LBRYumWallet) + daemon.wallet.wallet = mock.Mock(spec=NewWallet) + daemon.wallet.wallet.use_encryption = False + daemon.wallet.network = FakeNetwork() daemon.session.storage = mock.Mock(spec=SQLiteStorage) market_feeds = [BTCLBCFeed(), USDBTCFeed()] daemon.exchange_rate_manager = DummyExchangeRateManager(market_feeds, rates) @@ -73,12 +75,12 @@ def get_test_daemon(data_rate=None, generous=True, with_fee=False): {"fee": {"USD": {"address": "bQ6BGboPV2SpTMEP7wLNiAcnsZiH8ye6eA", "amount": 0.75}}}) daemon._resolve_name = lambda _: defer.succeed(metadata) migrated = smart_decode(json.dumps(metadata)) - daemon.session.wallet.resolve = lambda *_: defer.succeed( + daemon.wallet.resolve = lambda *_: defer.succeed( {"test": {'claim': {'value': migrated.claim_dict}}}) return daemon -class TestCostEst(trial.unittest.TestCase): +class TestCostEst(unittest.TestCase): def setUp(self): mock_conf_settings(self) util.resetTime(self) @@ -111,7 +113,8 @@ class TestCostEst(trial.unittest.TestCase): self.assertEquals(daemon.get_est_cost("test", size).result, correct_result) -class TestJsonRpc(trial.unittest.TestCase): +class TestJsonRpc(unittest.TestCase): + def setUp(self): def noop(): return None @@ -119,30 +122,39 @@ class TestJsonRpc(trial.unittest.TestCase): mock_conf_settings(self) util.resetTime(self) self.test_daemon = get_test_daemon() - self.test_daemon.session.wallet.is_first_run = False - self.test_daemon.session.wallet.get_best_blockhash = noop + self.test_daemon.wallet.is_first_run = False + self.test_daemon.wallet.get_best_blockhash = noop def test_status(self): d = 
defer.maybeDeferred(self.test_daemon.jsonrpc_status) d.addCallback(lambda status: self.assertDictContainsSubset({'is_running': False}, status)) - @unittest.skipIf(is_android(), - 'Test cannot pass on Android because PYTHONOPTIMIZE removes the docstrings.') def test_help(self): d = defer.maybeDeferred(self.test_daemon.jsonrpc_help, command='status') d.addCallback(lambda result: self.assertSubstring('daemon status', result['help'])) # self.assertSubstring('daemon status', d.result) + if is_android(): + test_help.skip = "Test cannot pass on Android because PYTHONOPTIMIZE removes the docstrings." -class TestFileListSorting(trial.unittest.TestCase): + +class TestFileListSorting(unittest.TestCase): def setUp(self): mock_conf_settings(self) util.resetTime(self) self.faker = Faker('en_US') self.faker.seed(66410) self.test_daemon = get_test_daemon() - self.test_daemon.lbry_file_manager = mock.Mock(spec=EncryptedFileManager) - self.test_daemon.lbry_file_manager.lbry_files = self._get_fake_lbry_files() + component_manager = ComponentManager( + skip_components=[DATABASE_COMPONENT, DHT_COMPONENT, WALLET_COMPONENT, SESSION_COMPONENT, UPNP_COMPONENT, + PEER_PROTOCOL_SERVER_COMPONENT, REFLECTOR_COMPONENT, HASH_ANNOUNCER_COMPONENT, + STREAM_IDENTIFIER_COMPONENT, EXCHANGE_RATE_MANAGER_COMPONENT], + file_manager=FakeFileManager + ) + component_manager.setup() + self.test_daemon.component_manager = component_manager + self.test_daemon.file_manager = component_manager.get_component("file_manager") + self.test_daemon.file_manager.lbry_files = self._get_fake_lbry_files() # Pre-sorted lists of prices and file names in ascending order produced by # faker with seed 66410. 
This seed was chosen becacuse it produces 3 results diff --git a/lbrynet/tests/unit/core/test_log_support.py b/lbrynet/tests/unit/test_customLogger.py similarity index 90% rename from lbrynet/tests/unit/core/test_log_support.py rename to lbrynet/tests/unit/test_customLogger.py index 5f68c6272..74cfbb8e6 100644 --- a/lbrynet/tests/unit/core/test_log_support.py +++ b/lbrynet/tests/unit/test_customLogger.py @@ -6,7 +6,7 @@ import unittest from twisted.internet import defer from twisted import trial -from lbrynet.core import log_support +from lbrynet import custom_logger from lbrynet.tests.util import is_android @@ -22,7 +22,7 @@ class TestLogger(trial.unittest.TestCase): return d def setUp(self): - self.log = log_support.Logger('test') + self.log = custom_logger.Logger('test') self.stream = StringIO.StringIO() handler = logging.StreamHandler(self.stream) handler.setFormatter(logging.Formatter("%(filename)s:%(lineno)d - %(message)s")) @@ -36,7 +36,7 @@ class TestLogger(trial.unittest.TestCase): return self.stream.getvalue().split('\n') # the line number could change if this file gets refactored - expected_first_line = 'test_log_support.py:20 - My message: terrible things happened' + expected_first_line = 'test_customLogger.py:20 - My message: terrible things happened' # testing the entirety of the message is futile as the # traceback will depend on the system the test is being run on