From 68b31a09b4012dd33d9cbe24d45057ef6822aeea Mon Sep 17 00:00:00 2001 From: Jack Robison Date: Mon, 2 Apr 2018 15:11:27 -0400 Subject: [PATCH 01/31] add daemon Component and ComponentManager classes --- lbrynet/daemon/Component.py | 57 ++++++++++++++++++ lbrynet/daemon/ComponentManager.py | 93 ++++++++++++++++++++++++++++++ lbrynet/daemon/auth/server.py | 2 + 3 files changed, 152 insertions(+) create mode 100644 lbrynet/daemon/Component.py create mode 100644 lbrynet/daemon/ComponentManager.py diff --git a/lbrynet/daemon/Component.py b/lbrynet/daemon/Component.py new file mode 100644 index 000000000..2b6ac0582 --- /dev/null +++ b/lbrynet/daemon/Component.py @@ -0,0 +1,57 @@ +import logging +from twisted.internet import defer +from ComponentManager import ComponentManager + +log = logging.getLogger(__name__) + + +class ComponentType(type): + def __new__(mcs, name, bases, newattrs): + klass = type.__new__(mcs, name, bases, newattrs) + if name != "Component": + ComponentManager.components.add(klass) + return klass + + +class Component(object): + """ + lbrynet-daemon component helper + + Inheriting classes will be automatically registered with the ComponentManager and must implement setup and stop + methods + """ + + __metaclass__ = ComponentType + depends_on = [] + component_name = None + running = False + + @classmethod + def setup(cls): + raise NotImplementedError() # override + + @classmethod + def stop(cls): + raise NotImplementedError() # override + + @classmethod + @defer.inlineCallbacks + def _setup(cls): + try: + result = yield defer.maybeDeferred(cls.setup) + cls.running = True + defer.returnValue(result) + except Exception as err: + log.exception("Error setting up %s", cls.component_name or cls.__name__) + raise err + + @classmethod + @defer.inlineCallbacks + def _stop(cls): + try: + result = yield defer.maybeDeferred(cls.stop) + cls.running = False + defer.returnValue(result) + except Exception as err: + log.exception("Error stopping %s", cls.__name__) + 
raise err diff --git a/lbrynet/daemon/ComponentManager.py b/lbrynet/daemon/ComponentManager.py new file mode 100644 index 000000000..8b645c014 --- /dev/null +++ b/lbrynet/daemon/ComponentManager.py @@ -0,0 +1,93 @@ +import logging +from twisted.internet import defer + +log = logging.getLogger(__name__) + + +class ComponentManager(object): + components = set() + + @classmethod + def sort_components(cls, reverse=False): + """ + Sort components by requirements + """ + steps = [] + staged = set() + components = set(cls.components) + + # components with no requirements + step = [] + for component in set(components): + if not component.depends_on: + step.append(component) + staged.add(component.component_name) + components.remove(component) + + if step: + steps.append(step) + + while components: + step = [] + to_stage = set() + for component in set(components): + reqs_met = 0 + for needed in component.depends_on: + if needed in staged: + reqs_met += 1 + if reqs_met == len(component.depends_on): + step.append(component) + to_stage.add(component.component_name) + components.remove(component) + if step: + staged.update(to_stage) + steps.append(step) + elif components: + raise SyntaxError("components cannot be started: %s" % components) + if reverse: + steps.reverse() + return steps + + @classmethod + @defer.inlineCallbacks + def setup(cls): + """ + Start Components in sequence sorted by requirements + + :return: (defer.Deferred) + """ + stages = cls.sort_components() + for stage in stages: + yield defer.DeferredList([component._setup() for component in stage]) + + @classmethod + @defer.inlineCallbacks + def stop(cls): + """ + Stop Components in reversed startup order + + :return: (defer.Deferred) + """ + stages = cls.sort_components(reverse=True) + for stage in stages: + yield defer.DeferredList([component._stop() for component in stage]) + + @classmethod + def all_components_running(cls, *component_names): + """ + :return: (bool) True if all specified components are 
running + """ + c = {component.component_name: component for component in cls.components} + for component in component_names: + if component not in c: + raise NameError("%s is not a known Component" % component) + if not c[component].running: + return False + return True + + @classmethod + def get_component(cls, component_name): + for component in cls.components: + if component.component_name == component_name: + return component + raise NameError(component_name) diff --git a/lbrynet/daemon/auth/server.py b/lbrynet/daemon/auth/server.py index a0d365a35..009df762a 100644 --- a/lbrynet/daemon/auth/server.py +++ b/lbrynet/daemon/auth/server.py @@ -17,6 +17,7 @@ from lbrynet.core.Error import InvalidAuthenticationToken from lbrynet.core import utils from lbrynet.daemon.auth.util import APIKey, get_auth_message from lbrynet.daemon.auth.client import LBRY_SECRET +from lbrynet.daemon.Component import ComponentManager from lbrynet.undecorated import undecorated log = logging.getLogger(__name__) @@ -132,6 +133,7 @@ class JSONRPCServerType(type): class AuthorizedBase(object): __metaclass__ = JSONRPCServerType + component_manager = ComponentManager @staticmethod def deprecated(new_command=None): From eb11da9b1956f1340ffaeb5524c69e6a8d79f74b Mon Sep 17 00:00:00 2001 From: Jack Robison Date: Mon, 2 Apr 2018 15:13:13 -0400 Subject: [PATCH 02/31] convert directory and SQLiteStorage setup to be a Component --- lbrynet/daemon/Components.py | 89 ++++++++++++++++++++++++++++++++++++ lbrynet/daemon/Daemon.py | 69 +++++----------------------- lbrynet/daemon/__init__.py | 2 +- 3 files changed, 102 insertions(+), 58 deletions(-) create mode 100644 lbrynet/daemon/Components.py diff --git a/lbrynet/daemon/Components.py b/lbrynet/daemon/Components.py new file mode 100644 index 000000000..46f39212b --- /dev/null +++ b/lbrynet/daemon/Components.py @@ -0,0 +1,89 @@ +import os +import logging +from twisted.internet import defer, threads +from lbrynet import conf +from lbrynet.database.storage 
import SQLiteStorage +from lbrynet.daemon.Component import Component + +log = logging.getLogger(__name__) + +# settings must be initialized before this file is imported + +DATABASE_COMPONENT = "database" + + +class DatabaseComponent(Component): + component_name = DATABASE_COMPONENT + storage = None + + @staticmethod + def get_db_dir(): + return conf.settings['data_dir'] + + @staticmethod + def get_download_directory(): + return conf.settings['download_directory'] + + @staticmethod + def get_blobfile_dir(): + return conf.settings['BLOBFILES_DIR'] + + @staticmethod + def get_current_db_revision(): + return 7 + + @staticmethod + def get_revision_filename(): + return conf.settings.get_db_revision_filename() + + @staticmethod + def _write_db_revision_file(version_num): + with open(conf.settings.get_db_revision_filename(), mode='w') as db_revision: + db_revision.write(str(version_num)) + + @classmethod + @defer.inlineCallbacks + def setup(cls): + # check directories exist, create them if they don't + log.info("Loading databases") + if not os.path.exists(cls.get_download_directory()): + os.mkdir(cls.get_download_directory()) + if not os.path.exists(cls.get_db_dir()): + os.mkdir(cls.get_db_dir()) + cls._write_db_revision_file(cls.get_current_db_revision()) + log.debug("Created the db revision file: %s", cls.get_revision_filename()) + if not os.path.exists(cls.get_blobfile_dir()): + os.mkdir(cls.get_blobfile_dir()) + log.debug("Created the blobfile directory: %s", str(cls.get_blobfile_dir())) + if not os.path.exists(cls.get_revision_filename()): + log.warning("db_revision file not found. 
Creating it") + cls._write_db_revision_file(cls.get_current_db_revision()) + + # check the db migration and run any needed migrations + migrated = False + with open(cls.get_revision_filename(), "r") as revision_read_handle: + old_revision = int(revision_read_handle.read().strip()) + + if old_revision > cls.get_current_db_revision(): + raise Exception('This version of lbrynet is not compatible with the database\n' + 'Your database is revision %i, expected %i' % + (old_revision, cls.get_current_db_revision())) + if old_revision < cls.get_current_db_revision(): + from lbrynet.database.migrator import dbmigrator + log.info("Upgrading your databases (revision %i to %i)", old_revision, cls.get_current_db_revision()) + yield threads.deferToThread( + dbmigrator.migrate_db, cls.get_db_dir(), old_revision, cls.get_current_db_revision() + ) + cls._write_db_revision_file(cls.get_current_db_revision()) + log.info("Finished upgrading the databases.") + migrated = True + + # start SQLiteStorage + cls.storage = SQLiteStorage(cls.get_db_dir()) + yield cls.storage.setup() + defer.returnValue(migrated) + + @classmethod + @defer.inlineCallbacks + def stop(cls): + yield cls.storage.stop() diff --git a/lbrynet/daemon/Daemon.py b/lbrynet/daemon/Daemon.py index 85969e07c..b75037819 100644 --- a/lbrynet/daemon/Daemon.py +++ b/lbrynet/daemon/Daemon.py @@ -185,7 +185,6 @@ class Daemon(AuthJSONRPCServer): def __init__(self, analytics_manager): AuthJSONRPCServer.__init__(self, conf.settings['use_auth_http']) self.db_dir = conf.settings['data_dir'] - self.storage = SQLiteStorage(self.db_dir) self.download_directory = conf.settings['download_directory'] if conf.settings['BLOBFILES_DIR'] == "blobfiles": self.blobfile_dir = os.path.join(self.db_dir, "blobfiles") @@ -233,6 +232,7 @@ class Daemon(AuthJSONRPCServer): self.looping_call_manager = LoopingCallManager(calls) self.sd_identifier = StreamDescriptorIdentifier() self.lbry_file_manager = None + self.storage = None @defer.inlineCallbacks def 
setup(self): @@ -246,9 +246,8 @@ class Daemon(AuthJSONRPCServer): self.exchange_rate_manager.start() yield self._initial_setup() - yield threads.deferToThread(self._setup_data_directory) - migrated = yield self._check_db_migration() - yield self.storage.setup() + yield self.component_manager.setup() + self.storage = self.component_manager.get_component("database").storage yield self._get_session() yield self._check_wallet_locked() yield self._start_analytics() @@ -262,15 +261,15 @@ class Daemon(AuthJSONRPCServer): self.startup_status = STARTUP_STAGES[5] log.info("Started lbrynet-daemon") - ### - # this should be removed with the next db revision - if migrated: - missing_channel_claim_ids = yield self.storage.get_unknown_certificate_ids() - while missing_channel_claim_ids: # in case there are a crazy amount lets batch to be safe - batch = missing_channel_claim_ids[:100] - _ = yield self.session.wallet.get_claims_by_ids(*batch) - missing_channel_claim_ids = missing_channel_claim_ids[100:] - ### + # ### + # # this should be removed with the next db revision + # if migrated: + # missing_channel_claim_ids = yield self.storage.get_unknown_certificate_ids() + # while missing_channel_claim_ids: # in case there are a crazy amount lets batch to be safe + # batch = missing_channel_claim_ids[:100] + # _ = yield self.session.wallet.get_claims_by_ids(*batch) + # missing_channel_claim_ids = missing_channel_claim_ids[100:] + # ### self._auto_renew() @@ -477,50 +476,6 @@ class Daemon(AuthJSONRPCServer): return defer.succeed(True) - def _write_db_revision_file(self, version_num): - with open(self.db_revision_file, mode='w') as db_revision: - db_revision.write(str(version_num)) - - def _setup_data_directory(self): - old_revision = 1 - self.startup_status = STARTUP_STAGES[1] - log.info("Loading databases") - if not os.path.exists(self.download_directory): - os.mkdir(self.download_directory) - if not os.path.exists(self.db_dir): - os.mkdir(self.db_dir) - 
self._write_db_revision_file(self.current_db_revision) - log.debug("Created the db revision file: %s", self.db_revision_file) - if not os.path.exists(self.blobfile_dir): - os.mkdir(self.blobfile_dir) - log.debug("Created the blobfile directory: %s", str(self.blobfile_dir)) - if not os.path.exists(self.db_revision_file): - log.warning("db_revision file not found. Creating it") - self._write_db_revision_file(self.current_db_revision) - - @defer.inlineCallbacks - def _check_db_migration(self): - old_revision = 1 - migrated = False - if os.path.exists(self.db_revision_file): - with open(self.db_revision_file, "r") as revision_read_handle: - old_revision = int(revision_read_handle.read().strip()) - - if old_revision > self.current_db_revision: - raise Exception('This version of lbrynet is not compatible with the database\n' - 'Your database is revision %i, expected %i' % - (old_revision, self.current_db_revision)) - if old_revision < self.current_db_revision: - from lbrynet.database.migrator import dbmigrator - log.info("Upgrading your databases (revision %i to %i)", old_revision, self.current_db_revision) - yield threads.deferToThread( - dbmigrator.migrate_db, self.db_dir, old_revision, self.current_db_revision - ) - self._write_db_revision_file(self.current_db_revision) - log.info("Finished upgrading the databases.") - migrated = True - defer.returnValue(migrated) - @defer.inlineCallbacks def _setup_lbry_file_manager(self): log.info('Starting the file manager') diff --git a/lbrynet/daemon/__init__.py b/lbrynet/daemon/__init__.py index 7461e1c00..8e0f5feca 100644 --- a/lbrynet/daemon/__init__.py +++ b/lbrynet/daemon/__init__.py @@ -1,3 +1,3 @@ +import Components # register Component classes from lbrynet.daemon.auth.client import LBRYAPIClient - get_client = LBRYAPIClient.get_client From b808d08eb3bfb221d16b788d5ff7fae23e764a99 Mon Sep 17 00:00:00 2001 From: Jack Robison Date: Mon, 2 Apr 2018 16:49:48 -0400 Subject: [PATCH 03/31] support callbacks to component setups 
--- lbrynet/daemon/ComponentManager.py | 24 +++++++++++++++++++----- 1 file changed, 19 insertions(+), 5 deletions(-) diff --git a/lbrynet/daemon/ComponentManager.py b/lbrynet/daemon/ComponentManager.py index 8b645c014..0cb7057dc 100644 --- a/lbrynet/daemon/ComponentManager.py +++ b/lbrynet/daemon/ComponentManager.py @@ -50,15 +50,27 @@ class ComponentManager(object): @classmethod @defer.inlineCallbacks - def setup(cls): + def setup(cls, **callbacks): """ Start Components in sequence sorted by requirements :return: (defer.Deferred) """ + for component_name, cb in callbacks.iteritems(): + if not callable(cb): + raise ValueError("%s is not callable" % cb) + cls.get_component(component_name) + + def _setup(component): + if component.component_name in callbacks: + d = component._setup() + d.addCallback(callbacks[component.component_name]) + return d + return component.setup() + stages = cls.sort_components() for stage in stages: - yield defer.DeferredList([component._setup() for component in stage]) + yield defer.DeferredList([_setup(component) for component in stage]) @classmethod @defer.inlineCallbacks @@ -75,13 +87,15 @@ class ComponentManager(object): @classmethod def all_components_running(cls, *component_names): """ + Check if components are running + :return: (bool) True if all specified components are running """ - c = {component.component_name: component for component in cls.components} + components = {component.component_name: component for component in cls.components} for component in component_names: - if component not in c: + if component not in components: raise NameError("%s is not a known Component" % component) - if not c[component].running: + if not components[component].running: return False return True From 7e8ca842a2f9fdab74504823b76920fbf8336ceb Mon Sep 17 00:00:00 2001 From: Jack Robison Date: Tue, 24 Jul 2018 12:03:43 -0400 Subject: [PATCH 04/31] change ComponentManager to use instance methods rather than class methods -add get_component method 
to ComponentManager -add override_components kwargs to ComponentManager -add skip_components to ComponentManager -change component_manager attribute to exist on the AuthJSONRPCServer instance instead of the class --- lbrynet/core/Error.py | 10 ++++ lbrynet/daemon/Component.py | 45 ++++++++++-------- lbrynet/daemon/ComponentManager.py | 74 +++++++++++++++++++++--------- lbrynet/daemon/auth/server.py | 2 - 4 files changed, 90 insertions(+), 41 deletions(-) diff --git a/lbrynet/core/Error.py b/lbrynet/core/Error.py index 729ceab76..68a6df78e 100644 --- a/lbrynet/core/Error.py +++ b/lbrynet/core/Error.py @@ -155,13 +155,23 @@ class InvalidAuthenticationToken(Exception): class NegotiationError(Exception): pass + class InvalidCurrencyError(Exception): def __init__(self, currency): self.currency = currency Exception.__init__( self, 'Invalid currency: {} is not a supported currency.'.format(currency)) + class NoSuchDirectoryError(Exception): def __init__(self, directory): self.directory = directory Exception.__init__(self, 'No such directory {}'.format(directory)) + + +class ComponentStartConditionNotMet(Exception): + pass + + +class ComponentsNotStarted(Exception): + pass diff --git a/lbrynet/daemon/Component.py b/lbrynet/daemon/Component.py index 2b6ac0582..e7877c47f 100644 --- a/lbrynet/daemon/Component.py +++ b/lbrynet/daemon/Component.py @@ -9,7 +9,7 @@ class ComponentType(type): def __new__(mcs, name, bases, newattrs): klass = type.__new__(mcs, name, bases, newattrs) if name != "Component": - ComponentManager.components.add(klass) + ComponentManager.default_component_classes[klass.component_name] = klass return klass @@ -24,34 +24,43 @@ class Component(object): __metaclass__ = ComponentType depends_on = [] component_name = None - running = False - @classmethod - def setup(cls): - raise NotImplementedError() # override + def __init__(self, component_manager): + self.component_manager = component_manager + self._running = False - @classmethod - def stop(cls): - raise 
NotImplementedError() # override + def __lt__(self, other): + return self.component_name < other.component_name + + @property + def running(self): + return self._running + + def start(self): + raise NotImplementedError() + + def stop(self): + raise NotImplementedError() + + def component(self): + raise NotImplementedError() - @classmethod @defer.inlineCallbacks - def _setup(cls): + def _setup(self): try: - result = yield defer.maybeDeferred(cls.setup) - cls.running = True + result = yield defer.maybeDeferred(self.start) + self._running = True defer.returnValue(result) except Exception as err: - log.exception("Error setting up %s", cls.component_name or cls.__name__) + log.exception("Error setting up %s", self.component_name or self.__class__.__name__) raise err - @classmethod @defer.inlineCallbacks - def _stop(cls): + def _stop(self): try: - result = yield defer.maybeDeferred(cls.stop) - cls.running = False + result = yield defer.maybeDeferred(self.stop) + self._running = False defer.returnValue(result) except Exception as err: - log.exception("Error stopping %s", cls.__name__) + log.exception("Error stopping %s", self.__class__.__name__) raise err diff --git a/lbrynet/daemon/ComponentManager.py b/lbrynet/daemon/ComponentManager.py index 0cb7057dc..3541339dc 100644 --- a/lbrynet/daemon/ComponentManager.py +++ b/lbrynet/daemon/ComponentManager.py @@ -1,20 +1,41 @@ import logging from twisted.internet import defer +from lbrynet.core.Error import ComponentStartConditionNotMet + log = logging.getLogger(__name__) class ComponentManager(object): - components = set() + default_component_classes = {} - @classmethod - def sort_components(cls, reverse=False): + def __init__(self, reactor=None, analytics_manager=None, skip_components=None, **override_components): + self.skip_components = skip_components or [] + + self.reactor = reactor + self.component_classes = {} + self.components = set() + self.analytics_manager = analytics_manager + + for component_name, component_class 
in self.default_component_classes.iteritems(): + if component_name in override_components: + component_class = override_components.pop(component_name) + if component_name not in self.skip_components: + self.component_classes[component_name] = component_class + + if override_components: + raise SyntaxError("unexpected components: %s" % override_components) + + for component_class in self.component_classes.itervalues(): + self.components.add(component_class(self)) + + def sort_components(self, reverse=False): """ Sort components by requirements """ steps = [] staged = set() - components = set(cls.components) + components = set(self.components) # components with no requirements step = [] @@ -25,6 +46,7 @@ class ComponentManager(object): components.remove(component) if step: + step.sort() steps.append(step) while components: @@ -40,58 +62,58 @@ class ComponentManager(object): to_stage.add(component.component_name) components.remove(component) if step: + step.sort() staged.update(to_stage) steps.append(step) elif components: - raise SyntaxError("components cannot be started: %s" % components) + raise ComponentStartConditionNotMet("Unresolved dependencies for: %s" % components) if reverse: steps.reverse() return steps - @classmethod @defer.inlineCallbacks - def setup(cls, **callbacks): + def setup(self, **callbacks): """ Start Components in sequence sorted by requirements :return: (defer.Deferred) """ + for component_name, cb in callbacks.iteritems(): + if component_name not in self.component_classes: + raise NameError("unknown component: %s" % component_name) if not callable(cb): raise ValueError("%s is not callable" % cb) - cls.get_component(component_name) def _setup(component): if component.component_name in callbacks: d = component._setup() d.addCallback(callbacks[component.component_name]) return d - return component.setup() + return component._setup() - stages = cls.sort_components() + stages = self.sort_components() for stage in stages: yield 
defer.DeferredList([_setup(component) for component in stage]) - @classmethod @defer.inlineCallbacks - def stop(cls): + def stop(self): """ Stop Components in reversed startup order :return: (defer.Deferred) """ - stages = cls.sort_components(reverse=True) + stages = self.sort_components(reverse=True) for stage in stages: - yield defer.DeferredList([component._stop() for component in stage]) + yield defer.DeferredList([component._stop() for component in stage if component.running]) - @classmethod - def all_components_running(cls, *component_names): + def all_components_running(self, *component_names): """ Check if components are running :return: (bool) True if all specified components are running """ - components = {component.component_name: component for component in cls.components} + components = {component.component_name: component for component in self.components} for component in component_names: if component not in components: raise NameError("%s is not a known Component" % component) @@ -99,9 +121,19 @@ class ComponentManager(object): return False return True - @classmethod - def get_component(cls, component_name): - for component in cls.components: + def get_components_status(self): + """ + List status of all the components, whether they are running or not + + :return: (dict) {(str) component_name: (bool) True is running else False} + """ + return { + component.component_name: component.running + for component in self.components + } + + def get_component(self, component_name): + for component in self.components: if component.component_name == component_name: - return component + return component.component raise NameError(component_name) diff --git a/lbrynet/daemon/auth/server.py b/lbrynet/daemon/auth/server.py index 009df762a..a0d365a35 100644 --- a/lbrynet/daemon/auth/server.py +++ b/lbrynet/daemon/auth/server.py @@ -17,7 +17,6 @@ from lbrynet.core.Error import InvalidAuthenticationToken from lbrynet.core import utils from lbrynet.daemon.auth.util import 
APIKey, get_auth_message from lbrynet.daemon.auth.client import LBRY_SECRET -from lbrynet.daemon.Component import ComponentManager from lbrynet.undecorated import undecorated log = logging.getLogger(__name__) @@ -133,7 +132,6 @@ class JSONRPCServerType(type): class AuthorizedBase(object): __metaclass__ = JSONRPCServerType - component_manager = ComponentManager @staticmethod def deprecated(new_command=None): From 40d8e9681155c363ed68047fe76d86abda07c90c Mon Sep 17 00:00:00 2001 From: hackrush Date: Tue, 24 Jul 2018 12:22:11 -0400 Subject: [PATCH 05/31] remove functions and attributes in Session and Daemon that are now part of components -rename attributes in daemon to use components --- lbrynet/core/Session.py | 157 ++------------ lbrynet/daemon/Daemon.py | 446 ++++++++++----------------------------- 2 files changed, 125 insertions(+), 478 deletions(-) diff --git a/lbrynet/core/Session.py b/lbrynet/core/Session.py index d3a1febbc..83519ae66 100644 --- a/lbrynet/core/Session.py +++ b/lbrynet/core/Session.py @@ -1,11 +1,8 @@ import logging -import miniupnpc -from twisted.internet import threads, defer +from twisted.internet import defer from lbrynet.core.BlobManager import DiskBlobManager -from lbrynet.dht import node, hashannouncer from lbrynet.database.storage import SQLiteStorage from lbrynet.core.RateLimiter import RateLimiter -from lbrynet.core.utils import generate_id from lbrynet.core.PaymentRateManager import BasePaymentRateManager, OnlyFreePaymentsManager log = logging.getLogger(__name__) @@ -32,11 +29,10 @@ class Session(object): peers can connect to this peer. 
""" - def __init__(self, blob_data_payment_rate, db_dir=None, node_id=None, peer_manager=None, dht_node_port=None, + def __init__(self, blob_data_payment_rate, db_dir=None, node_id=None, dht_node_port=None, known_dht_nodes=None, peer_finder=None, hash_announcer=None, blob_dir=None, blob_manager=None, - peer_port=None, use_upnp=True, rate_limiter=None, wallet=None, dht_node_class=node.Node, - blob_tracker_class=None, payment_rate_manager_class=None, is_generous=True, external_ip=None, - storage=None): + peer_port=None, rate_limiter=None, wallet=None, external_ip=None, storage=None, + dht_node=None, peer_manager=None): """@param blob_data_payment_rate: The default payment rate for blob data @param db_dir: The directory in which levelDB files should be stored @@ -78,10 +74,6 @@ class Session(object): @param peer_port: The port on which other peers should connect to this peer - @param use_upnp: Whether or not to try to open a hole in the - firewall so that outside peers can connect to this peer's - peer_port and dht_node_port - @param rate_limiter: An object which keeps track of the amount of data transferred to and from this peer, and can limit that rate if desired @@ -103,20 +95,14 @@ class Session(object): self.known_dht_nodes = [] self.blob_dir = blob_dir self.blob_manager = blob_manager - # self.blob_tracker = None - # self.blob_tracker_class = blob_tracker_class or BlobAvailabilityTracker self.peer_port = peer_port - self.use_upnp = use_upnp self.rate_limiter = rate_limiter self.external_ip = external_ip self.upnp_redirects = [] self.wallet = wallet - self.dht_node_class = dht_node_class - self.dht_node = None + self.dht_node = dht_node self.base_payment_rate_manager = BasePaymentRateManager(blob_data_payment_rate) self.payment_rate_manager = OnlyFreePaymentsManager() - # self.payment_rate_manager_class = payment_rate_manager_class or NegotiatedPaymentRateManager - # self.is_generous = is_generous self.storage = storage or SQLiteStorage(self.db_dir) def 
setup(self): @@ -124,15 +110,14 @@ class Session(object): log.debug("Starting session.") - if self.node_id is None: - self.node_id = generate_id() + if self.dht_node is not None: + if self.peer_manager is None: + self.peer_manager = self.dht_node.peer_manager - if self.use_upnp is True: - d = self._try_upnp() - else: - d = defer.succeed(True) - d.addCallback(lambda _: self.storage.setup()) - d.addCallback(lambda _: self._setup_dht()) + if self.peer_finder is None: + self.peer_finder = self.dht_node.peer_finder + + d = self.storage.setup() d.addCallback(lambda _: self._setup_other_components()) return d @@ -140,97 +125,12 @@ class Session(object): """Stop all services""" log.info('Stopping session.') ds = [] - if self.hash_announcer: - self.hash_announcer.stop() - # if self.blob_tracker is not None: - # ds.append(defer.maybeDeferred(self.blob_tracker.stop)) - if self.dht_node is not None: - ds.append(defer.maybeDeferred(self.dht_node.stop)) if self.rate_limiter is not None: ds.append(defer.maybeDeferred(self.rate_limiter.stop)) - if self.wallet is not None: - ds.append(defer.maybeDeferred(self.wallet.stop)) if self.blob_manager is not None: ds.append(defer.maybeDeferred(self.blob_manager.stop)) - if self.use_upnp is True: - ds.append(defer.maybeDeferred(self._unset_upnp)) return defer.DeferredList(ds) - def _try_upnp(self): - - log.debug("In _try_upnp") - - def get_free_port(upnp, port, protocol): - # returns an existing mapping if it exists - mapping = upnp.getspecificportmapping(port, protocol) - if not mapping: - return port - if upnp.lanaddr == mapping[0]: - return mapping[1] - return get_free_port(upnp, port + 1, protocol) - - def get_port_mapping(upnp, port, protocol, description): - # try to map to the requested port, if there is already a mapping use the next external - # port available - if protocol not in ['UDP', 'TCP']: - raise Exception("invalid protocol") - port = get_free_port(upnp, port, protocol) - if isinstance(port, tuple): - log.info("Found 
existing UPnP redirect %s:%i (%s) to %s:%i, using it", - self.external_ip, port, protocol, upnp.lanaddr, port) - return port - upnp.addportmapping(port, protocol, upnp.lanaddr, port, - description, '') - log.info("Set UPnP redirect %s:%i (%s) to %s:%i", self.external_ip, port, - protocol, upnp.lanaddr, port) - return port - - def threaded_try_upnp(): - if self.use_upnp is False: - log.debug("Not using upnp") - return False - u = miniupnpc.UPnP() - num_devices_found = u.discover() - if num_devices_found > 0: - u.selectigd() - external_ip = u.externalipaddress() - if external_ip != '0.0.0.0' and not self.external_ip: - # best not to rely on this external ip, the router can be behind layers of NATs - self.external_ip = external_ip - if self.peer_port: - self.peer_port = get_port_mapping(u, self.peer_port, 'TCP', 'LBRY peer port') - self.upnp_redirects.append((self.peer_port, 'TCP')) - if self.dht_node_port: - self.dht_node_port = get_port_mapping(u, self.dht_node_port, 'UDP', 'LBRY DHT port') - self.upnp_redirects.append((self.dht_node_port, 'UDP')) - return True - return False - - def upnp_failed(err): - log.warning("UPnP failed. 
Reason: %s", err.getErrorMessage()) - return False - - d = threads.deferToThread(threaded_try_upnp) - d.addErrback(upnp_failed) - return d - - def _setup_dht(self): # does not block startup, the dht will re-attempt if necessary - self.dht_node = self.dht_node_class( - node_id=self.node_id, - udpPort=self.dht_node_port, - externalIP=self.external_ip, - peerPort=self.peer_port, - peer_manager=self.peer_manager, - peer_finder=self.peer_finder, - ) - if not self.hash_announcer: - self.hash_announcer = hashannouncer.DHTHashAnnouncer(self.dht_node, self.storage) - self.peer_manager = self.dht_node.peer_manager - self.peer_finder = self.dht_node.peer_finder - d = self.dht_node.start(self.known_dht_nodes) - d.addCallback(lambda _: log.info("Joined the dht")) - d.addCallback(lambda _: self.hash_announcer.start()) - def _setup_other_components(self): log.debug("Setting up the rest of the components") @@ -244,39 +144,6 @@ class Session(object): else: self.blob_manager = DiskBlobManager(self.blob_dir, self.storage, self.dht_node._dataStore) - # if self.blob_tracker is None: - # self.blob_tracker = self.blob_tracker_class( - # self.blob_manager, self.dht_node.peer_finder, self.dht_node - # ) - # if self.payment_rate_manager is None: - # self.payment_rate_manager = self.payment_rate_manager_class( - # self.base_payment_rate_manager, self.blob_tracker, self.is_generous - # ) - self.rate_limiter.start() d = self.blob_manager.setup() - d.addCallback(lambda _: self.wallet.start()) - # d.addCallback(lambda _: self.blob_tracker.start()) - return d - - def _unset_upnp(self): - log.info("Unsetting upnp for session") - - def threaded_unset_upnp(): - u = miniupnpc.UPnP() - num_devices_found = u.discover() - if num_devices_found > 0: - u.selectigd() - for port, protocol in self.upnp_redirects: - if u.getspecificportmapping(port, protocol) is None: - log.warning( - "UPnP redirect for %s %d was removed by something else.", - protocol, port) - else: - u.deleteportmapping(port, protocol) - 
log.info("Removed UPnP redirect for %s %d.", protocol, port) - self.upnp_redirects = [] - - d = threads.deferToThread(threaded_unset_upnp) - d.addErrback(lambda err: str(err)) return d diff --git a/lbrynet/daemon/Daemon.py b/lbrynet/daemon/Daemon.py index b75037819..9f32b289c 100644 --- a/lbrynet/daemon/Daemon.py +++ b/lbrynet/daemon/Daemon.py @@ -8,11 +8,10 @@ import urllib import json import textwrap import signal -import six from copy import deepcopy from decimal import Decimal, InvalidOperation from twisted.web import server -from twisted.internet import defer, threads, error, reactor +from twisted.internet import defer, reactor from twisted.internet.task import LoopingCall from twisted.python.failure import Failure @@ -25,28 +24,20 @@ from lbryschema.decode import smart_decode # TODO: importing this when internet is disabled raises a socket.gaierror from lbrynet.core.system_info import get_lbrynet_version -from lbrynet.database.storage import SQLiteStorage from lbrynet import conf -from lbrynet.conf import LBRYCRD_WALLET, LBRYUM_WALLET from lbrynet.reflector import reupload -from lbrynet.reflector import ServerFactory as reflector_server_factory from lbrynet.core.log_support import configure_loggly_handler -from lbrynet.lbry_file.client.EncryptedFileDownloader import EncryptedFileSaverFactory -from lbrynet.lbry_file.client.EncryptedFileOptions import add_lbry_file_to_sd_identifier -from lbrynet.file_manager.EncryptedFileManager import EncryptedFileManager +from lbrynet.daemon.Component import ComponentManager +from lbrynet.daemon.Components import WALLET_COMPONENT, DATABASE_COMPONENT, SESSION_COMPONENT, DHT_COMPONENT +from lbrynet.daemon.Components import STREAM_IDENTIFIER_COMPONENT, FILE_MANAGER_COMPONENT +from lbrynet.daemon.Components import EXCHANGE_RATE_MANAGER_COMPONENT from lbrynet.daemon.Downloader import GetStream from lbrynet.daemon.Publisher import Publisher -from lbrynet.daemon.ExchangeRateManager import ExchangeRateManager from 
lbrynet.daemon.auth.server import AuthJSONRPCServer from lbrynet.core.PaymentRateManager import OnlyFreePaymentsManager from lbrynet.core import utils, system_info -from lbrynet.core.StreamDescriptor import StreamDescriptorIdentifier, download_sd_blob -from lbrynet.core.StreamDescriptor import EncryptedFileStreamType -from lbrynet.core.Session import Session -from lbrynet.core.Wallet import LBRYumWallet +from lbrynet.core.StreamDescriptor import download_sd_blob from lbrynet.core.looping_call_manager import LoopingCallManager -from lbrynet.core.server.BlobRequestHandler import BlobRequestHandlerFactory -from lbrynet.core.server.ServerProtocol import ServerProtocolFactory from lbrynet.core.Error import InsufficientFundsError, UnknownNameError from lbrynet.core.Error import DownloadDataTimeout, DownloadSDTimeout from lbrynet.core.Error import NullFundsError, NegativeFundsError @@ -58,23 +49,6 @@ from lbrynet.core.client.StandaloneBlobDownloader import StandaloneBlobDownloade log = logging.getLogger(__name__) INITIALIZING_CODE = 'initializing' -LOADING_DB_CODE = 'loading_db' -LOADING_WALLET_CODE = 'loading_wallet' -LOADING_FILE_MANAGER_CODE = 'loading_file_manager' -LOADING_SERVER_CODE = 'loading_server' -STARTED_CODE = 'started' -WAITING_FOR_FIRST_RUN_CREDITS = 'waiting_for_credits' -WAITING_FOR_UNLOCK = 'waiting_for_wallet_unlock' -STARTUP_STAGES = [ - (INITIALIZING_CODE, 'Initializing'), - (LOADING_DB_CODE, 'Loading databases'), - (LOADING_WALLET_CODE, 'Catching up with the blockchain'), - (LOADING_FILE_MANAGER_CODE, 'Setting up file manager'), - (LOADING_SERVER_CODE, 'Starting lbrynet'), - (STARTED_CODE, 'Started lbrynet'), - (WAITING_FOR_FIRST_RUN_CREDITS, 'Waiting for first run credits'), - (WAITING_FOR_UNLOCK, 'Waiting for user to unlock the wallet using the wallet_unlock command') -] # TODO: make this consistent with the stages in Downloader.py DOWNLOAD_METADATA_CODE = 'downloading_metadata' @@ -178,39 +152,20 @@ class Daemon(AuthJSONRPCServer): LBRYnet 
daemon, a jsonrpc interface to lbry functions """ - allowed_during_startup = [ - 'daemon_stop', 'status', 'version', 'wallet_unlock' - ] - - def __init__(self, analytics_manager): + def __init__(self, analytics_manager, component_manager=None): AuthJSONRPCServer.__init__(self, conf.settings['use_auth_http']) - self.db_dir = conf.settings['data_dir'] self.download_directory = conf.settings['download_directory'] - if conf.settings['BLOBFILES_DIR'] == "blobfiles": - self.blobfile_dir = os.path.join(self.db_dir, "blobfiles") - else: - log.info("Using non-default blobfiles directory: %s", conf.settings['BLOBFILES_DIR']) - self.blobfile_dir = conf.settings['BLOBFILES_DIR'] self.data_rate = conf.settings['data_rate'] self.max_key_fee = conf.settings['max_key_fee'] self.disable_max_key_fee = conf.settings['disable_max_key_fee'] self.download_timeout = conf.settings['download_timeout'] - self.run_reflector_server = conf.settings['run_reflector_server'] - self.wallet_type = conf.settings['wallet'] self.delete_blobs_on_remove = conf.settings['delete_blobs_on_remove'] - self.peer_port = conf.settings['peer_port'] - self.reflector_port = conf.settings['reflector_port'] - self.dht_node_port = conf.settings['dht_node_port'] - self.use_upnp = conf.settings['use_upnp'] self.auto_renew_claim_height_delta = conf.settings['auto_renew_claim_height_delta'] - self.startup_status = STARTUP_STAGES[0] self.connected_to_internet = True self.connection_status_code = None self.platform = None - self.current_db_revision = 9 self.db_revision_file = conf.settings.get_db_revision_filename() - self.session = None self._session_id = conf.settings.get_session_id() # TODO: this should probably be passed into the daemon, or # possibly have the entire log upload functionality taken out @@ -219,20 +174,28 @@ class Daemon(AuthJSONRPCServer): self.analytics_manager = analytics_manager self.node_id = conf.settings.node_id + # components + self.storage = None + self.dht_node = None + self.wallet = None + 
self.sd_identifier = None + self.session = None + self.file_manager = None + self.exchange_rate_manager = None + self.wallet_user = None self.wallet_password = None - self.query_handlers = {} self.waiting_on = {} self.streams = {} - self.exchange_rate_manager = ExchangeRateManager() calls = { Checker.INTERNET_CONNECTION: LoopingCall(CheckInternetConnection(self)), Checker.CONNECTION_STATUS: LoopingCall(self._update_connection_status), } self.looping_call_manager = LoopingCallManager(calls) - self.sd_identifier = StreamDescriptorIdentifier() - self.lbry_file_manager = None - self.storage = None + self.component_manager = component_manager or ComponentManager( + analytics_manager=self.analytics_manager, + skip_components=conf.settings['components_to_skip'] + ) @defer.inlineCallbacks def setup(self): @@ -243,34 +206,21 @@ class Daemon(AuthJSONRPCServer): self.looping_call_manager.start(Checker.INTERNET_CONNECTION, 3600) self.looping_call_manager.start(Checker.CONNECTION_STATUS, 30) - self.exchange_rate_manager.start() yield self._initial_setup() yield self.component_manager.setup() - self.storage = self.component_manager.get_component("database").storage - yield self._get_session() - yield self._check_wallet_locked() + self.exchange_rate_manager = self.component_manager.get_component(EXCHANGE_RATE_MANAGER_COMPONENT) + self.storage = self.component_manager.get_component(DATABASE_COMPONENT) + self.session = self.component_manager.get_component(SESSION_COMPONENT) + self.wallet = self.component_manager.get_component(WALLET_COMPONENT) + self.dht_node = self.component_manager.get_component(DHT_COMPONENT) yield self._start_analytics() - yield add_lbry_file_to_sd_identifier(self.sd_identifier) - yield self._setup_stream_identifier() - yield self._setup_lbry_file_manager() - yield self._setup_query_handlers() - yield self._setup_server() - log.info("Starting balance: " + str(self.session.wallet.get_balance())) + self.sd_identifier = 
self.component_manager.get_component(STREAM_IDENTIFIER_COMPONENT) + self.file_manager = self.component_manager.get_component(FILE_MANAGER_COMPONENT) + log.info("Starting balance: " + str(self.wallet.get_balance())) self.announced_startup = True - self.startup_status = STARTUP_STAGES[5] log.info("Started lbrynet-daemon") - # ### - # # this should be removed with the next db revision - # if migrated: - # missing_channel_claim_ids = yield self.storage.get_unknown_certificate_ids() - # while missing_channel_claim_ids: # in case there are a crazy amount lets batch to be safe - # batch = missing_channel_claim_ids[:100] - # _ = yield self.session.wallet.get_claims_by_ids(*batch) - # missing_channel_claim_ids = missing_channel_claim_ids[100:] - # ### - self._auto_renew() def _get_platform(self): @@ -301,12 +251,12 @@ class Daemon(AuthJSONRPCServer): # auto renew is turned off if 0 or some negative number if self.auto_renew_claim_height_delta < 1: defer.returnValue(None) - if not self.session.wallet.network.get_remote_height(): + if not self.wallet.network.get_remote_height(): log.warning("Failed to get remote height, aborting auto renew") defer.returnValue(None) log.debug("Renewing claim") - h = self.session.wallet.network.get_remote_height() + self.auto_renew_claim_height_delta - results = yield self.session.wallet.claim_renew_all_before_expiration(h) + h = self.wallet.network.get_remote_height() + self.auto_renew_claim_height_delta + results = yield self.wallet.claim_renew_all_before_expiration(h) for outpoint, result in results.iteritems(): if result['success']: log.info("Renewed claim at outpoint:%s claim ID:%s, paid fee:%s", @@ -315,93 +265,6 @@ class Daemon(AuthJSONRPCServer): log.info("Failed to renew claim at outpoint:%s, reason:%s", outpoint, result['reason']) - def _start_server(self): - if self.peer_port is not None: - server_factory = ServerProtocolFactory(self.session.rate_limiter, - self.query_handlers, - self.session.peer_manager) - - try: - log.info("Peer 
protocol listening on TCP %d", self.peer_port) - self.lbry_server_port = reactor.listenTCP(self.peer_port, server_factory) - except error.CannotListenError as e: - import traceback - log.error("Couldn't bind to port %d. Visit lbry.io/faq/how-to-change-port for" - " more details.", self.peer_port) - log.error("%s", traceback.format_exc()) - raise ValueError("%s lbrynet may already be running on your computer." % str(e)) - return defer.succeed(True) - - def _start_reflector(self): - if self.run_reflector_server: - log.info("Starting reflector server") - if self.reflector_port is not None: - reflector_factory = reflector_server_factory( - self.session.peer_manager, - self.session.blob_manager, - self.lbry_file_manager - ) - try: - self.reflector_server_port = reactor.listenTCP(self.reflector_port, - reflector_factory) - log.info('Started reflector on port %s', self.reflector_port) - except error.CannotListenError as e: - log.exception("Couldn't bind reflector to port %d", self.reflector_port) - raise ValueError( - "{} lbrynet may already be running on your computer.".format(e)) - return defer.succeed(True) - - def _stop_reflector(self): - if self.run_reflector_server: - log.info("Stopping reflector server") - try: - if self.reflector_server_port is not None: - self.reflector_server_port, p = None, self.reflector_server_port - return defer.maybeDeferred(p.stopListening) - except AttributeError: - return defer.succeed(True) - return defer.succeed(True) - - def _stop_file_manager(self): - if self.lbry_file_manager: - self.lbry_file_manager.stop() - return defer.succeed(True) - - def _stop_server(self): - try: - if self.lbry_server_port is not None: - self.lbry_server_port, old_port = None, self.lbry_server_port - log.info('Stop listening on port %s', old_port.port) - return defer.maybeDeferred(old_port.stopListening) - else: - return defer.succeed(True) - except AttributeError: - return defer.succeed(True) - - def _setup_server(self): - self.startup_status = 
STARTUP_STAGES[4] - d = self._start_server() - d.addCallback(lambda _: self._start_reflector()) - return d - - def _setup_query_handlers(self): - handlers = [ - BlobRequestHandlerFactory( - self.session.blob_manager, - self.session.wallet, - self.session.payment_rate_manager, - self.analytics_manager - ), - self.session.wallet.get_wallet_info_query_handler_factory(), - ] - return self._add_query_handlers(handlers) - - def _add_query_handlers(self, query_handlers): - for handler in query_handlers: - query_id = handler.get_primary_query_identifier() - self.query_handlers[query_id] = handler - return defer.succeed(None) - @staticmethod def _already_shutting_down(sig_num, frame): log.info("Already shutting down") @@ -417,21 +280,14 @@ class Daemon(AuthJSONRPCServer): signal.signal(signal.SIGTERM, self._already_shutting_down) log.info("Closing lbrynet session") - log.info("Status at time of shutdown: " + self.startup_status[0]) self._stop_streams() self.looping_call_manager.shutdown() if self.analytics_manager: self.analytics_manager.shutdown() - d = self._stop_server() - d.addErrback(log.fail(), 'Failure while shutting down') - d.addCallback(lambda _: self._stop_reflector()) - d.addErrback(log.fail(), 'Failure while shutting down') - d.addCallback(lambda _: self._stop_file_manager()) - d.addErrback(log.fail(), 'Failure while shutting down') - if self.session is not None: - d.addCallback(lambda _: self.session.shut_down()) + if self.component_manager is not None: + d = self.component_manager.stop() d.addErrback(log.fail(), 'Failure while shutting down') return d @@ -476,88 +332,10 @@ class Daemon(AuthJSONRPCServer): return defer.succeed(True) - @defer.inlineCallbacks - def _setup_lbry_file_manager(self): - log.info('Starting the file manager') - self.startup_status = STARTUP_STAGES[3] - self.lbry_file_manager = EncryptedFileManager(self.session, self.sd_identifier) - yield self.lbry_file_manager.setup() - log.info('Done setting up file manager') - def 
_start_analytics(self): if not self.analytics_manager.is_started: self.analytics_manager.start() - def _get_session(self): - def get_wallet(): - if self.wallet_type == LBRYCRD_WALLET: - raise ValueError('LBRYcrd Wallet is no longer supported') - elif self.wallet_type == LBRYUM_WALLET: - - log.info("Using lbryum wallet") - - lbryum_servers = {address: {'t': str(port)} - for address, port in conf.settings['lbryum_servers']} - - config = { - 'auto_connect': True, - 'chain': conf.settings['blockchain_name'], - 'default_servers': lbryum_servers - } - - if 'use_keyring' in conf.settings: - config['use_keyring'] = conf.settings['use_keyring'] - if conf.settings['lbryum_wallet_dir']: - config['lbryum_path'] = conf.settings['lbryum_wallet_dir'] - wallet = LBRYumWallet(self.storage, config) - return defer.succeed(wallet) - else: - raise ValueError('Wallet Type {} is not valid'.format(self.wallet_type)) - - d = get_wallet() - - def create_session(wallet): - self.session = Session( - conf.settings['data_rate'], - db_dir=self.db_dir, - node_id=self.node_id, - blob_dir=self.blobfile_dir, - dht_node_port=self.dht_node_port, - known_dht_nodes=conf.settings['known_dht_nodes'], - peer_port=self.peer_port, - use_upnp=self.use_upnp, - wallet=wallet, - is_generous=conf.settings['is_generous_host'], - external_ip=self.platform['ip'], - storage=self.storage - ) - self.startup_status = STARTUP_STAGES[2] - - d.addCallback(create_session) - d.addCallback(lambda _: self.session.setup()) - return d - - @defer.inlineCallbacks - def _check_wallet_locked(self): - wallet = self.session.wallet - if wallet.wallet.use_encryption: - self.startup_status = STARTUP_STAGES[7] - - yield wallet.check_locked() - - def _setup_stream_identifier(self): - file_saver_factory = EncryptedFileSaverFactory( - self.session.peer_finder, - self.session.rate_limiter, - self.session.blob_manager, - self.session.storage, - self.session.wallet, - self.download_directory - ) - 
self.sd_identifier.add_stream_downloader_factory(EncryptedFileStreamType, - file_saver_factory) - return defer.succeed(None) - def _download_blob(self, blob_hash, rate_manager=None, timeout=None): """ Download a blob @@ -575,7 +353,7 @@ class Daemon(AuthJSONRPCServer): timeout = timeout or 30 downloader = StandaloneBlobDownloader( blob_hash, self.session.blob_manager, self.session.peer_finder, self.session.rate_limiter, - rate_manager, self.session.wallet, timeout + rate_manager, self.wallet, timeout ) return downloader.download() @@ -583,7 +361,7 @@ class Daemon(AuthJSONRPCServer): def _get_stream_analytics_report(self, claim_dict): sd_hash = claim_dict.source_hash try: - stream_hash = yield self.session.storage.get_stream_hash_for_sd_hash(sd_hash) + stream_hash = yield self.storage.get_stream_hash_for_sd_hash(sd_hash) except Exception: stream_hash = None report = { @@ -597,7 +375,7 @@ class Daemon(AuthJSONRPCServer): sd_host = None report["sd_blob"] = sd_host if stream_hash: - blob_infos = yield self.session.storage.get_blobs_for_stream(stream_hash) + blob_infos = yield self.storage.get_blobs_for_stream(stream_hash) report["known_blobs"] = len(blob_infos) else: blob_infos = [] @@ -668,11 +446,12 @@ class Daemon(AuthJSONRPCServer): def _publish_stream(self, name, bid, claim_dict, file_path=None, certificate_id=None, claim_address=None, change_address=None): - publisher = Publisher(self.session, self.lbry_file_manager, self.session.wallet, + publisher = Publisher(self.session, self.file_manager, self.wallet, certificate_id) parse_lbry_uri(name) if not file_path: - stream_hash = yield self.storage.get_stream_hash_for_sd_hash(claim_dict['stream']['source']['source']) + stream_hash = yield self.storage.get_stream_hash_for_sd_hash( + claim_dict['stream']['source']['source']) claim_out = yield publisher.publish_stream(name, bid, claim_dict, stream_hash, claim_address, change_address) else: @@ -697,7 +476,7 @@ class Daemon(AuthJSONRPCServer): """ parsed = 
parse_lbry_uri(name) - resolution = yield self.session.wallet.resolve(parsed.name, check_cache=not force_refresh) + resolution = yield self.wallet.resolve(parsed.name, check_cache=not force_refresh) if parsed.name in resolution: result = resolution[parsed.name] defer.returnValue(result) @@ -752,7 +531,7 @@ class Daemon(AuthJSONRPCServer): cost = self._get_est_cost_from_stream_size(size) - resolved = yield self.session.wallet.resolve(uri) + resolved = yield self.wallet.resolve(uri) if uri in resolved and 'claim' in resolved[uri]: claim = ClaimDict.load_dict(resolved[uri]['claim']['value']) @@ -799,7 +578,7 @@ class Daemon(AuthJSONRPCServer): Resolve a name and return the estimated stream cost """ - resolved = yield self.session.wallet.resolve(uri) + resolved = yield self.wallet.resolve(uri) if resolved: claim_response = resolved[uri] else: @@ -879,7 +658,7 @@ class Daemon(AuthJSONRPCServer): def _get_lbry_file(self, search_by, val, return_json=False, full_status=False): lbry_file = None if search_by in FileID: - for l_f in self.lbry_file_manager.lbry_files: + for l_f in self.file_manager.lbry_files: if l_f.__dict__.get(search_by) == val: lbry_file = l_f break @@ -891,7 +670,7 @@ class Daemon(AuthJSONRPCServer): @defer.inlineCallbacks def _get_lbry_files(self, return_json=False, full_status=True, **kwargs): - lbry_files = list(self.lbry_file_manager.lbry_files) + lbry_files = list(self.file_manager.lbry_files) if kwargs: for search_type, value in iter_lbry_file_search_values(kwargs): lbry_files = [l_f for l_f in lbry_files if l_f.__dict__[search_type] == value] @@ -928,7 +707,7 @@ class Daemon(AuthJSONRPCServer): def _get_single_peer_downloader(self): downloader = SinglePeerDownloader() - downloader.setup(self.session.wallet) + downloader.setup(self.wallet) return downloader @defer.inlineCallbacks @@ -1060,7 +839,7 @@ class Daemon(AuthJSONRPCServer): should_announce_blobs = yield self.session.blob_manager.count_should_announce_blobs() response['session_status'] = { 
'managed_blobs': len(blobs), - 'managed_streams': len(self.lbry_file_manager.lbry_files), + 'managed_streams': len(self.file_manager.lbry_files), 'announce_queue_size': announce_queue_size, 'should_announce_blobs': should_announce_blobs, } @@ -1255,10 +1034,10 @@ class Daemon(AuthJSONRPCServer): (float) amount of lbry credits in wallet """ if address is None: - return self._render_response(float(self.session.wallet.get_balance())) + return self._render_response(float(self.wallet.get_balance())) else: return self._render_response(float( - self.session.wallet.get_address_balance(address, include_unconfirmed))) + self.wallet.get_address_balance(address, include_unconfirmed))) @defer.inlineCallbacks def jsonrpc_wallet_unlock(self, password): @@ -1275,9 +1054,10 @@ class Daemon(AuthJSONRPCServer): (bool) true if wallet is unlocked, otherwise false """ - cmd_runner = self.session.wallet.get_cmd_runner() - if cmd_runner.locked: - d = self.session.wallet.wallet_unlocked_d + # the check_locked() in the if statement is needed because that is what sets + # the wallet_unlocked_d deferred ¯\_(ツ)_/¯ + if not self.wallet.check_locked(): + d = self.wallet.wallet_unlocked_d d.callback(password) result = yield d else: @@ -1300,7 +1080,7 @@ class Daemon(AuthJSONRPCServer): (bool) true if wallet is decrypted, otherwise false """ - result = self.session.wallet.decrypt_wallet() + result = self.wallet.decrypt_wallet() response = yield self._render_response(result) defer.returnValue(response) @@ -1320,8 +1100,8 @@ class Daemon(AuthJSONRPCServer): (bool) true if wallet is decrypted, otherwise false """ - self.session.wallet.encrypt_wallet(new_password) - response = yield self._render_response(self.session.wallet.wallet.use_encryption) + self.wallet.encrypt_wallet(new_password) + response = yield self._render_response(self.wallet.wallet.use_encryption) defer.returnValue(response) @defer.inlineCallbacks @@ -1477,9 +1257,9 @@ class Daemon(AuthJSONRPCServer): """ if claim_id is not None and 
txid is None and nout is None: - claim_results = yield self.session.wallet.get_claim_by_claim_id(claim_id) + claim_results = yield self.wallet.get_claim_by_claim_id(claim_id) elif txid is not None and nout is not None and claim_id is None: - claim_results = yield self.session.wallet.get_claim_by_outpoint(txid, int(nout)) + claim_results = yield self.wallet.get_claim_by_outpoint(txid, int(nout)) else: raise Exception("Must specify either txid/nout, or claim_id") response = yield self._render_response(claim_results) @@ -1568,7 +1348,7 @@ class Daemon(AuthJSONRPCServer): except URIParseError: results[u] = {"error": "%s is not a valid uri" % u} - resolved = yield self.session.wallet.resolve(*valid_uris, check_cache=not force) + resolved = yield self.wallet.resolve(*valid_uris, check_cache=not force) for resolved_uri in resolved: results[resolved_uri] = resolved[resolved_uri] @@ -1626,7 +1406,7 @@ class Daemon(AuthJSONRPCServer): if parsed_uri.is_channel and not parsed_uri.path: raise Exception("cannot download a channel claim, specify a /path") - resolved_result = yield self.session.wallet.resolve(uri) + resolved_result = yield self.wallet.resolve(uri) if resolved_result and uri in resolved_result: resolved = resolved_result[uri] else: @@ -1693,7 +1473,7 @@ class Daemon(AuthJSONRPCServer): raise Exception('Unable to find a file for {}:{}'.format(search_type, value)) if status == 'start' and lbry_file.stopped or status == 'stop' and not lbry_file.stopped: - yield self.lbry_file_manager.toggle_lbry_file_running(lbry_file) + yield self.file_manager.toggle_lbry_file_running(lbry_file) msg = "Started downloading file" if status == 'start' else "Stopped downloading file" else: msg = ( @@ -1755,8 +1535,8 @@ class Daemon(AuthJSONRPCServer): file_name, stream_hash = lbry_file.file_name, lbry_file.stream_hash if lbry_file.sd_hash in self.streams: del self.streams[lbry_file.sd_hash] - yield self.lbry_file_manager.delete_lbry_file(lbry_file, - delete_file=delete_from_download_dir) 
+ yield self.file_manager.delete_lbry_file(lbry_file, + delete_file=delete_from_download_dir) log.info("Deleted file: %s", file_name) result = True @@ -1818,14 +1598,14 @@ class Daemon(AuthJSONRPCServer): if amount <= 0: raise Exception("Invalid amount") - yield self.session.wallet.update_balance() - if amount >= self.session.wallet.get_balance(): - balance = yield self.session.wallet.get_max_usable_balance_for_claim(channel_name) + yield self.wallet.update_balance() + if amount >= self.wallet.get_balance(): + balance = yield self.wallet.get_max_usable_balance_for_claim(channel_name) max_bid_amount = balance - MAX_UPDATE_FEE_ESTIMATE if balance <= MAX_UPDATE_FEE_ESTIMATE: raise InsufficientFundsError( "Insufficient funds, please deposit additional LBC. Minimum additional LBC needed {}" - . format(MAX_UPDATE_FEE_ESTIMATE - balance)) + .format(MAX_UPDATE_FEE_ESTIMATE - balance)) elif amount > max_bid_amount: raise InsufficientFundsError( "Please wait for any pending bids to resolve or lower the bid value. " @@ -1833,7 +1613,7 @@ class Daemon(AuthJSONRPCServer): .format(max_bid_amount) ) - result = yield self.session.wallet.claim_new_channel(channel_name, amount) + result = yield self.wallet.claim_new_channel(channel_name, amount) self.analytics_manager.send_new_channel() log.info("Claimed a new channel! Result: %s", result) response = yield self._render_response(result) @@ -1855,7 +1635,7 @@ class Daemon(AuthJSONRPCServer): is in the wallet. 
""" - result = yield self.session.wallet.channel_list() + result = yield self.wallet.channel_list() response = yield self._render_response(result) defer.returnValue(response) @@ -1891,7 +1671,7 @@ class Daemon(AuthJSONRPCServer): (str) Serialized certificate information """ - result = yield self.session.wallet.export_certificate_info(claim_id) + result = yield self.wallet.export_certificate_info(claim_id) defer.returnValue(result) @defer.inlineCallbacks @@ -1909,7 +1689,7 @@ class Daemon(AuthJSONRPCServer): (dict) Result dictionary """ - result = yield self.session.wallet.import_certificate_info(serialized_certificate_info) + result = yield self.wallet.import_certificate_info(serialized_certificate_info) defer.returnValue(result) @defer.inlineCallbacks @@ -2003,9 +1783,9 @@ class Daemon(AuthJSONRPCServer): if bid <= 0.0: raise ValueError("Bid value must be greater than 0.0") - yield self.session.wallet.update_balance() - if bid >= self.session.wallet.get_balance(): - balance = yield self.session.wallet.get_max_usable_balance_for_claim(name) + yield self.wallet.update_balance() + if bid >= self.wallet.get_balance(): + balance = yield self.wallet.get_max_usable_balance_for_claim(name) max_bid_amount = balance - MAX_UPDATE_FEE_ESTIMATE if balance <= MAX_UPDATE_FEE_ESTIMATE: raise InsufficientFundsError( @@ -2052,7 +1832,7 @@ class Daemon(AuthJSONRPCServer): log.warning("Stripping empty fee from published metadata") del metadata['fee'] elif 'address' not in metadata['fee']: - address = yield self.session.wallet.get_least_used_address() + address = yield self.wallet.get_least_used_address() metadata['fee']['address'] = address if 'fee' in metadata and 'version' not in metadata['fee']: metadata['fee']['version'] = '_0_0_1' @@ -2108,7 +1888,7 @@ class Daemon(AuthJSONRPCServer): certificate_id = channel_id elif channel_name: certificate_id = None - my_certificates = yield self.session.wallet.channel_list() + my_certificates = yield self.wallet.channel_list() for 
certificate in my_certificates: if channel_name == certificate['name']: certificate_id = certificate['claim_id'] @@ -2151,7 +1931,7 @@ class Daemon(AuthJSONRPCServer): if nout is None and txid is not None: raise Exception('Must specify nout') - result = yield self.session.wallet.abandon_claim(claim_id, txid, nout) + result = yield self.wallet.abandon_claim(claim_id, txid, nout) self.analytics_manager.send_claim_action('abandon') defer.returnValue(result) @@ -2178,7 +1958,7 @@ class Daemon(AuthJSONRPCServer): } """ - result = yield self.session.wallet.support_claim(name, claim_id, amount) + result = yield self.wallet.support_claim(name, claim_id, amount) self.analytics_manager.send_claim_action('new_support') defer.returnValue(result) @@ -2217,11 +1997,11 @@ class Daemon(AuthJSONRPCServer): nout = int(nout) else: raise Exception("invalid outpoint") - result = yield self.session.wallet.claim_renew(txid, nout) + result = yield self.wallet.claim_renew(txid, nout) result = {outpoint: result} else: height = int(height) - result = yield self.session.wallet.claim_renew_all_before_expiration(height) + result = yield self.wallet.claim_renew_all_before_expiration(height) defer.returnValue(result) @defer.inlineCallbacks @@ -2251,7 +2031,7 @@ class Daemon(AuthJSONRPCServer): } """ - result = yield self.session.wallet.send_claim_to_address(claim_id, address, amount) + result = yield self.wallet.send_claim_to_address(claim_id, address, amount) response = yield self._render_response(result) defer.returnValue(response) @@ -2289,7 +2069,7 @@ class Daemon(AuthJSONRPCServer): ] """ - d = self.session.wallet.get_name_claims() + d = self.wallet.get_name_claims() d.addCallback(lambda claims: self._render_response(claims)) return d @@ -2327,7 +2107,7 @@ class Daemon(AuthJSONRPCServer): } """ - claims = yield self.session.wallet.get_claims_for_name(name) # type: dict + claims = yield self.wallet.get_claims_for_name(name) # type: dict sort_claim_results(claims['claims']) 
defer.returnValue(claims) @@ -2404,8 +2184,8 @@ class Daemon(AuthJSONRPCServer): except URIParseError: results[chan_uri] = {"error": "%s is not a valid uri" % chan_uri} - resolved = yield self.session.wallet.resolve(*valid_uris, check_cache=False, page=page, - page_size=page_size) + resolved = yield self.wallet.resolve(*valid_uris, check_cache=False, page=page, + page_size=page_size) for u in resolved: if 'error' in resolved[u]: results[u] = resolved[u] @@ -2477,7 +2257,7 @@ class Daemon(AuthJSONRPCServer): """ - d = self.session.wallet.get_history() + d = self.wallet.get_history() d.addCallback(lambda r: self._render_response(r)) return d @@ -2495,7 +2275,7 @@ class Daemon(AuthJSONRPCServer): (dict) JSON formatted transaction """ - d = self.session.wallet.get_transaction(txid) + d = self.wallet.get_transaction(txid) d.addCallback(lambda r: self._render_response(r)) return d @@ -2513,7 +2293,7 @@ class Daemon(AuthJSONRPCServer): (bool) true, if address is associated with current wallet """ - d = self.session.wallet.address_is_mine(address) + d = self.wallet.address_is_mine(address) d.addCallback(lambda is_mine: self._render_response(is_mine)) return d @@ -2532,7 +2312,7 @@ class Daemon(AuthJSONRPCServer): Could contain more than one public key if multisig. 
""" - d = self.session.wallet.get_pub_keys(address) + d = self.wallet.get_pub_keys(address) d.addCallback(lambda r: self._render_response(r)) return d @@ -2551,7 +2331,7 @@ class Daemon(AuthJSONRPCServer): List of wallet addresses """ - addresses = yield self.session.wallet.list_addresses() + addresses = yield self.wallet.list_addresses() response = yield self._render_response(addresses) defer.returnValue(response) @@ -2573,7 +2353,7 @@ class Daemon(AuthJSONRPCServer): log.info("Got new wallet address: " + address) return defer.succeed(address) - d = self.session.wallet.get_new_address() + d = self.wallet.get_new_address() d.addCallback(_disp) d.addCallback(lambda address: self._render_response(address)) return d @@ -2597,7 +2377,7 @@ class Daemon(AuthJSONRPCServer): log.info("Got unused wallet address: " + address) return defer.succeed(address) - d = self.session.wallet.get_unused_address() + d = self.wallet.get_unused_address() d.addCallback(_disp) d.addCallback(lambda address: self._render_response(address)) return d @@ -2624,10 +2404,10 @@ class Daemon(AuthJSONRPCServer): elif not amount: raise NullFundsError() - reserved_points = self.session.wallet.reserve_points(address, amount) + reserved_points = self.wallet.reserve_points(address, amount) if reserved_points is None: raise InsufficientFundsError() - yield self.session.wallet.send_points_to_address(reserved_points, amount) + yield self.wallet.send_points_to_address(reserved_points, amount) self.analytics_manager.send_credits_sent() defer.returnValue(True) @@ -2675,7 +2455,7 @@ class Daemon(AuthJSONRPCServer): result = yield self.jsonrpc_send_amount_to_address(amount, address) else: validate_claim_id(claim_id) - result = yield self.session.wallet.tip_claim(claim_id, amount) + result = yield self.wallet.tip_claim(claim_id, amount) self.analytics_manager.send_claim_action('new_support') defer.returnValue(result) @@ -2704,7 +2484,7 @@ class Daemon(AuthJSONRPCServer): raise NullFundsError() broadcast = not 
no_broadcast - tx = yield self.session.wallet.create_addresses_with_balance( + tx = yield self.wallet.create_addresses_with_balance( num_addresses, amount, broadcast=broadcast) tx['broadcast'] = broadcast defer.returnValue(tx) @@ -2738,7 +2518,7 @@ class Daemon(AuthJSONRPCServer): ] """ - unspent = yield self.session.wallet.list_unspent() + unspent = yield self.wallet.list_unspent() for i, utxo in enumerate(unspent): utxo['txid'] = utxo.pop('prevout_hash') utxo['nout'] = utxo.pop('prevout_n') @@ -2764,10 +2544,10 @@ class Daemon(AuthJSONRPCServer): """ if blockhash is not None: - d = self.session.wallet.get_block(blockhash) + d = self.wallet.get_block(blockhash) elif height is not None: - d = self.session.wallet.get_block_info(height) - d.addCallback(lambda b: self.session.wallet.get_block(b)) + d = self.wallet.get_block_info(height) + d.addCallback(lambda b: self.wallet.get_block(b)) else: # TODO: return a useful error message return server.failure @@ -2837,8 +2617,8 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response("Don't have that blob") defer.returnValue(response) try: - stream_hash = yield self.session.storage.get_stream_hash_for_sd_hash(blob_hash) - yield self.session.storage.delete_stream(stream_hash) + stream_hash = yield self.storage.get_stream_hash_for_sd_hash(blob_hash) + yield self.storage.delete_stream(stream_hash) except Exception as err: pass yield self.session.blob_manager.delete_blobs([blob_hash]) @@ -2864,7 +2644,7 @@ class Daemon(AuthJSONRPCServer): if not utils.is_valid_blobhash(blob_hash): raise Exception("invalid blob hash") - finished_deferred = self.session.dht_node.iterativeFindValue(binascii.unhexlify(blob_hash)) + finished_deferred = self.dht_node.iterativeFindValue(binascii.unhexlify(blob_hash)) def trap_timeout(err): err.trap(defer.TimeoutError) @@ -2983,14 +2763,14 @@ class Daemon(AuthJSONRPCServer): if uri: metadata = yield self._resolve_name(uri) sd_hash = utils.get_sd_hash(metadata) - stream_hash = yield 
self.session.storage.get_stream_hash_for_sd_hash(sd_hash) + stream_hash = yield self.storage.get_stream_hash_for_sd_hash(sd_hash) elif stream_hash: - sd_hash = yield self.session.storage.get_sd_blob_hash_for_stream(stream_hash) + sd_hash = yield self.storage.get_sd_blob_hash_for_stream(stream_hash) elif sd_hash: - stream_hash = yield self.session.storage.get_stream_hash_for_sd_hash(sd_hash) - sd_hash = yield self.session.storage.get_sd_blob_hash_for_stream(stream_hash) + stream_hash = yield self.storage.get_stream_hash_for_sd_hash(sd_hash) + sd_hash = yield self.storage.get_sd_blob_hash_for_stream(stream_hash) if stream_hash: - crypt_blobs = yield self.session.storage.get_blobs_for_stream(stream_hash) + crypt_blobs = yield self.storage.get_blobs_for_stream(stream_hash) blobs = yield defer.gatherResults([ self.session.blob_manager.get_blob(crypt_blob.blob_hash, crypt_blob.length) for crypt_blob in crypt_blobs if crypt_blob.blob_hash is not None @@ -3071,7 +2851,7 @@ class Daemon(AuthJSONRPCServer): contact = None try: - contact = yield self.session.dht_node.findContact(node_id.decode('hex')) + contact = yield self.dht_node.findContact(node_id.decode('hex')) except TimeoutError: result = {'error': 'timeout finding peer'} defer.returnValue(result) @@ -3113,7 +2893,7 @@ class Daemon(AuthJSONRPCServer): """ result = {} - data_store = self.session.dht_node._dataStore._dict + data_store = self.dht_node._dataStore._dict datastore_len = len(data_store) hosts = {} @@ -3131,8 +2911,8 @@ class Daemon(AuthJSONRPCServer): blob_hashes = [] result['buckets'] = {} - for i in range(len(self.session.dht_node._routingTable._buckets)): - for contact in self.session.dht_node._routingTable._buckets[i]._contacts: + for i in range(len(self.dht_node._routingTable._buckets)): + for contact in self.dht_node._routingTable._buckets[i]._contacts: contacts = result['buckets'].get(i, []) if contact in hosts: blobs = hosts[contact] @@ -3155,7 +2935,7 @@ class Daemon(AuthJSONRPCServer): 
result['contacts'] = contact_set result['blob_hashes'] = blob_hashes - result['node_id'] = self.session.dht_node.node_id.encode('hex') + result['node_id'] = self.dht_node.node_id.encode('hex') return self._render_response(result) def jsonrpc_blob_availability(self, blob_hash, search_timeout=None, blob_timeout=None): @@ -3254,7 +3034,7 @@ class Daemon(AuthJSONRPCServer): } try: - resolved_result = yield self.session.wallet.resolve(uri) + resolved_result = yield self.wallet.resolve(uri) response['did_resolve'] = True except UnknownNameError: response['error'] = "Failed to resolve name" From 944200ca8c1f7621195a9ddd2f03322b3cbfa7a8 Mon Sep 17 00:00:00 2001 From: hackrush Date: Tue, 24 Jul 2018 12:26:29 -0400 Subject: [PATCH 06/31] add all the daemon components --- lbrynet/daemon/Components.py | 542 ++++++++++++++++++++++++++++++++--- 1 file changed, 500 insertions(+), 42 deletions(-) diff --git a/lbrynet/daemon/Components.py b/lbrynet/daemon/Components.py index 46f39212b..5f328523e 100644 --- a/lbrynet/daemon/Components.py +++ b/lbrynet/daemon/Components.py @@ -1,36 +1,85 @@ import os import logging -from twisted.internet import defer, threads +import miniupnpc +from twisted.internet import defer, threads, reactor, error + from lbrynet import conf -from lbrynet.database.storage import SQLiteStorage +from lbrynet.core.Session import Session +from lbrynet.core.StreamDescriptor import StreamDescriptorIdentifier, EncryptedFileStreamType +from lbrynet.core.Wallet import LBRYumWallet +from lbrynet.core.server.BlobRequestHandler import BlobRequestHandlerFactory +from lbrynet.core.server.ServerProtocol import ServerProtocolFactory from lbrynet.daemon.Component import Component +from lbrynet.daemon.ExchangeRateManager import ExchangeRateManager +from lbrynet.database.storage import SQLiteStorage +from lbrynet.dht import node, hashannouncer +from lbrynet.file_manager.EncryptedFileManager import EncryptedFileManager +from lbrynet.lbry_file.client.EncryptedFileDownloader import 
EncryptedFileSaverFactory +from lbrynet.lbry_file.client.EncryptedFileOptions import add_lbry_file_to_sd_identifier +from lbrynet.reflector import ServerFactory as reflector_server_factory + +from lbrynet.core.utils import generate_id log = logging.getLogger(__name__) # settings must be initialized before this file is imported DATABASE_COMPONENT = "database" +WALLET_COMPONENT = "wallet" +SESSION_COMPONENT = "session" +DHT_COMPONENT = "dht" +HASH_ANNOUNCER_COMPONENT = "hash_announcer" +STREAM_IDENTIFIER_COMPONENT = "stream_identifier" +FILE_MANAGER_COMPONENT = "file_manager" +PEER_PROTOCOL_SERVER_COMPONENT = "peer_protocol_server" +REFLECTOR_COMPONENT = "reflector" +UPNP_COMPONENT = "upnp" +EXCHANGE_RATE_MANAGER_COMPONENT = "exchange_rate_manager" + + +class ConfigSettings(object): + @staticmethod + def get_conf_setting(setting_name): + return conf.settings[setting_name] + + @staticmethod + def get_blobfiles_dir(): + if conf.settings['BLOBFILES_DIR'] == "blobfiles": + return os.path.join(GCS("data_dir"), "blobfiles") + else: + log.info("Using non-default blobfiles directory: %s", conf.settings['BLOBFILES_DIR']) + return conf.settings['BLOBFILES_DIR'] + + @staticmethod + def get_node_id(): + return conf.settings.node_id + + @staticmethod + def get_external_ip(): + from lbrynet.core.system_info import get_platform + platform = get_platform(get_ip=True) + return platform['ip'] + + +# Shorthand for common ConfigSettings methods +CS = ConfigSettings +GCS = ConfigSettings.get_conf_setting class DatabaseComponent(Component): component_name = DATABASE_COMPONENT - storage = None - @staticmethod - def get_db_dir(): - return conf.settings['data_dir'] + def __init__(self, component_manager): + Component.__init__(self, component_manager) + self.storage = None - @staticmethod - def get_download_directory(): - return conf.settings['download_directory'] - - @staticmethod - def get_blobfile_dir(): - return conf.settings['BLOBFILES_DIR'] + @property + def component(self): + return 
self.storage @staticmethod def get_current_db_revision(): - return 7 + return 9 @staticmethod def get_revision_filename(): @@ -41,49 +90,458 @@ class DatabaseComponent(Component): with open(conf.settings.get_db_revision_filename(), mode='w') as db_revision: db_revision.write(str(version_num)) - @classmethod @defer.inlineCallbacks - def setup(cls): + def start(self): # check directories exist, create them if they don't log.info("Loading databases") - if not os.path.exists(cls.get_download_directory()): - os.mkdir(cls.get_download_directory()) - if not os.path.exists(cls.get_db_dir()): - os.mkdir(cls.get_db_dir()) - cls._write_db_revision_file(cls.get_current_db_revision()) - log.debug("Created the db revision file: %s", cls.get_revision_filename()) - if not os.path.exists(cls.get_blobfile_dir()): - os.mkdir(cls.get_blobfile_dir()) - log.debug("Created the blobfile directory: %s", str(cls.get_blobfile_dir())) - if not os.path.exists(cls.get_revision_filename()): + + if not os.path.exists(GCS('download_directory')): + os.mkdir(GCS('download_directory')) + + if not os.path.exists(GCS('data_dir')): + os.mkdir(GCS('data_dir')) + self._write_db_revision_file(self.get_current_db_revision()) + log.debug("Created the db revision file: %s", self.get_revision_filename()) + + if not os.path.exists(CS.get_blobfiles_dir()): + os.mkdir(CS.get_blobfiles_dir()) + log.debug("Created the blobfile directory: %s", str(CS.get_blobfiles_dir())) + + if not os.path.exists(self.get_revision_filename()): log.warning("db_revision file not found. 
Creating it") - cls._write_db_revision_file(cls.get_current_db_revision()) + self._write_db_revision_file(self.get_current_db_revision()) # check the db migration and run any needed migrations - migrated = False - with open(cls.get_revision_filename(), "r") as revision_read_handle: + with open(self.get_revision_filename(), "r") as revision_read_handle: old_revision = int(revision_read_handle.read().strip()) - if old_revision > cls.get_current_db_revision(): + if old_revision > self.get_current_db_revision(): raise Exception('This version of lbrynet is not compatible with the database\n' 'Your database is revision %i, expected %i' % - (old_revision, cls.get_current_db_revision())) - if old_revision < cls.get_current_db_revision(): + (old_revision, self.get_current_db_revision())) + if old_revision < self.get_current_db_revision(): from lbrynet.database.migrator import dbmigrator - log.info("Upgrading your databases (revision %i to %i)", old_revision, cls.get_current_db_revision()) + log.info("Upgrading your databases (revision %i to %i)", old_revision, self.get_current_db_revision()) yield threads.deferToThread( - dbmigrator.migrate_db, cls.get_db_dir(), old_revision, cls.get_current_db_revision() + dbmigrator.migrate_db, GCS('data_dir'), old_revision, self.get_current_db_revision() ) - cls._write_db_revision_file(cls.get_current_db_revision()) + self._write_db_revision_file(self.get_current_db_revision()) log.info("Finished upgrading the databases.") - migrated = True # start SQLiteStorage - cls.storage = SQLiteStorage(cls.get_db_dir()) - yield cls.storage.setup() - defer.returnValue(migrated) + self.storage = SQLiteStorage(GCS('data_dir')) + yield self.storage.setup() - @classmethod @defer.inlineCallbacks - def stop(cls): - yield cls.storage.stop() + def stop(self): + yield self.storage.stop() + self.storage = None + + +class WalletComponent(Component): + component_name = WALLET_COMPONENT + depends_on = [DATABASE_COMPONENT] + + def __init__(self, 
component_manager): + Component.__init__(self, component_manager) + self.wallet = None + + @property + def component(self): + return self.wallet + + @defer.inlineCallbacks + def start(self): + storage = self.component_manager.get_component(DATABASE_COMPONENT) + wallet_type = GCS('wallet') + + if wallet_type == conf.LBRYCRD_WALLET: + raise ValueError('LBRYcrd Wallet is no longer supported') + elif wallet_type == conf.LBRYUM_WALLET: + + log.info("Using lbryum wallet") + + lbryum_servers = {address: {'t': str(port)} + for address, port in GCS('lbryum_servers')} + + config = { + 'auto_connect': True, + 'chain': GCS('blockchain_name'), + 'default_servers': lbryum_servers + } + + if 'use_keyring' in conf.settings: + config['use_keyring'] = GCS('use_keyring') + if conf.settings['lbryum_wallet_dir']: + config['lbryum_path'] = GCS('lbryum_wallet_dir') + self.wallet = LBRYumWallet(storage, config) + yield self.wallet.start() + else: + raise ValueError('Wallet Type {} is not valid'.format(wallet_type)) + + @defer.inlineCallbacks + def stop(self): + yield self.wallet.stop() + self.wallet = None + + +class SessionComponent(Component): + component_name = SESSION_COMPONENT + depends_on = [DATABASE_COMPONENT, WALLET_COMPONENT, DHT_COMPONENT, HASH_ANNOUNCER_COMPONENT] + + def __init__(self, component_manager): + Component.__init__(self, component_manager) + self.session = None + + @property + def component(self): + return self.session + + @defer.inlineCallbacks + def start(self): + self.session = Session( + GCS('data_rate'), + db_dir=GCS('data_dir'), + node_id=CS.get_node_id(), + blob_dir=CS.get_blobfiles_dir(), + dht_node=self.component_manager.get_component(DHT_COMPONENT), + hash_announcer=self.component_manager.get_component(HASH_ANNOUNCER_COMPONENT), + dht_node_port=GCS('dht_node_port'), + known_dht_nodes=GCS('known_dht_nodes'), + peer_port=GCS('peer_port'), + wallet=self.component_manager.get_component(WALLET_COMPONENT), + external_ip=CS.get_external_ip(), + 
storage=self.component_manager.get_component(DATABASE_COMPONENT) + ) + yield self.session.setup() + + @defer.inlineCallbacks + def stop(self): + yield self.session.shut_down() + + +class DHTComponent(Component): + component_name = DHT_COMPONENT + depends_on = [UPNP_COMPONENT] + + def __init__(self, component_manager): + Component.__init__(self, component_manager) + self.dht_node = None + self.upnp_component = None + self.udp_port, self.peer_port = None, None + + @property + def component(self): + return self.dht_node + + @defer.inlineCallbacks + def start(self): + self.upnp_component = self.component_manager.get_component(UPNP_COMPONENT) + self.peer_port, self.udp_port = self.upnp_component.get_redirects() + node_id = CS.get_node_id() + if node_id is None: + node_id = generate_id() + + self.dht_node = node.Node( + node_id=node_id, + udpPort=self.udp_port, + externalIP=CS.get_external_ip(), + peerPort=self.peer_port + ) + yield self.dht_node.start(GCS('known_dht_nodes')) + log.info("Joined the dht") + + @defer.inlineCallbacks + def stop(self): + yield self.dht_node.stop() + + +class HashAnnouncerComponent(Component): + component_name = HASH_ANNOUNCER_COMPONENT + depends_on = [DHT_COMPONENT, DATABASE_COMPONENT] + + def __init__(self, component_manager): + Component.__init__(self, component_manager) + self.hash_announcer = None + + @property + def component(self): + return self.hash_announcer + + @defer.inlineCallbacks + def start(self): + storage = self.component_manager.get_component(DATABASE_COMPONENT) + dht_node = self.component_manager.get_component(DHT_COMPONENT) + self.hash_announcer = hashannouncer.DHTHashAnnouncer(dht_node, storage) + yield self.hash_announcer.start() + + @defer.inlineCallbacks + def stop(self): + yield self.hash_announcer.stop() + + +class StreamIdentifierComponent(Component): + component_name = STREAM_IDENTIFIER_COMPONENT + depends_on = [SESSION_COMPONENT] + + def __init__(self, component_manager): + Component.__init__(self, 
component_manager) + self.sd_identifier = StreamDescriptorIdentifier() + + @property + def component(self): + return self.sd_identifier + + @defer.inlineCallbacks + def start(self): + session = self.component_manager.get_component(SESSION_COMPONENT) + add_lbry_file_to_sd_identifier(self.sd_identifier) + file_saver_factory = EncryptedFileSaverFactory( + session.peer_finder, + session.rate_limiter, + session.blob_manager, + session.storage, + session.wallet, + GCS('download_directory') + ) + yield self.sd_identifier.add_stream_downloader_factory(EncryptedFileStreamType, file_saver_factory) + + def stop(self): + pass + + +class FileManagerComponent(Component): + component_name = FILE_MANAGER_COMPONENT + depends_on = [SESSION_COMPONENT, STREAM_IDENTIFIER_COMPONENT] + + def __init__(self, component_manager): + Component.__init__(self, component_manager) + self.file_manager = None + + @property + def component(self): + return self.file_manager + + @defer.inlineCallbacks + def start(self): + session = self.component_manager.get_component(SESSION_COMPONENT) + sd_identifier = self.component_manager.get_component(STREAM_IDENTIFIER_COMPONENT) + log.info('Starting the file manager') + self.file_manager = EncryptedFileManager(session, sd_identifier) + yield self.file_manager.setup() + log.info('Done setting up file manager') + + @defer.inlineCallbacks + def stop(self): + yield self.file_manager.stop() + + +class PeerProtocolServerComponent(Component): + component_name = PEER_PROTOCOL_SERVER_COMPONENT + depends_on = [SESSION_COMPONENT, UPNP_COMPONENT] + + def __init__(self, component_manager): + Component.__init__(self, component_manager) + self.lbry_server_port = None + + @property + def component(self): + return self.lbry_server_port + + @defer.inlineCallbacks + def start(self): + query_handlers = {} + upnp_component = self.component_manager.get_component(UPNP_COMPONENT) + peer_port, udp_port = upnp_component.get_redirects() + session = 
self.component_manager.get_component(SESSION_COMPONENT) + + handlers = [ + BlobRequestHandlerFactory( + session.blob_manager, + session.wallet, + session.payment_rate_manager, + self.component_manager.analytics_manager + ), + session.wallet.get_wallet_info_query_handler_factory(), + ] + + for handler in handlers: + query_id = handler.get_primary_query_identifier() + query_handlers[query_id] = handler + + if peer_port is not None: + server_factory = ServerProtocolFactory(session.rate_limiter, query_handlers, session.peer_manager) + + try: + log.info("Peer protocol listening on TCP %d", peer_port) + self.lbry_server_port = yield reactor.listenTCP(peer_port, server_factory) + except error.CannotListenError as e: + import traceback + log.error("Couldn't bind to port %d. Visit lbry.io/faq/how-to-change-port for" + " more details.", peer_port) + log.error("%s", traceback.format_exc()) + raise ValueError("%s lbrynet may already be running on your computer." % str(e)) + + @defer.inlineCallbacks + def stop(self): + if self.lbry_server_port is not None: + self.lbry_server_port, old_port = None, self.lbry_server_port + log.info('Stop listening on port %s', old_port.port) + yield old_port.stopListening() + + +class ReflectorComponent(Component): + component_name = REFLECTOR_COMPONENT + depends_on = [SESSION_COMPONENT, FILE_MANAGER_COMPONENT] + + def __init__(self, component_manager): + Component.__init__(self, component_manager) + self.reflector_server_port = GCS('reflector_port') + self.reflector_server = None + + @property + def component(self): + return self.reflector_server + + @defer.inlineCallbacks + def start(self): + log.info("Starting reflector server") + + session = self.component_manager.get_component(SESSION_COMPONENT) + file_manager = self.component_manager.get_component(FILE_MANAGER_COMPONENT) + reflector_factory = reflector_server_factory(session.peer_manager, session.blob_manager, file_manager) + + try: + self.reflector_server = yield 
reactor.listenTCP(self.reflector_server_port, reflector_factory) + log.info('Started reflector on port %s', self.reflector_server_port) + except error.CannotListenError as e: + log.exception("Couldn't bind reflector to port %d", self.reflector_server_port) + raise ValueError("{} lbrynet may already be running on your computer.".format(e)) + + @defer.inlineCallbacks + def stop(self): + if self.reflector_server is not None: + log.info("Stopping reflector server") + self.reflector_server, p = None, self.reflector_server + yield p.stopListening + + +class UPnPComponent(Component): + component_name = UPNP_COMPONENT + + def __init__(self, component_manager): + Component.__init__(self, component_manager) + self.peer_port = GCS('peer_port') + self.dht_node_port = GCS('dht_node_port') + self.use_upnp = GCS('use_upnp') + self.external_ip = CS.get_external_ip() + self.upnp_redirects = [] + + @property + def component(self): + return self + + def get_redirects(self): + return self.peer_port, self.dht_node_port + + def start(self): + log.debug("In _try_upnp") + + def get_free_port(upnp, port, protocol): + # returns an existing mapping if it exists + mapping = upnp.getspecificportmapping(port, protocol) + if not mapping: + return port + if upnp.lanaddr == mapping[0]: + return mapping[1] + return get_free_port(upnp, port + 1, protocol) + + def get_port_mapping(upnp, port, protocol, description): + # try to map to the requested port, if there is already a mapping use the next external + # port available + if protocol not in ['UDP', 'TCP']: + raise Exception("invalid protocol") + port = get_free_port(upnp, port, protocol) + if isinstance(port, tuple): + log.info("Found existing UPnP redirect %s:%i (%s) to %s:%i, using it", + self.external_ip, port, protocol, upnp.lanaddr, port) + return port + upnp.addportmapping(port, protocol, upnp.lanaddr, port, + description, '') + log.info("Set UPnP redirect %s:%i (%s) to %s:%i", self.external_ip, port, + protocol, upnp.lanaddr, port) + return 
port + + def threaded_try_upnp(): + if self.use_upnp is False: + log.debug("Not using upnp") + return False + u = miniupnpc.UPnP() + num_devices_found = u.discover() + if num_devices_found > 0: + u.selectigd() + external_ip = u.externalipaddress() + if external_ip != '0.0.0.0' and not self.external_ip: + # best not to rely on this external ip, the router can be behind layers of NATs + self.external_ip = external_ip + if self.peer_port: + self.peer_port = get_port_mapping(u, self.peer_port, 'TCP', 'LBRY peer port') + self.upnp_redirects.append((self.peer_port, 'TCP')) + if self.dht_node_port: + self.dht_node_port = get_port_mapping(u, self.dht_node_port, 'UDP', 'LBRY DHT port') + self.upnp_redirects.append((self.dht_node_port, 'UDP')) + return True + return False + + def upnp_failed(err): + log.warning("UPnP failed. Reason: %s", err.getErrorMessage()) + return False + + d = threads.deferToThread(threaded_try_upnp) + d.addErrback(upnp_failed) + return d + + def stop(self): + log.info("Unsetting upnp for session") + + def threaded_unset_upnp(): + if self.use_upnp is False: + log.debug("Not using upnp") + return False + u = miniupnpc.UPnP() + num_devices_found = u.discover() + if num_devices_found > 0: + u.selectigd() + for port, protocol in self.upnp_redirects: + if u.getspecificportmapping(port, protocol) is None: + log.warning( + "UPnP redirect for %s %d was removed by something else.", + protocol, port) + else: + u.deleteportmapping(port, protocol) + log.info("Removed UPnP redirect for %s %d.", protocol, port) + self.upnp_redirects = [] + + d = threads.deferToThread(threaded_unset_upnp) + d.addErrback(lambda err: str(err)) + return d + + +class ExchangeRateManagerComponent(Component): + component_name = EXCHANGE_RATE_MANAGER_COMPONENT + + def __init__(self, component_manager): + Component.__init__(self, component_manager) + self.exchange_rate_manager = ExchangeRateManager() + + @property + def component(self): + return self.exchange_rate_manager + + 
@defer.inlineCallbacks + def start(self): + yield self.exchange_rate_manager.start() + + @defer.inlineCallbacks + def stop(self): + yield self.exchange_rate_manager.stop() From 5a2075019c297b4e97f1e00e12c42cd33e9ac1d6 Mon Sep 17 00:00:00 2001 From: hackrush Date: Tue, 24 Jul 2018 12:29:06 -0400 Subject: [PATCH 07/31] update Wallet.check_locked --- lbrynet/core/Wallet.py | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/lbrynet/core/Wallet.py b/lbrynet/core/Wallet.py index 0b71ed59d..3052fdce8 100644 --- a/lbrynet/core/Wallet.py +++ b/lbrynet/core/Wallet.py @@ -938,9 +938,7 @@ class LBRYumWallet(Wallet): self._lag_counter = 0 self.blocks_behind = 0 self.catchup_progress = 0 - - # fired when the wallet actually unlocks (wallet_unlocked_d can be called multiple times) - self.wallet_unlock_success = defer.Deferred() + self.is_wallet_unlocked = None def _is_first_run(self): return (not self.printed_retrieving_headers and @@ -953,21 +951,23 @@ class LBRYumWallet(Wallet): return self._cmd_runner def check_locked(self): - if not self.wallet.use_encryption: - log.info("Wallet is not encrypted") - self.wallet_unlock_success.callback(True) - elif not self._cmd_runner: + """ + Checks if the wallet is encrypted(locked) or not + + :return: (boolean) indicating whether the wallet is locked or not + """ + if not self._cmd_runner: raise Exception("Command runner hasn't been initialized yet") elif self._cmd_runner.locked: log.info("Waiting for wallet password") self.wallet_unlocked_d.addCallback(self.unlock) - return self.wallet_unlock_success + return self.is_wallet_unlocked def unlock(self, password): if self._cmd_runner and self._cmd_runner.locked: try: self._cmd_runner.unlock_wallet(password) - self.wallet_unlock_success.callback(True) + self.is_wallet_unlocked = True log.info("Unlocked the wallet!") except InvalidPassword: log.warning("Incorrect password, try again") @@ -1054,6 +1054,7 @@ class LBRYumWallet(Wallet): 
wallet.create_main_account() wallet.synchronize() self.wallet = wallet + self.is_wallet_unlocked = not self.wallet.use_encryption self._check_large_wallet() return defer.succeed(True) From 2d61ba629fdb4ad176292548fd89b64daef3bd84 Mon Sep 17 00:00:00 2001 From: hackrush Date: Tue, 24 Jul 2018 12:30:47 -0400 Subject: [PATCH 08/31] add requires decorator --- lbrynet/daemon/Daemon.py | 51 +++++++++++++++++++++++++++++++++++ lbrynet/daemon/auth/server.py | 31 ++++++++++++++++++--- 2 files changed, 78 insertions(+), 4 deletions(-) diff --git a/lbrynet/daemon/Daemon.py b/lbrynet/daemon/Daemon.py index 9f32b289c..786f33151 100644 --- a/lbrynet/daemon/Daemon.py +++ b/lbrynet/daemon/Daemon.py @@ -1018,6 +1018,7 @@ class Daemon(AuthJSONRPCServer): """ return self._render_response(sorted([command for command in self.callable_methods.keys()])) + @AuthJSONRPCServer.requires("wallet") def jsonrpc_wallet_balance(self, address=None, include_unconfirmed=False): """ Return the balance of the wallet @@ -1039,6 +1040,7 @@ class Daemon(AuthJSONRPCServer): return self._render_response(float( self.wallet.get_address_balance(address, include_unconfirmed))) + @AuthJSONRPCServer.requires("wallet") @defer.inlineCallbacks def jsonrpc_wallet_unlock(self, password): """ @@ -1065,6 +1067,7 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response(result) defer.returnValue(response) + @AuthJSONRPCServer.requires("wallet", wallet=lambda wallet: wallet.check_locked()) @defer.inlineCallbacks def jsonrpc_wallet_decrypt(self): """ @@ -1084,6 +1087,7 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response(result) defer.returnValue(response) + @AuthJSONRPCServer.requires("wallet", wallet=lambda wallet: wallet.check_locked()) @defer.inlineCallbacks def jsonrpc_wallet_encrypt(self, new_password): """ @@ -1124,6 +1128,7 @@ class Daemon(AuthJSONRPCServer): reactor.callLater(0.1, reactor.fireSystemEvent, "shutdown") defer.returnValue(response) + 
@AuthJSONRPCServer.requires("file_manager") @defer.inlineCallbacks def jsonrpc_file_list(self, sort=None, **kwargs): """ @@ -1195,6 +1200,7 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response(result) defer.returnValue(response) + @AuthJSONRPCServer.requires("wallet") @defer.inlineCallbacks def jsonrpc_resolve_name(self, name, force=False): """ @@ -1220,6 +1226,7 @@ class Daemon(AuthJSONRPCServer): else: defer.returnValue(metadata) + @AuthJSONRPCServer.requires("wallet") @defer.inlineCallbacks def jsonrpc_claim_show(self, txid=None, nout=None, claim_id=None): """ @@ -1265,6 +1272,7 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response(claim_results) defer.returnValue(response) + @AuthJSONRPCServer.requires("wallet") @defer.inlineCallbacks def jsonrpc_resolve(self, force=False, uri=None, uris=[]): """ @@ -1355,6 +1363,7 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response(results) defer.returnValue(response) + @AuthJSONRPCServer.requires("wallet", "file_manager", "session", wallet=lambda wallet: wallet.check_locked()) @defer.inlineCallbacks def jsonrpc_get(self, uri, file_name=None, timeout=None): """ @@ -1443,6 +1452,7 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response(result) defer.returnValue(response) + @AuthJSONRPCServer.requires("file_manager") @defer.inlineCallbacks def jsonrpc_file_set_status(self, status, **kwargs): """ @@ -1483,6 +1493,7 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response(msg) defer.returnValue(response) + @AuthJSONRPCServer.requires("file_manager") @defer.inlineCallbacks def jsonrpc_file_delete(self, delete_from_download_dir=False, delete_all=False, **kwargs): """ @@ -1543,6 +1554,7 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response(result) defer.returnValue(response) + @AuthJSONRPCServer.requires("wallet", "session", wallet=lambda wallet: wallet.check_locked()) @defer.inlineCallbacks def 
jsonrpc_stream_cost_estimate(self, uri, size=None): """ @@ -1563,6 +1575,7 @@ class Daemon(AuthJSONRPCServer): cost = yield self.get_est_cost(uri, size) defer.returnValue(cost) + @AuthJSONRPCServer.requires("wallet", wallet=lambda wallet: wallet.check_locked()) @defer.inlineCallbacks def jsonrpc_channel_new(self, channel_name, amount): """ @@ -1619,6 +1632,7 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response(result) defer.returnValue(response) + @AuthJSONRPCServer.requires("wallet") @defer.inlineCallbacks def jsonrpc_channel_list(self): """ @@ -1639,6 +1653,7 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response(result) defer.returnValue(response) + @AuthJSONRPCServer.requires("wallet") @AuthJSONRPCServer.deprecated("channel_list") def jsonrpc_channel_list_mine(self): """ @@ -1656,6 +1671,7 @@ class Daemon(AuthJSONRPCServer): return self.jsonrpc_channel_list() + @AuthJSONRPCServer.requires("wallet") @defer.inlineCallbacks def jsonrpc_channel_export(self, claim_id): """ @@ -1674,6 +1690,7 @@ class Daemon(AuthJSONRPCServer): result = yield self.wallet.export_certificate_info(claim_id) defer.returnValue(result) + @AuthJSONRPCServer.requires("wallet") @defer.inlineCallbacks def jsonrpc_channel_import(self, serialized_certificate_info): """ @@ -1692,6 +1709,7 @@ class Daemon(AuthJSONRPCServer): result = yield self.wallet.import_certificate_info(serialized_certificate_info) defer.returnValue(result) + @AuthJSONRPCServer.requires("wallet", "file_manager", "session", wallet=lambda wallet: wallet.check_locked()) @defer.inlineCallbacks def jsonrpc_publish(self, name, bid, metadata=None, file_path=None, fee=None, title=None, description=None, author=None, language=None, license=None, @@ -1903,6 +1921,7 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response(result) defer.returnValue(response) + @AuthJSONRPCServer.requires("wallet", wallet=lambda wallet: wallet.check_locked()) @defer.inlineCallbacks def 
jsonrpc_claim_abandon(self, claim_id=None, txid=None, nout=None): """ @@ -1935,6 +1954,7 @@ class Daemon(AuthJSONRPCServer): self.analytics_manager.send_claim_action('abandon') defer.returnValue(result) + @AuthJSONRPCServer.requires("wallet", wallet=lambda wallet: wallet.check_locked()) @defer.inlineCallbacks def jsonrpc_claim_new_support(self, name, claim_id, amount): """ @@ -1962,6 +1982,7 @@ class Daemon(AuthJSONRPCServer): self.analytics_manager.send_claim_action('new_support') defer.returnValue(result) + @AuthJSONRPCServer.requires("wallet", wallet=lambda wallet: wallet.check_locked()) @defer.inlineCallbacks def jsonrpc_claim_renew(self, outpoint=None, height=None): """ @@ -2004,6 +2025,7 @@ class Daemon(AuthJSONRPCServer): result = yield self.wallet.claim_renew_all_before_expiration(height) defer.returnValue(result) + @AuthJSONRPCServer.requires("wallet", wallet=lambda wallet: wallet.check_locked()) @defer.inlineCallbacks def jsonrpc_claim_send_to_address(self, claim_id, address, amount=None): """ @@ -2036,6 +2058,7 @@ class Daemon(AuthJSONRPCServer): defer.returnValue(response) # TODO: claim_list_mine should be merged into claim_list, but idk how to authenticate it -Grin + @AuthJSONRPCServer.requires("wallet") def jsonrpc_claim_list_mine(self): """ List my name claims @@ -2073,6 +2096,7 @@ class Daemon(AuthJSONRPCServer): d.addCallback(lambda claims: self._render_response(claims)) return d + @AuthJSONRPCServer.requires("wallet") @defer.inlineCallbacks def jsonrpc_claim_list(self, name): """ @@ -2111,6 +2135,7 @@ class Daemon(AuthJSONRPCServer): sort_claim_results(claims['claims']) defer.returnValue(claims) + @AuthJSONRPCServer.requires("wallet") @defer.inlineCallbacks def jsonrpc_claim_list_by_channel(self, page=0, page_size=10, uri=None, uris=[]): """ @@ -2200,6 +2225,7 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response(results) defer.returnValue(response) + @AuthJSONRPCServer.requires("wallet") def jsonrpc_transaction_list(self): 
""" List transactions belonging to wallet @@ -2261,6 +2287,7 @@ class Daemon(AuthJSONRPCServer): d.addCallback(lambda r: self._render_response(r)) return d + @AuthJSONRPCServer.requires("wallet") def jsonrpc_transaction_show(self, txid): """ Get a decoded transaction from a txid @@ -2279,6 +2306,7 @@ class Daemon(AuthJSONRPCServer): d.addCallback(lambda r: self._render_response(r)) return d + @AuthJSONRPCServer.requires("wallet") def jsonrpc_wallet_is_address_mine(self, address): """ Checks if an address is associated with the current wallet. @@ -2297,6 +2325,7 @@ class Daemon(AuthJSONRPCServer): d.addCallback(lambda is_mine: self._render_response(is_mine)) return d + @AuthJSONRPCServer.requires("wallet") def jsonrpc_wallet_public_key(self, address): """ Get public key from wallet address @@ -2316,6 +2345,7 @@ class Daemon(AuthJSONRPCServer): d.addCallback(lambda r: self._render_response(r)) return d + @AuthJSONRPCServer.requires("wallet") @defer.inlineCallbacks def jsonrpc_wallet_list(self): """ @@ -2335,6 +2365,7 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response(addresses) defer.returnValue(response) + @AuthJSONRPCServer.requires("wallet") def jsonrpc_wallet_new_address(self): """ Generate a new wallet address @@ -2358,6 +2389,7 @@ class Daemon(AuthJSONRPCServer): d.addCallback(lambda address: self._render_response(address)) return d + @AuthJSONRPCServer.requires("wallet") def jsonrpc_wallet_unused_address(self): """ Return an address containing no balance, will create @@ -2382,6 +2414,7 @@ class Daemon(AuthJSONRPCServer): d.addCallback(lambda address: self._render_response(address)) return d + @AuthJSONRPCServer.requires("wallet", wallet=lambda wallet: wallet.check_locked()) @AuthJSONRPCServer.deprecated("wallet_send") @defer.inlineCallbacks def jsonrpc_send_amount_to_address(self, amount, address): @@ -2411,6 +2444,7 @@ class Daemon(AuthJSONRPCServer): self.analytics_manager.send_credits_sent() defer.returnValue(True) + 
@AuthJSONRPCServer.requires("wallet", wallet=lambda wallet: wallet.check_locked()) @defer.inlineCallbacks def jsonrpc_wallet_send(self, amount, address=None, claim_id=None): """ @@ -2459,6 +2493,7 @@ class Daemon(AuthJSONRPCServer): self.analytics_manager.send_claim_action('new_support') defer.returnValue(result) + @AuthJSONRPCServer.requires("wallet", wallet=lambda wallet: wallet.check_locked()) @defer.inlineCallbacks def jsonrpc_wallet_prefill_addresses(self, num_addresses, amount, no_broadcast=False): """ @@ -2489,6 +2524,7 @@ class Daemon(AuthJSONRPCServer): tx['broadcast'] = broadcast defer.returnValue(tx) + @AuthJSONRPCServer.requires("wallet") @defer.inlineCallbacks def jsonrpc_utxo_list(self): """ @@ -2528,6 +2564,7 @@ class Daemon(AuthJSONRPCServer): defer.returnValue(unspent) + @AuthJSONRPCServer.requires("wallet") def jsonrpc_block_show(self, blockhash=None, height=None): """ Get contents of a block @@ -2555,6 +2592,7 @@ class Daemon(AuthJSONRPCServer): d.addCallback(lambda r: self._render_response(r)) return d + @AuthJSONRPCServer.requires("wallet", "session", wallet=lambda wallet: wallet.check_locked()) @defer.inlineCallbacks def jsonrpc_blob_get(self, blob_hash, timeout=None, encoding=None, payment_rate_manager=None): """ @@ -2598,6 +2636,7 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response(result) defer.returnValue(response) + @AuthJSONRPCServer.requires("session") @defer.inlineCallbacks def jsonrpc_blob_delete(self, blob_hash): """ @@ -2625,6 +2664,7 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response("Deleted %s" % blob_hash) defer.returnValue(response) + @AuthJSONRPCServer.requires("dht") @defer.inlineCallbacks def jsonrpc_peer_list(self, blob_hash, timeout=None): """ @@ -2663,6 +2703,7 @@ class Daemon(AuthJSONRPCServer): ] defer.returnValue(results) + @AuthJSONRPCServer.requires("database") @defer.inlineCallbacks def jsonrpc_blob_announce(self, blob_hash=None, stream_hash=None, sd_hash=None): """ @@ 
-2699,6 +2740,7 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response(True) defer.returnValue(response) + @AuthJSONRPCServer.requires("file_manager") @defer.inlineCallbacks def jsonrpc_file_reflect(self, **kwargs): """ @@ -2734,6 +2776,7 @@ class Daemon(AuthJSONRPCServer): results = yield reupload.reflect_file(lbry_file, reflector_server=reflector_server) defer.returnValue(results) + @AuthJSONRPCServer.requires("database", "session", "wallet") @defer.inlineCallbacks def jsonrpc_blob_list(self, uri=None, stream_hash=None, sd_hash=None, needed=None, finished=None, page_size=None, page=None): @@ -2797,6 +2840,7 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response(blob_hashes_for_return) defer.returnValue(response) + @AuthJSONRPCServer.requires("session") def jsonrpc_blob_reflect(self, blob_hashes, reflector_server=None): """ Reflects specified blobs @@ -2815,6 +2859,7 @@ class Daemon(AuthJSONRPCServer): d.addCallback(lambda r: self._render_response(r)) return d + @AuthJSONRPCServer.requires("session") def jsonrpc_blob_reflect_all(self): """ Reflects all saved blobs @@ -2834,6 +2879,7 @@ class Daemon(AuthJSONRPCServer): d.addCallback(lambda r: self._render_response(r)) return d + @AuthJSONRPCServer.requires("dht") @defer.inlineCallbacks def jsonrpc_peer_ping(self, node_id): """ @@ -2863,6 +2909,7 @@ class Daemon(AuthJSONRPCServer): result = {'error': 'ping timeout'} defer.returnValue(result) + @AuthJSONRPCServer.requires("dht") def jsonrpc_routing_table_get(self): """ Get DHT routing information @@ -2938,6 +2985,8 @@ class Daemon(AuthJSONRPCServer): result['node_id'] = self.dht_node.node_id.encode('hex') return self._render_response(result) + # the single peer downloader needs wallet access + @AuthJSONRPCServer.requires("dht", "wallet", wallet=lambda wallet: wallet.check_locked()) def jsonrpc_blob_availability(self, blob_hash, search_timeout=None, blob_timeout=None): """ Get blob availability @@ -2962,6 +3011,7 @@ class 
Daemon(AuthJSONRPCServer): return self._blob_availability(blob_hash, search_timeout, blob_timeout) + @AuthJSONRPCServer.requires("session", "wallet", "dht", wallet=lambda wallet: wallet.check_locked()) @AuthJSONRPCServer.deprecated("stream_availability") def jsonrpc_get_availability(self, uri, sd_timeout=None, peer_timeout=None): """ @@ -2982,6 +3032,7 @@ class Daemon(AuthJSONRPCServer): return self.jsonrpc_stream_availability(uri, peer_timeout, sd_timeout) + @AuthJSONRPCServer.requires("session", "wallet", "dht", wallet=lambda wallet: wallet.check_locked()) @defer.inlineCallbacks def jsonrpc_stream_availability(self, uri, search_timeout=None, blob_timeout=None): """ diff --git a/lbrynet/daemon/auth/server.py b/lbrynet/daemon/auth/server.py index a0d365a35..72d7e7b6b 100644 --- a/lbrynet/daemon/auth/server.py +++ b/lbrynet/daemon/auth/server.py @@ -4,6 +4,7 @@ import json import inspect from decimal import Decimal +from functools import wraps from zope.interface import implements from twisted.web import server, resource from twisted.internet import defer @@ -15,6 +16,7 @@ from traceback import format_exc from lbrynet import conf from lbrynet.core.Error import InvalidAuthenticationToken from lbrynet.core import utils +from lbrynet.core.Error import ComponentsNotStarted, ComponentStartConditionNotMet from lbrynet.daemon.auth.util import APIKey, get_auth_message from lbrynet.daemon.auth.client import LBRY_SECRET from lbrynet.undecorated import undecorated @@ -141,6 +143,31 @@ class AuthorizedBase(object): return f return _deprecated_wrapper + @staticmethod + def requires(*components, **component_conditionals): + def _wrap(fn): + @defer.inlineCallbacks + @wraps(fn) + def _inner(*args, **kwargs): + if component_conditionals: + for component_name, condition in component_conditionals.iteritems(): + if not callable(condition): + raise SyntaxError("The specified condition is invalid/not callable") + if args[0].component_manager.all_components_running(component_name): + if 
not (yield condition(args[0].component_manager.get_component(component_name))): + raise ComponentStartConditionNotMet( + "Not all conditions required to do this operation are met") + else: + raise ComponentsNotStarted("%s component is not setup.\nConditional cannot be checked" + % component_name) + if args[0].component_manager.all_components_running(*components): + result = yield fn(*args, **kwargs) + defer.returnValue(result) + else: + raise ComponentsNotStarted("Not all required components are set up:", components) + return _inner + return _wrap + class AuthJSONRPCServer(AuthorizedBase): """ @@ -149,7 +176,6 @@ class AuthJSONRPCServer(AuthorizedBase): API methods are named with a leading "jsonrpc_" Attributes: - allowed_during_startup (list): list of api methods that are callable before the server has finished startup sessions (dict): (dict): {: } callable_methods (dict): {: } @@ -416,9 +442,6 @@ class AuthJSONRPCServer(AuthorizedBase): def _verify_method_is_callable(self, function_path): if function_path not in self.callable_methods: raise UnknownAPIMethodError(function_path) - if not self.announced_startup: - if function_path not in self.allowed_during_startup: - raise NotAllowedDuringStartupError(function_path) def _get_jsonrpc_method(self, function_path): if function_path in self.deprecated_methods: From 6b6a29fdb79de22dfe93d1b8ee1abbc5cb60f9e6 Mon Sep 17 00:00:00 2001 From: hackrush Date: Tue, 24 Jul 2018 12:32:00 -0400 Subject: [PATCH 09/31] update settings_set --- lbrynet/daemon/Daemon.py | 86 ++++++++++++++++++++-------------------- 1 file changed, 42 insertions(+), 44 deletions(-) diff --git a/lbrynet/daemon/Daemon.py b/lbrynet/daemon/Daemon.py index 786f33151..a913977a2 100644 --- a/lbrynet/daemon/Daemon.py +++ b/lbrynet/daemon/Daemon.py @@ -291,47 +291,6 @@ class Daemon(AuthJSONRPCServer): d.addErrback(log.fail(), 'Failure while shutting down') return d - def _update_settings(self, settings): - setting_types = { - 'download_directory': str, - 
'data_rate': float, - 'download_timeout': int, - 'peer_port': int, - 'max_key_fee': dict, - 'use_upnp': bool, - 'run_reflector_server': bool, - 'cache_time': int, - 'reflect_uploads': bool, - 'share_usage_data': bool, - 'disable_max_key_fee': bool, - 'peer_search_timeout': int, - 'sd_download_timeout': int, - 'auto_renew_claim_height_delta': int - } - - for key, setting_type in setting_types.iteritems(): - if key in settings: - if isinstance(settings[key], setting_type): - conf.settings.update({key: settings[key]}, - data_types=(conf.TYPE_RUNTIME, conf.TYPE_PERSISTED)) - elif setting_type is dict and isinstance(settings[key], six.string_types): - decoded = json.loads(str(settings[key])) - conf.settings.update({key: decoded}, - data_types=(conf.TYPE_RUNTIME, conf.TYPE_PERSISTED)) - else: - converted = setting_type(settings[key]) - conf.settings.update({key: converted}, - data_types=(conf.TYPE_RUNTIME, conf.TYPE_PERSISTED)) - conf.settings.save_conf_file_settings() - - self.data_rate = conf.settings['data_rate'] - self.max_key_fee = conf.settings['max_key_fee'] - self.disable_max_key_fee = conf.settings['disable_max_key_fee'] - self.download_directory = conf.settings['download_directory'] - self.download_timeout = conf.settings['download_timeout'] - - return defer.succeed(True) - def _start_analytics(self): if not self.analytics_manager.is_started: self.analytics_manager.start() @@ -915,7 +874,6 @@ class Daemon(AuthJSONRPCServer): """ return self._render_response(conf.settings.get_adjustable_settings_dict()) - @defer.inlineCallbacks def jsonrpc_settings_set(self, **kwargs): """ Set daemon settings @@ -967,8 +925,48 @@ class Daemon(AuthJSONRPCServer): (dict) Updated dictionary of daemon settings """ - yield self._update_settings(kwargs) - defer.returnValue(conf.settings.get_adjustable_settings_dict()) + # TODO: improve upon the current logic, it could be made better + new_settings = kwargs + + setting_types = { + 'download_directory': str, + 'data_rate': float, + 
'download_timeout': int, + 'peer_port': int, + 'max_key_fee': dict, + 'use_upnp': bool, + 'run_reflector_server': bool, + 'cache_time': int, + 'reflect_uploads': bool, + 'share_usage_data': bool, + 'disable_max_key_fee': bool, + 'peer_search_timeout': int, + 'sd_download_timeout': int, + 'auto_renew_claim_height_delta': int + } + + for key, setting_type in setting_types.iteritems(): + if key in new_settings: + if isinstance(new_settings[key], setting_type): + conf.settings.update({key: new_settings[key]}, + data_types=(conf.TYPE_RUNTIME, conf.TYPE_PERSISTED)) + elif setting_type is dict and isinstance(new_settings[key], (unicode, str)): + decoded = json.loads(str(new_settings[key])) + conf.settings.update({key: decoded}, + data_types=(conf.TYPE_RUNTIME, conf.TYPE_PERSISTED)) + else: + converted = setting_type(new_settings[key]) + conf.settings.update({key: converted}, + data_types=(conf.TYPE_RUNTIME, conf.TYPE_PERSISTED)) + conf.settings.save_conf_file_settings() + + self.data_rate = conf.settings['data_rate'] + self.max_key_fee = conf.settings['max_key_fee'] + self.disable_max_key_fee = conf.settings['disable_max_key_fee'] + self.download_directory = conf.settings['download_directory'] + self.download_timeout = conf.settings['download_timeout'] + + return self._render_response(conf.settings.get_adjustable_settings_dict()) def jsonrpc_help(self, command=None): """ From 4e2904129143971889a7556c71fc954db9d68a9c Mon Sep 17 00:00:00 2001 From: hackrush Date: Tue, 24 Jul 2018 12:34:58 -0400 Subject: [PATCH 10/31] update status command --- lbrynet/daemon/Daemon.py | 22 +++++++++------------- 1 file changed, 9 insertions(+), 13 deletions(-) diff --git a/lbrynet/daemon/Daemon.py b/lbrynet/daemon/Daemon.py index a913977a2..f8a640937 100644 --- a/lbrynet/daemon/Daemon.py +++ b/lbrynet/daemon/Daemon.py @@ -735,8 +735,7 @@ class Daemon(AuthJSONRPCServer): 'is_running': bool, 'is_first_run': bool, 'startup_status': { - 'code': status code, - 'message': status message + (str) 
component_name: (bool) True if running else False, }, 'connection_status': { 'code': connection status code, @@ -760,22 +759,19 @@ class Daemon(AuthJSONRPCServer): """ # on startup, the wallet or network won't be available but we still need this call to work - has_wallet = self.session and self.session.wallet and self.session.wallet.network - local_height = self.session.wallet.network.get_local_height() if has_wallet else 0 - remote_height = self.session.wallet.network.get_server_height() if has_wallet else 0 - best_hash = (yield self.session.wallet.get_best_blockhash()) if has_wallet else None - wallet_is_encrypted = has_wallet and self.session.wallet.wallet and \ - self.session.wallet.wallet.use_encryption + has_wallet = self.session and self.wallet and self.wallet.network + local_height = self.wallet.network.get_local_height() if has_wallet else 0 + remote_height = self.wallet.network.get_server_height() if has_wallet else 0 + best_hash = (yield self.wallet.get_best_blockhash()) if has_wallet else None + wallet_is_encrypted = has_wallet and self.wallet.wallet and \ + self.wallet.wallet.use_encryption response = { 'lbry_id': base58.b58encode(self.node_id), 'installation_id': conf.settings.installation_id, 'is_running': self.announced_startup, - 'is_first_run': self.session.wallet.is_first_run if has_wallet else None, - 'startup_status': { - 'code': self.startup_status[0], - 'message': self.startup_status[1], - }, + 'is_first_run': self.wallet.is_first_run if has_wallet else None, + 'startup_status': self.component_manager.get_components_status(), 'connection_status': { 'code': self.connection_status_code, 'message': ( From 3dc5a9de7bcfb0332232456720381199bc1395f8 Mon Sep 17 00:00:00 2001 From: hackrush Date: Tue, 24 Jul 2018 12:36:00 -0400 Subject: [PATCH 11/31] whitespace --- lbrynet/daemon/ExchangeRateManager.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/lbrynet/daemon/ExchangeRateManager.py 
b/lbrynet/daemon/ExchangeRateManager.py index 486659a0e..acafe77d4 100644 --- a/lbrynet/daemon/ExchangeRateManager.py +++ b/lbrynet/daemon/ExchangeRateManager.py @@ -12,7 +12,7 @@ log = logging.getLogger(__name__) CURRENCY_PAIRS = ["USDBTC", "BTCLBC"] BITTREX_FEE = 0.0025 -COINBASE_FEE = 0.0 #add fee +COINBASE_FEE = 0.0 # add fee class ExchangeRate(object): @@ -37,6 +37,7 @@ class ExchangeRate(object): class MarketFeed(object): REQUESTS_TIMEOUT = 20 EXCHANGE_RATE_UPDATE_RATE_SEC = 300 + def __init__(self, market, name, url, params, fee): self.market = market self.name = name @@ -115,7 +116,7 @@ class BittrexFeed(MarketFeed): qtys = sum([i['Quantity'] for i in trades]) if totals <= 0 or qtys <= 0: raise InvalidExchangeRateResponse(self.market, 'quantities were not positive') - vwap = totals/qtys + vwap = totals / qtys return defer.succeed(float(1.0 / vwap)) @@ -175,12 +176,11 @@ class CryptonatorBTCFeed(MarketFeed): except ValueError: raise InvalidExchangeRateResponse(self.name, "invalid rate response") if 'ticker' not in json_response or len(json_response['ticker']) == 0 or \ - 'success' not in json_response or json_response['success'] is not True: + 'success' not in json_response or json_response['success'] is not True: raise InvalidExchangeRateResponse(self.name, 'result not found') return defer.succeed(float(json_response['ticker']['price'])) - class CryptonatorFeed(MarketFeed): def __init__(self): MarketFeed.__init__( @@ -198,7 +198,7 @@ class CryptonatorFeed(MarketFeed): except ValueError: raise InvalidExchangeRateResponse(self.name, "invalid rate response") if 'ticker' not in json_response or len(json_response['ticker']) == 0 or \ - 'success' not in json_response or json_response['success'] is not True: + 'success' not in json_response or json_response['success'] is not True: raise InvalidExchangeRateResponse(self.name, 'result not found') return defer.succeed(float(json_response['ticker']['price'])) @@ -231,11 +231,11 @@ class ExchangeRateManager(object): 
for market in self.market_feeds: if (market.rate_is_initialized() and market.is_online() and - market.rate.currency_pair == (from_currency, to_currency)): + market.rate.currency_pair == (from_currency, to_currency)): return amount * market.rate.spot for market in self.market_feeds: if (market.rate_is_initialized() and market.is_online() and - market.rate.currency_pair[0] == from_currency): + market.rate.currency_pair[0] == from_currency): return self.convert_currency( market.rate.currency_pair[1], to_currency, amount * market.rate.spot) raise Exception( From 094d9c6497a3846e0c3781410406a3b4ba008066 Mon Sep 17 00:00:00 2001 From: hackrush Date: Tue, 24 Jul 2018 12:36:42 -0400 Subject: [PATCH 12/31] update lbrynet-cli --- lbrynet/daemon/DaemonCLI.py | 36 +++++++----------------------------- 1 file changed, 7 insertions(+), 29 deletions(-) diff --git a/lbrynet/daemon/DaemonCLI.py b/lbrynet/daemon/DaemonCLI.py index 7ec03aa34..3cecc7c42 100644 --- a/lbrynet/daemon/DaemonCLI.py +++ b/lbrynet/daemon/DaemonCLI.py @@ -7,7 +7,7 @@ from collections import OrderedDict from lbrynet import conf from lbrynet.core import utils from lbrynet.daemon.auth.client import JSONRPCException, LBRYAPIClient, AuthAPIClient -from lbrynet.daemon.Daemon import LOADING_WALLET_CODE, Daemon +from lbrynet.daemon.Daemon import Daemon from lbrynet.core.system_info import get_platform from jsonrpc.common import RPCError from requests.exceptions import ConnectionError @@ -21,17 +21,13 @@ def remove_brackets(key): return key -def set_flag_vals(flag_names, parsed_args): +def set_kwargs(parsed_args): kwargs = OrderedDict() for key, arg in parsed_args.iteritems(): if arg is None: continue - elif key.startswith("--"): - if remove_brackets(key[2:]) not in kwargs: - k = remove_brackets(key[2:]) - elif key in flag_names: - if remove_brackets(flag_names[key]) not in kwargs: - k = remove_brackets(flag_names[key]) + elif key.startswith("--") and remove_brackets(key[2:]) not in kwargs: + k = 
remove_brackets(key[2:]) elif remove_brackets(key) not in kwargs: k = remove_brackets(key) kwargs[k] = guess_type(arg, k) @@ -79,26 +75,22 @@ def main(): method = new_method fn = Daemon.callable_methods[method] - if hasattr(fn, "_flags"): - flag_names = fn._flags - else: - flag_names = {} parsed = docopt(fn.__doc__, args) - kwargs = set_flag_vals(flag_names, parsed) + kwargs = set_kwargs(parsed) colorama.init() conf.initialize_settings() try: api = LBRYAPIClient.get_client() - status = api.status() + api.status() except (URLError, ConnectionError) as err: if isinstance(err, HTTPError) and err.code == UNAUTHORIZED: api = AuthAPIClient.config() # this can happen if the daemon is using auth with the --http-auth flag # when the config setting is to not use it try: - status = api.status() + api.status() except: print_error("Daemon requires authentication, but none was provided.", suggest_help=False) @@ -108,20 +100,6 @@ def main(): suggest_help=False) return 1 - status_code = status['startup_status']['code'] - - if status_code != "started" and method not in Daemon.allowed_during_startup: - print "Daemon is in the process of starting. Please try again in a bit." - message = status['startup_status']['message'] - if message: - if ( - status['startup_status']['code'] == LOADING_WALLET_CODE - and status['blockchain_status']['blocks_behind'] > 0 - ): - message += '. Blocks left: ' + str(status['blockchain_status']['blocks_behind']) - print " Status: " + message - return 1 - # TODO: check if port is bound. 
Error if its not try: From e7c57dcabc01ae0fe268c5ea76716d7be4984e83 Mon Sep 17 00:00:00 2001 From: hackrush Date: Tue, 24 Jul 2018 12:42:12 -0400 Subject: [PATCH 13/31] add components_to_skip setting --- lbrynet/conf.py | 5 ++++- lbrynet/core/BlobManager.py | 3 ++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/lbrynet/conf.py b/lbrynet/conf.py index 14fa45b53..3a8871f1d 100644 --- a/lbrynet/conf.py +++ b/lbrynet/conf.py @@ -168,9 +168,11 @@ def server_port(server_and_port): def server_list(servers): return [server_port(server) for server in servers] + def server_list_reverse(servers): return ["%s:%s" % (server, port) for server, port in servers] + class Env(envparse.Env): """An Env parser that automatically namespaces the variables with LBRY""" @@ -299,7 +301,8 @@ ADJUSTABLE_SETTINGS = { 'blockchain_name': (str, 'lbrycrd_main'), 'lbryum_servers': (list, [('lbryumx1.lbry.io', 50001), ('lbryumx2.lbry.io', 50001)], server_list, server_list_reverse), - 's3_headers_depth': (int, 96 * 10) # download headers from s3 when the local height is more than 10 chunks behind + 's3_headers_depth': (int, 96 * 10), # download headers from s3 when the local height is more than 10 chunks behind + 'components_to_skip': (list, ['reflector']) # components which will be skipped during start-up of daemon } diff --git a/lbrynet/core/BlobManager.py b/lbrynet/core/BlobManager.py index 370a3ddeb..4a86ed581 100644 --- a/lbrynet/core/BlobManager.py +++ b/lbrynet/core/BlobManager.py @@ -27,7 +27,8 @@ class DiskBlobManager(object): self.blob_hashes_to_delete = {} # {blob_hash: being_deleted (True/False)} self.check_should_announce_lc = None - if conf.settings['run_reflector_server']: # TODO: move this looping call to SQLiteStorage + # TODO: move this looping call to SQLiteStorage + if 'reflector' not in conf.settings['components_to_skip']: self.check_should_announce_lc = task.LoopingCall(self.storage.verify_will_announce_all_head_and_sd_blobs) @defer.inlineCallbacks From 
62b50dc0ae1662836ea36d5ff7387962ab4e36d2 Mon Sep 17 00:00:00 2001 From: hackrush Date: Tue, 24 Jul 2018 12:42:46 -0400 Subject: [PATCH 14/31] move custom logger to lbrynet directory -import on module level __init__ --- lbrynet/__init__.py | 1 + lbrynet/core/log_support.py | 110 ------------------------------------ lbrynet/customLogger.py | 106 ++++++++++++++++++++++++++++++++++ 3 files changed, 107 insertions(+), 110 deletions(-) create mode 100644 lbrynet/customLogger.py diff --git a/lbrynet/__init__.py b/lbrynet/__init__.py index 0a9c7f041..a93812309 100644 --- a/lbrynet/__init__.py +++ b/lbrynet/__init__.py @@ -1,4 +1,5 @@ import logging +import customLogger __version__ = "0.20.4" version = tuple(__version__.split('.')) diff --git a/lbrynet/core/log_support.py b/lbrynet/core/log_support.py index 9e0a635d1..a623c8b81 100644 --- a/lbrynet/core/log_support.py +++ b/lbrynet/core/log_support.py @@ -1,8 +1,6 @@ -import inspect import json import logging import logging.handlers -import os import sys import traceback @@ -13,25 +11,6 @@ import twisted.python.log from lbrynet import __version__ as lbrynet_version, build_type, conf from lbrynet.core import utils -#### -# This code is copied from logging/__init__.py in the python source code -#### -# -# _srcfile is used when walking the stack to check when we've got the first -# caller stack frame. 
-# -if hasattr(sys, 'frozen'): # support for py2exe - _srcfile = "logging%s__init__%s" % (os.sep, __file__[-4:]) -elif __file__[-4:].lower() in ['.pyc', '.pyo']: - _srcfile = __file__[:-4] + '.py' -else: - _srcfile = __file__ -_srcfile = os.path.normcase(_srcfile) -##### - - -TRACE = 5 - class HTTPSHandler(logging.Handler): def __init__(self, url, fqdn=False, localname=None, facility=None, cookies=None): @@ -185,33 +164,6 @@ class JsonFormatter(logging.Formatter): return json.dumps(data) -#### -# This code is copied from logging/__init__.py in the python source code -#### -def findCaller(srcfile=None): - """Returns the filename, line number and function name of the caller""" - srcfile = srcfile or _srcfile - f = inspect.currentframe() - # On some versions of IronPython, currentframe() returns None if - # IronPython isn't run with -X:Frames. - if f is not None: - f = f.f_back - rv = "(unknown file)", 0, "(unknown function)" - while hasattr(f, "f_code"): - co = f.f_code - filename = os.path.normcase(co.co_filename) - # ignore any function calls that are in this file - if filename == srcfile: - f = f.f_back - continue - rv = (filename, f.f_lineno, co.co_name) - break - return rv - - -### - - def failure(failure, log, msg, *args): """Log a failure message from a deferred. @@ -316,65 +268,3 @@ def get_parent(logger_name): return '' names = names[:-1] return '.'.join(names) - - -class Logger(logging.Logger): - """A logger that has an extra `fail` method useful for handling twisted failures.""" - - def fail(self, callback=None, *args, **kwargs): - """Returns a function to log a failure from an errback. - - The returned function appends the error message and extracts - the traceback from `err`. - - Example usage: - d.addErrback(log.fail(), 'This is an error message') - - Although odd, making the method call is necessary to extract - out useful filename and line number information; otherwise the - reported values are from inside twisted's deferred handling - code. 
- - Args: - callback: callable to call after making the log. The first argument - will be the `err` from the deferred - args: extra arguments to pass into `callback` - - Returns: a function that takes the following arguments: - err: twisted.python.failure.Failure - msg: the message to log, using normal logging string iterpolation. - msg_args: the values to subtitute into `msg` - msg_kwargs: set `level` to change from the default ERROR severity. Other - keywoards are treated as normal log kwargs. - """ - fn, lno, func = findCaller() - - def _fail(err, msg, *msg_args, **msg_kwargs): - level = msg_kwargs.pop('level', logging.ERROR) - msg += ": %s" - msg_args += (err.getErrorMessage(),) - exc_info = (err.type, err.value, err.getTracebackObject()) - record = self.makeRecord( - self.name, level, fn, lno, msg, msg_args, exc_info, func, msg_kwargs) - self.handle(record) - if callback: - try: - return callback(err, *args, **kwargs) - except Exception: - # log.fail is almost always called within an - # errback. If callback fails and we didn't catch - # the exception we would need to attach a second - # errback to deal with that, which we will almost - # never do and then we end up with an unhandled - # error that will get swallowed by twisted - self.exception('Failed to run callback') - - return _fail - - def trace(self, msg, *args, **kwargs): - if self.isEnabledFor(TRACE): - self._log(TRACE, msg, args, **kwargs) - - -logging.setLoggerClass(Logger) -logging.addLevelName(TRACE, 'TRACE') diff --git a/lbrynet/customLogger.py b/lbrynet/customLogger.py new file mode 100644 index 000000000..860f0b3c2 --- /dev/null +++ b/lbrynet/customLogger.py @@ -0,0 +1,106 @@ +import os +import sys +import inspect +import logging +TRACE = 5 + + +#### +# This code is copied from logging/__init__.py in the python source code +#### +# +# _srcfile is used when walking the stack to check when we've got the first +# caller stack frame. 
+# +if hasattr(sys, 'frozen'): # support for py2exe + _srcfile = "logging%s__init__%s" % (os.sep, __file__[-4:]) +elif __file__[-4:].lower() in ['.pyc', '.pyo']: + _srcfile = __file__[:-4] + '.py' +else: + _srcfile = __file__ +_srcfile = os.path.normcase(_srcfile) + + +def findCaller(srcfile=None): + """Returns the filename, line number and function name of the caller""" + srcfile = srcfile or _srcfile + f = inspect.currentframe() + # On some versions of IronPython, currentframe() returns None if + # IronPython isn't run with -X:Frames. + if f is not None: + f = f.f_back + rv = "(unknown file)", 0, "(unknown function)" + while hasattr(f, "f_code"): + co = f.f_code + filename = os.path.normcase(co.co_filename) + # ignore any function calls that are in this file + if filename == srcfile: + f = f.f_back + continue + rv = (filename, f.f_lineno, co.co_name) + break + return rv + + +### + +class Logger(logging.Logger): + """A logger that has an extra `fail` method useful for handling twisted failures.""" + + def fail(self, callback=None, *args, **kwargs): + """Returns a function to log a failure from an errback. + + The returned function appends the error message and extracts + the traceback from `err`. + + Example usage: + d.addErrback(log.fail(), 'This is an error message') + + Although odd, making the method call is necessary to extract + out useful filename and line number information; otherwise the + reported values are from inside twisted's deferred handling + code. + + Args: + callback: callable to call after making the log. The first argument + will be the `err` from the deferred + args: extra arguments to pass into `callback` + + Returns: a function that takes the following arguments: + err: twisted.python.failure.Failure + msg: the message to log, using normal logging string iterpolation. + msg_args: the values to subtitute into `msg` + msg_kwargs: set `level` to change from the default ERROR severity. Other + keywoards are treated as normal log kwargs. 
+ """ + fn, lno, func = findCaller() + + def _fail(err, msg, *msg_args, **msg_kwargs): + level = msg_kwargs.pop('level', logging.ERROR) + msg += ": %s" + msg_args += (err.getErrorMessage(),) + exc_info = (err.type, err.value, err.getTracebackObject()) + record = self.makeRecord( + self.name, level, fn, lno, msg, msg_args, exc_info, func, msg_kwargs) + self.handle(record) + if callback: + try: + return callback(err, *args, **kwargs) + except Exception: + # log.fail is almost always called within an + # errback. If callback fails and we didn't catch + # the exception we would need to attach a second + # errback to deal with that, which we will almost + # never do and then we end up with an unhandled + # error that will get swallowed by twisted + self.exception('Failed to run callback') + + return _fail + + def trace(self, msg, *args, **kwargs): + if self.isEnabledFor(TRACE): + self._log(TRACE, msg, args, **kwargs) + + +logging.setLoggerClass(Logger) +logging.addLevelName(TRACE, 'TRACE') From edcb06a415cddda82edd99a778db473a7413ad8a Mon Sep 17 00:00:00 2001 From: hackrush Date: Tue, 24 Jul 2018 12:44:37 -0400 Subject: [PATCH 15/31] update mocks, add test_Component_Manager --- lbrynet/tests/mocks.py | 94 +++++++++++++ lbrynet/tests/unit/components/__init__.py | 0 .../unit/components/test_Component_Manager.py | 133 ++++++++++++++++++ 3 files changed, 227 insertions(+) create mode 100644 lbrynet/tests/unit/components/__init__.py create mode 100644 lbrynet/tests/unit/components/test_Component_Manager.py diff --git a/lbrynet/tests/mocks.py b/lbrynet/tests/mocks.py index c8e131362..49114610d 100644 --- a/lbrynet/tests/mocks.py +++ b/lbrynet/tests/mocks.py @@ -1,5 +1,6 @@ import base64 import io +import mock from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives.asymmetric import rsa @@ -10,6 +11,7 @@ from twisted.python.failure import Failure from lbrynet.core.client.ClientRequest import ClientRequest from lbrynet.core.Error import 
RequestCanceledError from lbrynet.core import BlobAvailability +from lbrynet.file_manager.EncryptedFileManager import EncryptedFileManager from lbrynet.dht.node import Node as RealNode from lbrynet.daemon import ExchangeRateManager as ERM from lbrynet import conf @@ -63,6 +65,7 @@ class BTCLBCFeed(ERM.MarketFeed): 0.0 ) + class USDBTCFeed(ERM.MarketFeed): def __init__(self): ERM.MarketFeed.__init__( @@ -74,6 +77,7 @@ class USDBTCFeed(ERM.MarketFeed): 0.0 ) + class ExchangeRateManager(ERM.ExchangeRateManager): def __init__(self, market_feeds, rates): self.market_feeds = market_feeds @@ -360,6 +364,96 @@ class BlobAvailabilityTracker(BlobAvailability.BlobAvailabilityTracker): pass +# The components below viz. FakeWallet, FakeSession, FakeFileManager are just for testing Component Manager's +# startup and stop +class FakeComponent(object): + depends_on = [] + component_name = None + + def __init__(self, component_manager): + self.component_manager = component_manager + self._running = False + + @property + def running(self): + return self._running + + def start(self): + raise NotImplementedError # Override + + def stop(self): + return defer.succeed(None) + + @property + def component(self): + return self + + @defer.inlineCallbacks + def _setup(self): + result = yield defer.maybeDeferred(self.start) + self._running = True + defer.returnValue(result) + + @defer.inlineCallbacks + def _stop(self): + result = yield defer.maybeDeferred(self.stop) + self._running = False + defer.returnValue(result) + + +class FakeDelayedWallet(FakeComponent): + component_name = "wallet" + depends_on = [] + + def start(self): + return defer.succeed(True) + + def stop(self): + d = defer.Deferred() + self.component_manager.reactor.callLater(1, d.callback, True) + return d + + +class FakeDelayedSession(FakeComponent): + component_name = "session" + depends_on = [FakeDelayedWallet.component_name] + + def start(self): + d = defer.Deferred() + self.component_manager.reactor.callLater(1, d.callback, 
True) + return d + + def stop(self): + d = defer.Deferred() + self.component_manager.reactor.callLater(1, d.callback, True) + return d + + +class FakeDelayedFileManager(FakeComponent): + component_name = "file_manager" + depends_on = [FakeDelayedSession.component_name] + + def start(self): + d = defer.Deferred() + self.component_manager.reactor.callLater(1, d.callback, True) + return d + + def stop(self): + return defer.succeed(True) + +class FakeFileManager(FakeComponent): + component_name = "file_manager" + depends_on = [] + + @property + def component(self): + return mock.Mock(spec=EncryptedFileManager) + + def start(self): + return defer.succeed(True) + + def stop(self): + pass create_stream_sd_file = { 'stream_name': '746573745f66696c65', diff --git a/lbrynet/tests/unit/components/__init__.py b/lbrynet/tests/unit/components/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lbrynet/tests/unit/components/test_Component_Manager.py b/lbrynet/tests/unit/components/test_Component_Manager.py new file mode 100644 index 000000000..504b12ac8 --- /dev/null +++ b/lbrynet/tests/unit/components/test_Component_Manager.py @@ -0,0 +1,133 @@ +from twisted.internet.task import Clock +from twisted.trial import unittest + +from lbrynet.daemon.ComponentManager import ComponentManager +from lbrynet.daemon.Components import DATABASE_COMPONENT, DHT_COMPONENT, STREAM_IDENTIFIER_COMPONENT +from lbrynet.daemon.Components import HASH_ANNOUNCER_COMPONENT, REFLECTOR_COMPONENT, UPNP_COMPONENT +from lbrynet.daemon.Components import PEER_PROTOCOL_SERVER_COMPONENT, EXCHANGE_RATE_MANAGER_COMPONENT +from lbrynet.daemon import Components +from lbrynet.tests import mocks + + +class TestComponentManager(unittest.TestCase): + def setUp(self): + mocks.mock_conf_settings(self) + self.default_components_sort = [ + [Components.DatabaseComponent, + Components.ExchangeRateManagerComponent, + Components.UPnPComponent], + [Components.DHTComponent, + Components.WalletComponent], + 
[Components.HashAnnouncerComponent], + [Components.SessionComponent], + [Components.PeerProtocolServerComponent, + Components.StreamIdentifierComponent], + [Components.FileManagerComponent], + [Components.ReflectorComponent] + ] + self.component_manager = ComponentManager() + + def tearDown(self): + pass + + def test_sort_components(self): + stages = self.component_manager.sort_components() + + for stage_list, sorted_stage_list in zip(stages, self.default_components_sort): + self.assertEqual([type(stage) for stage in stage_list], sorted_stage_list) + + def test_sort_components_reverse(self): + rev_stages = self.component_manager.sort_components(reverse=True) + reverse_default_components_sort = reversed(self.default_components_sort) + + for stage_list, sorted_stage_list in zip(rev_stages, reverse_default_components_sort): + self.assertEqual([type(stage) for stage in stage_list], sorted_stage_list) + + def test_get_component_not_exists(self): + + with self.assertRaises(NameError): + self.component_manager.get_component("random_component") + + +class TestComponentManagerOverrides(unittest.TestCase): + def setUp(self): + mocks.mock_conf_settings(self) + + def test_init_with_overrides(self): + class FakeWallet(object): + component_name = "wallet" + depends_on = [] + + def __init__(self, component_manager): + self.component_manager = component_manager + + @property + def component(self): + return self + + new_component_manager = ComponentManager(wallet=FakeWallet) + fake_wallet = new_component_manager.get_component("wallet") + # wallet should be an instance of FakeWallet and not WalletComponent from Components.py + self.assertIsInstance(fake_wallet, FakeWallet) + self.assertNotIsInstance(fake_wallet, Components.WalletComponent) + + def test_init_with_wrong_overrides(self): + class FakeRandomComponent(object): + component_name = "someComponent" + depends_on = [] + + with self.assertRaises(SyntaxError): + ComponentManager(randomComponent=FakeRandomComponent) + + +class 
TestComponentManagerProperStart(unittest.TestCase): + def setUp(self): + self.reactor = Clock() + mocks.mock_conf_settings(self) + self.component_manager = ComponentManager( + skip_components=[DATABASE_COMPONENT, DHT_COMPONENT, HASH_ANNOUNCER_COMPONENT, STREAM_IDENTIFIER_COMPONENT, + PEER_PROTOCOL_SERVER_COMPONENT, REFLECTOR_COMPONENT, UPNP_COMPONENT, + EXCHANGE_RATE_MANAGER_COMPONENT], + reactor=self.reactor, + wallet=mocks.FakeDelayedWallet, + session=mocks.FakeDelayedSession, + file_manager=mocks.FakeDelayedFileManager + ) + + def tearDown(self): + pass + + def test_proper_starting_of_components(self): + self.component_manager.setup() + self.assertTrue(self.component_manager.get_component('wallet').running) + self.assertFalse(self.component_manager.get_component('session').running) + self.assertFalse(self.component_manager.get_component('file_manager').running) + + self.reactor.advance(1) + self.assertTrue(self.component_manager.get_component('wallet').running) + self.assertTrue(self.component_manager.get_component('session').running) + self.assertFalse(self.component_manager.get_component('file_manager').running) + + self.reactor.advance(1) + self.assertTrue(self.component_manager.get_component('wallet').running) + self.assertTrue(self.component_manager.get_component('session').running) + self.assertTrue(self.component_manager.get_component('file_manager').running) + + def test_proper_stopping_of_components(self): + self.component_manager.setup() + self.reactor.advance(1) + self.reactor.advance(1) + self.component_manager.stop() + self.assertFalse(self.component_manager.get_component('file_manager').running) + self.assertTrue(self.component_manager.get_component('session').running) + self.assertTrue(self.component_manager.get_component('wallet').running) + + self.reactor.advance(1) + self.assertFalse(self.component_manager.get_component('file_manager').running) + self.assertFalse(self.component_manager.get_component('session').running) + 
self.assertTrue(self.component_manager.get_component('wallet').running) + + self.reactor.advance(1) + self.assertFalse(self.component_manager.get_component('file_manager').running) + self.assertFalse(self.component_manager.get_component('session').running) + self.assertFalse(self.component_manager.get_component('wallet').running) From 37eaf634c40271e06b59482a41d4b2e5cf21e3e1 Mon Sep 17 00:00:00 2001 From: hackrush Date: Tue, 24 Jul 2018 12:45:32 -0400 Subject: [PATCH 16/31] update functional tests --- lbrynet/tests/functional/test_misc.py | 66 ++++++++++++---------- lbrynet/tests/functional/test_reflector.py | 8 +-- lbrynet/tests/functional/test_streamify.py | 44 ++++++--------- 3 files changed, 58 insertions(+), 60 deletions(-) diff --git a/lbrynet/tests/functional/test_misc.py b/lbrynet/tests/functional/test_misc.py index b134b6da2..01badedae 100644 --- a/lbrynet/tests/functional/test_misc.py +++ b/lbrynet/tests/functional/test_misc.py @@ -39,6 +39,7 @@ DummyBlobAvailabilityTracker = mocks.BlobAvailabilityTracker log_format = "%(funcName)s(): %(message)s" logging.basicConfig(level=logging.CRITICAL, format=log_format) +TEST_SKIP_STRING_ANDROID = "Test cannot pass on Android because multiprocessing is not supported at the OS level." 
def require_system(system): def wrapper(fn): @@ -103,13 +104,14 @@ class LbryUploader(object): rate_limiter = RateLimiter() self.sd_identifier = StreamDescriptorIdentifier() self.db_dir, self.blob_dir = mk_db_and_blob_dir() + dht_node = FakeNode(peer_finder=peer_finder, peer_manager=peer_manager, udpPort=4445, peerPort=5553, + node_id="abcd", externalIP="127.0.0.1") self.session = Session( conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=self.db_dir, blob_dir=self.blob_dir, node_id="abcd", peer_finder=peer_finder, hash_announcer=hash_announcer, - peer_port=5553, dht_node_port=4445, use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, - blob_tracker_class=DummyBlobAvailabilityTracker, - dht_node_class=FakeNode, is_generous=self.is_generous, external_ip="127.0.0.1") + peer_port=5553, dht_node_port=4445, rate_limiter=rate_limiter, wallet=wallet, + dht_node=dht_node, external_ip="127.0.0.1") self.lbry_file_manager = EncryptedFileManager(self.session, self.sd_identifier) if self.ul_rate_limit is not None: self.session.rate_limiter.set_ul_limit(self.ul_rate_limit) @@ -197,12 +199,10 @@ def start_lbry_reuploader(sd_hash, kill_event, dead_event, db_dir, blob_dir = mk_db_and_blob_dir() session = Session(conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, - node_id="abcd" + str(n), dht_node_port=4446, dht_node_class=FakeNode, + node_id="abcd" + str(n), dht_node_port=4446, peer_finder=peer_finder, hash_announcer=hash_announcer, blob_dir=blob_dir, peer_port=peer_port, - use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, - blob_tracker_class=DummyBlobAvailabilityTracker, - is_generous=conf.ADJUSTABLE_SETTINGS['is_generous_host'][1], + rate_limiter=rate_limiter, wallet=wallet, external_ip="127.0.0.1") lbry_file_manager = EncryptedFileManager(session, sd_identifier) @@ -303,13 +303,14 @@ def start_blob_uploader(blob_hash_queue, kill_event, dead_event, slow, is_genero db_dir, blob_dir = mk_db_and_blob_dir() + dht_node = FakeNode(peer_finder=peer_finder, 
peer_manager=peer_manager, udpPort=4445, peerPort=5553, + node_id="abcd", externalIP="127.0.0.1") + session = Session(conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, node_id="efgh", - peer_finder=peer_finder, hash_announcer=hash_announcer, dht_node_class=FakeNode, + peer_finder=peer_finder, hash_announcer=hash_announcer, blob_dir=blob_dir, peer_port=peer_port, dht_node_port=4446, - use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, - blob_tracker_class=DummyBlobAvailabilityTracker, - is_generous=conf.ADJUSTABLE_SETTINGS['is_generous_host'][1], - external_ip="127.0.0.1") + rate_limiter=rate_limiter, wallet=wallet, + external_ip="127.0.0.1", dht_node=dht_node) if slow is True: session.rate_limiter.set_ul_limit(2 ** 11) @@ -478,15 +479,16 @@ class TestTransfer(TestCase): hash_announcer = FakeAnnouncer() rate_limiter = DummyRateLimiter() sd_identifier = StreamDescriptorIdentifier() + dht_node = FakeNode(peer_finder=peer_finder, peer_manager=peer_manager, udpPort=4445, peerPort=5553, + node_id="abcd", externalIP="127.0.0.1") db_dir, blob_dir = mk_db_and_blob_dir() self.session = Session( conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, node_id="abcd", peer_finder=peer_finder, hash_announcer=hash_announcer, blob_dir=blob_dir, peer_port=5553, dht_node_port=4445, - use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, - blob_tracker_class=DummyBlobAvailabilityTracker, - dht_node_class=FakeNode, is_generous=self.is_generous, external_ip="127.0.0.1") + rate_limiter=rate_limiter, wallet=wallet, + dht_node=dht_node, external_ip="127.0.0.1") self.lbry_file_manager = EncryptedFileManager( self.session, sd_identifier) @@ -566,15 +568,16 @@ class TestTransfer(TestCase): peer_finder = FakePeerFinder(5553, peer_manager, 2) hash_announcer = FakeAnnouncer() rate_limiter = DummyRateLimiter() + dht_node = FakeNode(peer_finder=peer_finder, peer_manager=peer_manager, udpPort=4445, peerPort=5553, + node_id="abcd", externalIP="127.0.0.1") db_dir, blob_dir = 
mk_db_and_blob_dir() self.session = Session( conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, node_id="abcd", peer_finder=peer_finder, hash_announcer=hash_announcer, - blob_dir=blob_dir, peer_port=5553, dht_node_port=4445, dht_node_class=FakeNode, - use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, - blob_tracker_class=DummyBlobAvailabilityTracker, - is_generous=conf.ADJUSTABLE_SETTINGS['is_generous_host'][1], external_ip="127.0.0.1") + blob_dir=blob_dir, peer_port=5553, dht_node_port=4445, + rate_limiter=rate_limiter, wallet=wallet, + dht_node=dht_node, external_ip="127.0.0.1") d1 = self.wait_for_hash_from_queue(blob_hash_queue_1) d2 = self.wait_for_hash_from_queue(blob_hash_queue_2) @@ -646,17 +649,17 @@ class TestTransfer(TestCase): hash_announcer = FakeAnnouncer() rate_limiter = DummyRateLimiter() sd_identifier = StreamDescriptorIdentifier() + dht_node = FakeNode(peer_finder=peer_finder, peer_manager=peer_manager, udpPort=4445, peerPort=5553, + node_id="abcd", externalIP="127.0.0.1") downloaders = [] db_dir, blob_dir = mk_db_and_blob_dir() self.session = Session(conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, - node_id="abcd", peer_finder=peer_finder, dht_node_port=4445, dht_node_class=FakeNode, + node_id="abcd", peer_finder=peer_finder, dht_node_port=4445, hash_announcer=hash_announcer, blob_dir=blob_dir, peer_port=5553, - use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, - blob_tracker_class=DummyBlobAvailabilityTracker, - is_generous=conf.ADJUSTABLE_SETTINGS['is_generous_host'][1], - external_ip="127.0.0.1") + rate_limiter=rate_limiter, wallet=wallet, + external_ip="127.0.0.1", dht_node=dht_node) self.lbry_file_manager = EncryptedFileManager(self.session, sd_identifier) @@ -758,13 +761,11 @@ class TestTransfer(TestCase): sd_identifier = StreamDescriptorIdentifier() db_dir, blob_dir = mk_db_and_blob_dir() - self.session = Session(conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, dht_node_class=FakeNode, + self.session = 
Session(conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, node_id="abcd", peer_finder=peer_finder, dht_node_port=4445, hash_announcer=hash_announcer, blob_dir=blob_dir, - peer_port=5553, use_upnp=False, rate_limiter=rate_limiter, - wallet=wallet, blob_tracker_class=DummyBlobAvailabilityTracker, - is_generous=conf.ADJUSTABLE_SETTINGS['is_generous_host'][1], - external_ip="127.0.0.1") + peer_port=5553, rate_limiter=rate_limiter, + wallet=wallet, external_ip="127.0.0.1") self.lbry_file_manager = EncryptedFileManager( self.session, sd_identifier) @@ -842,3 +843,10 @@ class TestTransfer(TestCase): d.addBoth(stop) return d + + if is_android(): + test_lbry_transfer.skip = TEST_SKIP_STRING_ANDROID + test_last_blob_retrieval.skip = TEST_SKIP_STRING_ANDROID + test_double_download.skip = TEST_SKIP_STRING_ANDROID + test_multiple_uploaders.skip = TEST_SKIP_STRING_ANDROID + diff --git a/lbrynet/tests/functional/test_reflector.py b/lbrynet/tests/functional/test_reflector.py index cde45583b..082d9d74a 100644 --- a/lbrynet/tests/functional/test_reflector.py +++ b/lbrynet/tests/functional/test_reflector.py @@ -53,13 +53,13 @@ class TestReflector(unittest.TestCase): db_dir=self.db_dir, node_id="abcd", peer_finder=peer_finder, + peer_manager=peer_manager, blob_dir=self.blob_dir, peer_port=5553, dht_node_port=4444, - use_upnp=False, wallet=wallet, - blob_tracker_class=mocks.BlobAvailabilityTracker, external_ip="127.0.0.1", + dht_node=mocks.Node(), hash_announcer=mocks.Announcer(), ) @@ -73,13 +73,13 @@ class TestReflector(unittest.TestCase): db_dir=self.server_db_dir, node_id="abcd", peer_finder=peer_finder, + peer_manager=peer_manager, blob_dir=self.server_blob_dir, peer_port=5554, dht_node_port=4443, - use_upnp=False, wallet=wallet, - blob_tracker_class=mocks.BlobAvailabilityTracker, external_ip="127.0.0.1", + dht_node=mocks.Node(), hash_announcer=mocks.Announcer(), ) diff --git a/lbrynet/tests/functional/test_streamify.py b/lbrynet/tests/functional/test_streamify.py index 
cda06758b..566427bd3 100644 --- a/lbrynet/tests/functional/test_streamify.py +++ b/lbrynet/tests/functional/test_streamify.py @@ -30,6 +30,7 @@ DummyBlobAvailabilityTracker = mocks.BlobAvailabilityTracker class TestStreamify(TestCase): maxDiff = 5000 + def setUp(self): mocks.mock_conf_settings(self) self.session = None @@ -37,6 +38,12 @@ class TestStreamify(TestCase): self.is_generous = True self.db_dir = tempfile.mkdtemp() self.blob_dir = os.path.join(self.db_dir, "blobfiles") + self.dht_node = FakeNode() + self.wallet = FakeWallet() + self.peer_manager = PeerManager() + self.peer_finder = FakePeerFinder(5553, self.peer_manager, 2) + self.rate_limiter = DummyRateLimiter() + self.sd_identifier = StreamDescriptorIdentifier() os.mkdir(self.blob_dir) @defer.inlineCallbacks @@ -54,26 +61,17 @@ class TestStreamify(TestCase): os.remove("test_file") def test_create_stream(self): - wallet = FakeWallet() - peer_manager = PeerManager() - peer_finder = FakePeerFinder(5553, peer_manager, 2) - hash_announcer = FakeAnnouncer() - rate_limiter = DummyRateLimiter() - sd_identifier = StreamDescriptorIdentifier() self.session = Session( - conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=self.db_dir, node_id="abcd", - peer_finder=peer_finder, hash_announcer=hash_announcer, - blob_dir=self.blob_dir, peer_port=5553, - use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, - blob_tracker_class=DummyBlobAvailabilityTracker, - is_generous=self.is_generous, external_ip="127.0.0.1", dht_node_class=mocks.Node + conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=self.db_dir, node_id="abcd", peer_finder=self.peer_finder, + blob_dir=self.blob_dir, peer_port=5553, rate_limiter=self.rate_limiter, wallet=self.wallet, + external_ip="127.0.0.1", dht_node=self.dht_node ) - self.lbry_file_manager = EncryptedFileManager(self.session, sd_identifier) + self.lbry_file_manager = EncryptedFileManager(self.session, self.sd_identifier) d = self.session.setup() - d.addCallback(lambda _: 
add_lbry_file_to_sd_identifier(sd_identifier)) + d.addCallback(lambda _: add_lbry_file_to_sd_identifier(self.sd_identifier)) d.addCallback(lambda _: self.lbry_file_manager.setup()) def verify_equal(sd_info): @@ -102,22 +100,14 @@ class TestStreamify(TestCase): return d def test_create_and_combine_stream(self): - wallet = FakeWallet() - peer_manager = PeerManager() - peer_finder = FakePeerFinder(5553, peer_manager, 2) - hash_announcer = FakeAnnouncer() - rate_limiter = DummyRateLimiter() - sd_identifier = StreamDescriptorIdentifier() self.session = Session( - conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=self.db_dir, node_id="abcd", - peer_finder=peer_finder, hash_announcer=hash_announcer, - blob_dir=self.blob_dir, peer_port=5553, dht_node_class=mocks.Node, - use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, - blob_tracker_class=DummyBlobAvailabilityTracker, external_ip="127.0.0.1" + conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=self.db_dir, node_id="abcd", peer_finder=self.peer_finder, + blob_dir=self.blob_dir, peer_port=5553, rate_limiter=self.rate_limiter, wallet=self.wallet, + external_ip="127.0.0.1", dht_node=self.dht_node ) - self.lbry_file_manager = EncryptedFileManager(self.session, sd_identifier) + self.lbry_file_manager = EncryptedFileManager(self.session, self.sd_identifier) @defer.inlineCallbacks def create_stream(): @@ -132,7 +122,7 @@ class TestStreamify(TestCase): self.assertEqual(hashsum.hexdigest(), "68959747edc73df45e45db6379dd7b3b") d = self.session.setup() - d.addCallback(lambda _: add_lbry_file_to_sd_identifier(sd_identifier)) + d.addCallback(lambda _: add_lbry_file_to_sd_identifier(self.sd_identifier)) d.addCallback(lambda _: self.lbry_file_manager.setup()) d.addCallback(lambda _: create_stream()) return d From 944b94aae8f3339651f1c10fcf3dfbcd96d8f1c8 Mon Sep 17 00:00:00 2001 From: hackrush Date: Tue, 24 Jul 2018 12:46:06 -0400 Subject: [PATCH 17/31] update logging tests --- .../unit/{core/test_log_support.py => 
test_customLogger.py} | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) rename lbrynet/tests/unit/{core/test_log_support.py => test_customLogger.py} (90%) diff --git a/lbrynet/tests/unit/core/test_log_support.py b/lbrynet/tests/unit/test_customLogger.py similarity index 90% rename from lbrynet/tests/unit/core/test_log_support.py rename to lbrynet/tests/unit/test_customLogger.py index 5f68c6272..8648b7068 100644 --- a/lbrynet/tests/unit/core/test_log_support.py +++ b/lbrynet/tests/unit/test_customLogger.py @@ -6,7 +6,7 @@ import unittest from twisted.internet import defer from twisted import trial -from lbrynet.core import log_support +from lbrynet import customLogger from lbrynet.tests.util import is_android @@ -22,7 +22,7 @@ class TestLogger(trial.unittest.TestCase): return d def setUp(self): - self.log = log_support.Logger('test') + self.log = customLogger.Logger('test') self.stream = StringIO.StringIO() handler = logging.StreamHandler(self.stream) handler.setFormatter(logging.Formatter("%(filename)s:%(lineno)d - %(message)s")) @@ -36,7 +36,7 @@ class TestLogger(trial.unittest.TestCase): return self.stream.getvalue().split('\n') # the line number could change if this file gets refactored - expected_first_line = 'test_log_support.py:20 - My message: terrible things happened' + expected_first_line = 'test_customLogger.py:20 - My message: terrible things happened' # testing the entirety of the message is futile as the # traceback will depend on the system the test is being run on From a741fdfc44cb8d1469c1a9c7da5340d6fa969109 Mon Sep 17 00:00:00 2001 From: hackrush Date: Tue, 24 Jul 2018 12:46:18 -0400 Subject: [PATCH 18/31] update daemon unit tests --- .../tests/unit/lbrynet_daemon/test_Daemon.py | 48 ++++++++++++------- 1 file changed, 30 insertions(+), 18 deletions(-) diff --git a/lbrynet/tests/unit/lbrynet_daemon/test_Daemon.py b/lbrynet/tests/unit/lbrynet_daemon/test_Daemon.py index d47c36ba2..8722611a5 100644 --- 
a/lbrynet/tests/unit/lbrynet_daemon/test_Daemon.py +++ b/lbrynet/tests/unit/lbrynet_daemon/test_Daemon.py @@ -1,11 +1,10 @@ import mock import json -import unittest import random from os import path from twisted.internet import defer -from twisted import trial +from twisted.trial import unittest from faker import Faker @@ -14,12 +13,15 @@ from lbryum.wallet import NewWallet from lbrynet import conf from lbrynet.core import Session, PaymentRateManager, Wallet from lbrynet.database.storage import SQLiteStorage +from lbrynet.daemon.ComponentManager import ComponentManager +from lbrynet.daemon.Components import DATABASE_COMPONENT, DHT_COMPONENT, WALLET_COMPONENT, STREAM_IDENTIFIER_COMPONENT +from lbrynet.daemon.Components import HASH_ANNOUNCER_COMPONENT, REFLECTOR_COMPONENT, UPNP_COMPONENT, SESSION_COMPONENT +from lbrynet.daemon.Components import PEER_PROTOCOL_SERVER_COMPONENT, EXCHANGE_RATE_MANAGER_COMPONENT from lbrynet.daemon.Daemon import Daemon as LBRYDaemon -from lbrynet.file_manager.EncryptedFileManager import EncryptedFileManager from lbrynet.file_manager.EncryptedFileDownloader import ManagedEncryptedFileDownloader from lbrynet.tests import util -from lbrynet.tests.mocks import mock_conf_settings, FakeNetwork +from lbrynet.tests.mocks import mock_conf_settings, FakeNetwork, FakeFileManager from lbrynet.tests.mocks import BlobAvailabilityTracker as DummyBlobAvailabilityTracker from lbrynet.tests.mocks import ExchangeRateManager as DummyExchangeRateManager from lbrynet.tests.mocks import BTCLBCFeed, USDBTCFeed @@ -40,10 +42,10 @@ def get_test_daemon(data_rate=None, generous=True, with_fee=False): } daemon = LBRYDaemon(None) daemon.session = mock.Mock(spec=Session.Session) - daemon.session.wallet = mock.Mock(spec=Wallet.LBRYumWallet) - daemon.session.wallet.wallet = mock.Mock(spec=NewWallet) - daemon.session.wallet.wallet.use_encryption = False - daemon.session.wallet.network = FakeNetwork() + daemon.wallet = mock.Mock(spec=Wallet.LBRYumWallet) + 
daemon.wallet.wallet = mock.Mock(spec=NewWallet) + daemon.wallet.wallet.use_encryption = False + daemon.wallet.network = FakeNetwork() daemon.session.storage = mock.Mock(spec=SQLiteStorage) market_feeds = [BTCLBCFeed(), USDBTCFeed()] daemon.exchange_rate_manager = DummyExchangeRateManager(market_feeds, rates) @@ -73,12 +75,12 @@ def get_test_daemon(data_rate=None, generous=True, with_fee=False): {"fee": {"USD": {"address": "bQ6BGboPV2SpTMEP7wLNiAcnsZiH8ye6eA", "amount": 0.75}}}) daemon._resolve_name = lambda _: defer.succeed(metadata) migrated = smart_decode(json.dumps(metadata)) - daemon.session.wallet.resolve = lambda *_: defer.succeed( + daemon.wallet.resolve = lambda *_: defer.succeed( {"test": {'claim': {'value': migrated.claim_dict}}}) return daemon -class TestCostEst(trial.unittest.TestCase): +class TestCostEst(unittest.TestCase): def setUp(self): mock_conf_settings(self) util.resetTime(self) @@ -111,7 +113,8 @@ class TestCostEst(trial.unittest.TestCase): self.assertEquals(daemon.get_est_cost("test", size).result, correct_result) -class TestJsonRpc(trial.unittest.TestCase): +class TestJsonRpc(unittest.TestCase): + def setUp(self): def noop(): return None @@ -119,30 +122,39 @@ class TestJsonRpc(trial.unittest.TestCase): mock_conf_settings(self) util.resetTime(self) self.test_daemon = get_test_daemon() - self.test_daemon.session.wallet.is_first_run = False - self.test_daemon.session.wallet.get_best_blockhash = noop + self.test_daemon.wallet.is_first_run = False + self.test_daemon.wallet.get_best_blockhash = noop def test_status(self): d = defer.maybeDeferred(self.test_daemon.jsonrpc_status) d.addCallback(lambda status: self.assertDictContainsSubset({'is_running': False}, status)) - @unittest.skipIf(is_android(), - 'Test cannot pass on Android because PYTHONOPTIMIZE removes the docstrings.') def test_help(self): d = defer.maybeDeferred(self.test_daemon.jsonrpc_help, command='status') d.addCallback(lambda result: self.assertSubstring('daemon status', 
result['help'])) # self.assertSubstring('daemon status', d.result) + if is_android(): + test_help.skip = "Test cannot pass on Android because PYTHONOPTIMIZE removes the docstrings." -class TestFileListSorting(trial.unittest.TestCase): + +class TestFileListSorting(unittest.TestCase): def setUp(self): mock_conf_settings(self) util.resetTime(self) self.faker = Faker('en_US') self.faker.seed(66410) self.test_daemon = get_test_daemon() - self.test_daemon.lbry_file_manager = mock.Mock(spec=EncryptedFileManager) - self.test_daemon.lbry_file_manager.lbry_files = self._get_fake_lbry_files() + component_manager = ComponentManager( + skip_components=[DATABASE_COMPONENT, DHT_COMPONENT, WALLET_COMPONENT, SESSION_COMPONENT, UPNP_COMPONENT, + PEER_PROTOCOL_SERVER_COMPONENT, REFLECTOR_COMPONENT, HASH_ANNOUNCER_COMPONENT, + STREAM_IDENTIFIER_COMPONENT, EXCHANGE_RATE_MANAGER_COMPONENT], + file_manager=FakeFileManager + ) + component_manager.setup() + self.test_daemon.component_manager = component_manager + self.test_daemon.file_manager = component_manager.get_component("file_manager") + self.test_daemon.file_manager.lbry_files = self._get_fake_lbry_files() # Pre-sorted lists of prices and file names in ascending order produced by # faker with seed 66410. 
This seed was chosen becacuse it produces 3 results From 005a8b3008678eb488ddb875e82356f9caf2cda2 Mon Sep 17 00:00:00 2001 From: hackrush Date: Tue, 24 Jul 2018 12:48:43 -0400 Subject: [PATCH 19/31] allow utf-8 characters in Daemon.py MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit -amit really wants the shrug emoji ¯\_(ツ)_/¯ --- lbrynet/daemon/Daemon.py | 1 + 1 file changed, 1 insertion(+) diff --git a/lbrynet/daemon/Daemon.py b/lbrynet/daemon/Daemon.py index f8a640937..4f4fc2090 100644 --- a/lbrynet/daemon/Daemon.py +++ b/lbrynet/daemon/Daemon.py @@ -1,3 +1,4 @@ +# coding=utf-8 import binascii import logging.handlers import mimetypes From 55d3bb0ec333290eba0b4d0f3898fc0bf1610130 Mon Sep 17 00:00:00 2001 From: Jack Robison Date: Fri, 20 Jul 2018 14:45:44 -0400 Subject: [PATCH 20/31] remove auto_renew --- lbrynet/daemon/Daemon.py | 23 ----------------------- 1 file changed, 23 deletions(-) diff --git a/lbrynet/daemon/Daemon.py b/lbrynet/daemon/Daemon.py index 4f4fc2090..623bef92f 100644 --- a/lbrynet/daemon/Daemon.py +++ b/lbrynet/daemon/Daemon.py @@ -161,7 +161,6 @@ class Daemon(AuthJSONRPCServer): self.disable_max_key_fee = conf.settings['disable_max_key_fee'] self.download_timeout = conf.settings['download_timeout'] self.delete_blobs_on_remove = conf.settings['delete_blobs_on_remove'] - self.auto_renew_claim_height_delta = conf.settings['auto_renew_claim_height_delta'] self.connected_to_internet = True self.connection_status_code = None @@ -222,8 +221,6 @@ class Daemon(AuthJSONRPCServer): self.announced_startup = True log.info("Started lbrynet-daemon") - self._auto_renew() - def _get_platform(self): if self.platform is None: self.platform = system_info.get_platform() @@ -246,26 +243,6 @@ class Daemon(AuthJSONRPCServer): if not self.connected_to_internet: self.connection_status_code = CONNECTION_STATUS_NETWORK - @defer.inlineCallbacks - def _auto_renew(self): - # automatically renew claims - # auto renew is turned off if 
0 or some negative number - if self.auto_renew_claim_height_delta < 1: - defer.returnValue(None) - if not self.wallet.network.get_remote_height(): - log.warning("Failed to get remote height, aborting auto renew") - defer.returnValue(None) - log.debug("Renewing claim") - h = self.wallet.network.get_remote_height() + self.auto_renew_claim_height_delta - results = yield self.wallet.claim_renew_all_before_expiration(h) - for outpoint, result in results.iteritems(): - if result['success']: - log.info("Renewed claim at outpoint:%s claim ID:%s, paid fee:%s", - outpoint, result['claim_id'], result['fee']) - else: - log.info("Failed to renew claim at outpoint:%s, reason:%s", - outpoint, result['reason']) - @staticmethod def _already_shutting_down(sig_num, frame): log.info("Already shutting down") From defe9506bbf177ae49a9a07b427453bd7c953ff8 Mon Sep 17 00:00:00 2001 From: Jack Robison Date: Fri, 20 Jul 2018 14:48:31 -0400 Subject: [PATCH 21/31] set daemon attribute for each component as they start --- lbrynet/daemon/Component.py | 1 + lbrynet/daemon/ComponentManager.py | 2 +- lbrynet/daemon/Daemon.py | 27 +++++++++++++++++++-------- lbrynet/daemon/auth/server.py | 2 +- 4 files changed, 22 insertions(+), 10 deletions(-) diff --git a/lbrynet/daemon/Component.py b/lbrynet/daemon/Component.py index e7877c47f..e2f18c039 100644 --- a/lbrynet/daemon/Component.py +++ b/lbrynet/daemon/Component.py @@ -42,6 +42,7 @@ class Component(object): def stop(self): raise NotImplementedError() + @property def component(self): raise NotImplementedError() diff --git a/lbrynet/daemon/ComponentManager.py b/lbrynet/daemon/ComponentManager.py index 3541339dc..17dcbcb57 100644 --- a/lbrynet/daemon/ComponentManager.py +++ b/lbrynet/daemon/ComponentManager.py @@ -88,7 +88,7 @@ class ComponentManager(object): def _setup(component): if component.component_name in callbacks: d = component._setup() - d.addCallback(callbacks[component.component_name]) + d.addCallback(callbacks[component.component_name], 
component) return d return component._setup() diff --git a/lbrynet/daemon/Daemon.py b/lbrynet/daemon/Daemon.py index 623bef92f..61e7bc1cc 100644 --- a/lbrynet/daemon/Daemon.py +++ b/lbrynet/daemon/Daemon.py @@ -153,6 +153,16 @@ class Daemon(AuthJSONRPCServer): LBRYnet daemon, a jsonrpc interface to lbry functions """ + component_attributes = { + EXCHANGE_RATE_MANAGER_COMPONENT: "exchange_rate_manager", + DATABASE_COMPONENT: "storage", + SESSION_COMPONENT: "session", + WALLET_COMPONENT: "wallet", + DHT_COMPONENT: "dht_node", + STREAM_IDENTIFIER_COMPONENT: "sd_identifier", + FILE_MANAGER_COMPONENT: "file_manager", + } + def __init__(self, analytics_manager, component_manager=None): AuthJSONRPCServer.__init__(self, conf.settings['use_auth_http']) self.download_directory = conf.settings['download_directory'] @@ -208,15 +218,16 @@ class Daemon(AuthJSONRPCServer): self.looping_call_manager.start(Checker.CONNECTION_STATUS, 30) yield self._initial_setup() - yield self.component_manager.setup() - self.exchange_rate_manager = self.component_manager.get_component(EXCHANGE_RATE_MANAGER_COMPONENT) - self.storage = self.component_manager.get_component(DATABASE_COMPONENT) - self.session = self.component_manager.get_component(SESSION_COMPONENT) - self.wallet = self.component_manager.get_component(WALLET_COMPONENT) - self.dht_node = self.component_manager.get_component(DHT_COMPONENT) yield self._start_analytics() - self.sd_identifier = self.component_manager.get_component(STREAM_IDENTIFIER_COMPONENT) - self.file_manager = self.component_manager.get_component(FILE_MANAGER_COMPONENT) + + def update_attr(component_setup_result, component): + setattr(self, self.component_attributes[component.component_name], component.component) + + setup_callbacks = { + component_name: update_attr for component_name in self.component_attributes.keys() + } + + yield self.component_manager.setup(**setup_callbacks) log.info("Starting balance: " + str(self.wallet.get_balance())) self.announced_startup = 
True log.info("Started lbrynet-daemon") diff --git a/lbrynet/daemon/auth/server.py b/lbrynet/daemon/auth/server.py index 72d7e7b6b..af2461839 100644 --- a/lbrynet/daemon/auth/server.py +++ b/lbrynet/daemon/auth/server.py @@ -164,7 +164,7 @@ class AuthorizedBase(object): result = yield fn(*args, **kwargs) defer.returnValue(result) else: - raise ComponentsNotStarted("Not all required components are set up:", components) + raise ComponentsNotStarted("Not all required components are set up: %s" % json.dumps(components)) return _inner return _wrap From c3120e93cf84c80d489af94ee64ce216de5a97bb Mon Sep 17 00:00:00 2001 From: Jack Robison Date: Fri, 20 Jul 2018 15:35:09 -0400 Subject: [PATCH 22/31] delete unneeded daemon attributes --- lbrynet/daemon/Daemon.py | 126 +++++++++++---------------------------- 1 file changed, 35 insertions(+), 91 deletions(-) diff --git a/lbrynet/daemon/Daemon.py b/lbrynet/daemon/Daemon.py index 61e7bc1cc..ce832de28 100644 --- a/lbrynet/daemon/Daemon.py +++ b/lbrynet/daemon/Daemon.py @@ -153,38 +153,24 @@ class Daemon(AuthJSONRPCServer): LBRYnet daemon, a jsonrpc interface to lbry functions """ - component_attributes = { - EXCHANGE_RATE_MANAGER_COMPONENT: "exchange_rate_manager", - DATABASE_COMPONENT: "storage", - SESSION_COMPONENT: "session", - WALLET_COMPONENT: "wallet", - DHT_COMPONENT: "dht_node", - STREAM_IDENTIFIER_COMPONENT: "sd_identifier", - FILE_MANAGER_COMPONENT: "file_manager", - } - def __init__(self, analytics_manager, component_manager=None): AuthJSONRPCServer.__init__(self, conf.settings['use_auth_http']) - self.download_directory = conf.settings['download_directory'] - self.data_rate = conf.settings['data_rate'] - self.max_key_fee = conf.settings['max_key_fee'] - self.disable_max_key_fee = conf.settings['disable_max_key_fee'] - self.download_timeout = conf.settings['download_timeout'] - self.delete_blobs_on_remove = conf.settings['delete_blobs_on_remove'] + self.analytics_manager = analytics_manager + self.looping_call_manager 
= LoopingCallManager({ + Checker.INTERNET_CONNECTION: LoopingCall(CheckInternetConnection(self)), + Checker.CONNECTION_STATUS: LoopingCall(self._update_connection_status), + }) + self.component_manager = component_manager or ComponentManager( + analytics_manager=self.analytics_manager, + skip_components=conf.settings['components_to_skip'] + ) + # TODO: move this to a component self.connected_to_internet = True self.connection_status_code = None - self.platform = None - self.db_revision_file = conf.settings.get_db_revision_filename() - self._session_id = conf.settings.get_session_id() - # TODO: this should probably be passed into the daemon, or - # possibly have the entire log upload functionality taken out - # of the daemon, but I don't want to deal with that now - - self.analytics_manager = analytics_manager - self.node_id = conf.settings.node_id # components + # TODO: delete these, get the components where needed self.storage = None self.dht_node = None self.wallet = None @@ -193,58 +179,35 @@ class Daemon(AuthJSONRPCServer): self.file_manager = None self.exchange_rate_manager = None - self.wallet_user = None - self.wallet_password = None - self.waiting_on = {} + # TODO: delete this self.streams = {} - calls = { - Checker.INTERNET_CONNECTION: LoopingCall(CheckInternetConnection(self)), - Checker.CONNECTION_STATUS: LoopingCall(self._update_connection_status), - } - self.looping_call_manager = LoopingCallManager(calls) - self.component_manager = component_manager or ComponentManager( - analytics_manager=self.analytics_manager, - skip_components=conf.settings['components_to_skip'] - ) @defer.inlineCallbacks def setup(self): reactor.addSystemEventTrigger('before', 'shutdown', self._shutdown) configure_loggly_handler() - - log.info("Starting lbrynet-daemon") - + if not self.analytics_manager.is_started: + self.analytics_manager.start() self.looping_call_manager.start(Checker.INTERNET_CONNECTION, 3600) self.looping_call_manager.start(Checker.CONNECTION_STATUS, 30) - 
yield self._initial_setup() - yield self._start_analytics() - - def update_attr(component_setup_result, component): - setattr(self, self.component_attributes[component.component_name], component.component) - - setup_callbacks = { - component_name: update_attr for component_name in self.component_attributes.keys() + components = { + EXCHANGE_RATE_MANAGER_COMPONENT: "exchange_rate_manager", + DATABASE_COMPONENT: "storage", + SESSION_COMPONENT: "session", + WALLET_COMPONENT: "wallet", + DHT_COMPONENT: "dht_node", + STREAM_IDENTIFIER_COMPONENT: "sd_identifier", + FILE_MANAGER_COMPONENT: "file_manager", } - yield self.component_manager.setup(**setup_callbacks) - log.info("Starting balance: " + str(self.wallet.get_balance())) - self.announced_startup = True + log.info("Starting lbrynet-daemon") + log.info("Platform: %s", json.dumps(system_info.get_platform())) + yield self.component_manager.setup(**{n: lambda _, c: setattr(self, components[c.component_name], c.component) + for n in components.keys()}) + log.info("Started lbrynet-daemon") - def _get_platform(self): - if self.platform is None: - self.platform = system_info.get_platform() - return self.platform - - def _initial_setup(self): - def _log_platform(): - log.info("Platform: %s", json.dumps(self._get_platform())) - return defer.succeed(None) - - d = _log_platform() - return d - def _check_network_connection(self): self.connected_to_internet = utils.check_connection() @@ -280,10 +243,6 @@ class Daemon(AuthJSONRPCServer): d.addErrback(log.fail(), 'Failure while shutting down') return d - def _start_analytics(self): - if not self.analytics_manager.is_started: - self.analytics_manager.start() - def _download_blob(self, blob_hash, rate_manager=None, timeout=None): """ Download a blob @@ -365,8 +324,8 @@ class Daemon(AuthJSONRPCServer): self.analytics_manager.send_download_started(download_id, name, claim_dict) self.streams[sd_hash] = GetStream(self.sd_identifier, self.session, - self.exchange_rate_manager, 
self.max_key_fee, - self.disable_max_key_fee, + self.exchange_rate_manager, conf.settings['max_key_fee'], + conf.settings['disable_max_key_fee'], conf.settings['data_rate'], timeout) try: lbry_file, finished_deferred = yield self.streams[sd_hash].start( @@ -432,17 +391,9 @@ class Daemon(AuthJSONRPCServer): def _get_or_download_sd_blob(self, blob, sd_hash): if blob: return self.session.blob_manager.get_blob(blob[0]) - - def _check_est(downloader): - if downloader.result is not None: - downloader.cancel() - - d = defer.succeed(None) - reactor.callLater(conf.settings['search_timeout'], _check_est, d) - d.addCallback( - lambda _: download_sd_blob( - self.session, sd_hash, self.session.payment_rate_manager)) - return d + return download_sd_blob( + self.session, sd_hash, self.session.payment_rate_manager, conf.settings['search_timeout'] + ) def get_or_download_sd_blob(self, sd_hash): """Return previously downloaded sd blob if already in the blob @@ -815,7 +766,7 @@ class Daemon(AuthJSONRPCServer): } """ - platform_info = self._get_platform() + platform_info = system_info.get_platform() log.info("Get version info: " + json.dumps(platform_info)) return self._render_response(platform_info) @@ -834,7 +785,7 @@ class Daemon(AuthJSONRPCServer): (bool) true if successful """ - platform_name = self._get_platform()['platform'] + platform_name = system_info.get_platform()['platform'] report_bug_to_slack( message, conf.settings.installation_id, @@ -944,13 +895,6 @@ class Daemon(AuthJSONRPCServer): conf.settings.update({key: converted}, data_types=(conf.TYPE_RUNTIME, conf.TYPE_PERSISTED)) conf.settings.save_conf_file_settings() - - self.data_rate = conf.settings['data_rate'] - self.max_key_fee = conf.settings['max_key_fee'] - self.disable_max_key_fee = conf.settings['disable_max_key_fee'] - self.download_directory = conf.settings['download_directory'] - self.download_timeout = conf.settings['download_timeout'] - return 
self._render_response(conf.settings.get_adjustable_settings_dict()) def jsonrpc_help(self, command=None): @@ -1392,7 +1336,7 @@ class Daemon(AuthJSONRPCServer): } """ - timeout = timeout if timeout is not None else self.download_timeout + timeout = timeout if timeout is not None else conf.settings['download_timeout'] parsed_uri = parse_lbry_uri(uri) if parsed_uri.is_channel and not parsed_uri.path: From a89306b6bfc0ea0461eb427cfbcbe0c8f22fc617 Mon Sep 17 00:00:00 2001 From: Jack Robison Date: Fri, 20 Jul 2018 16:46:15 -0400 Subject: [PATCH 23/31] reorganize daemon startup -fix loggly not using the share usage setting -delete more --- lbrynet/core/log_support.py | 2 + lbrynet/daemon/Daemon.py | 33 ++------ lbrynet/daemon/DaemonConsole.py | 14 +--- lbrynet/daemon/DaemonControl.py | 29 ++----- lbrynet/daemon/DaemonServer.py | 77 ------------------- lbrynet/daemon/auth/factory.py | 38 +++++++++ lbrynet/daemon/auth/server.py | 53 ++++++++----- .../unit/lbrynet_daemon/auth/test_server.py | 2 +- 8 files changed, 87 insertions(+), 161 deletions(-) delete mode 100644 lbrynet/daemon/DaemonServer.py create mode 100644 lbrynet/daemon/auth/factory.py diff --git a/lbrynet/core/log_support.py b/lbrynet/core/log_support.py index a623c8b81..add93ea84 100644 --- a/lbrynet/core/log_support.py +++ b/lbrynet/core/log_support.py @@ -118,6 +118,8 @@ def get_loggly_url(token=None, version=None): def configure_loggly_handler(): if build_type.BUILD == 'dev': return + if not conf.settings['share_usage_data']: + return level = logging.ERROR handler = get_loggly_handler(level=level, installation_id=conf.settings.installation_id, session_id=conf.settings.get_session_id()) diff --git a/lbrynet/daemon/Daemon.py b/lbrynet/daemon/Daemon.py index ce832de28..11a6d91fc 100644 --- a/lbrynet/daemon/Daemon.py +++ b/lbrynet/daemon/Daemon.py @@ -3,7 +3,6 @@ import binascii import logging.handlers import mimetypes import os -import base58 import requests import urllib import json @@ -27,7 +26,6 @@ from 
lbryschema.decode import smart_decode from lbrynet.core.system_info import get_lbrynet_version from lbrynet import conf from lbrynet.reflector import reupload -from lbrynet.core.log_support import configure_loggly_handler from lbrynet.daemon.Component import ComponentManager from lbrynet.daemon.Components import WALLET_COMPONENT, DATABASE_COMPONENT, SESSION_COMPONENT, DHT_COMPONENT from lbrynet.daemon.Components import STREAM_IDENTIFIER_COMPONENT, FILE_MANAGER_COMPONENT @@ -78,6 +76,7 @@ DIRECTION_ASCENDING = 'asc' DIRECTION_DESCENDING = 'desc' DIRECTIONS = DIRECTION_ASCENDING, DIRECTION_DESCENDING + class IterableContainer(object): def __iter__(self): for attr in dir(self): @@ -153,12 +152,10 @@ class Daemon(AuthJSONRPCServer): LBRYnet daemon, a jsonrpc interface to lbry functions """ - def __init__(self, analytics_manager, component_manager=None): - AuthJSONRPCServer.__init__(self, conf.settings['use_auth_http']) - self.analytics_manager = analytics_manager + def __init__(self, analytics_manager=None, component_manager=None): + AuthJSONRPCServer.__init__(self, analytics_manager, conf.settings['use_auth_http']) self.looping_call_manager = LoopingCallManager({ Checker.INTERNET_CONNECTION: LoopingCall(CheckInternetConnection(self)), - Checker.CONNECTION_STATUS: LoopingCall(self._update_connection_status), }) self.component_manager = component_manager or ComponentManager( analytics_manager=self.analytics_manager, @@ -185,11 +182,9 @@ class Daemon(AuthJSONRPCServer): @defer.inlineCallbacks def setup(self): reactor.addSystemEventTrigger('before', 'shutdown', self._shutdown) - configure_loggly_handler() if not self.analytics_manager.is_started: self.analytics_manager.start() self.looping_call_manager.start(Checker.INTERNET_CONNECTION, 3600) - self.looping_call_manager.start(Checker.CONNECTION_STATUS, 30) components = { EXCHANGE_RATE_MANAGER_COMPONENT: "exchange_rate_manager", @@ -205,18 +200,8 @@ class Daemon(AuthJSONRPCServer): log.info("Platform: %s", 
json.dumps(system_info.get_platform())) yield self.component_manager.setup(**{n: lambda _, c: setattr(self, components[c.component_name], c.component) for n in components.keys()}) - log.info("Started lbrynet-daemon") - def _check_network_connection(self): - self.connected_to_internet = utils.check_connection() - - def _update_connection_status(self): - self.connection_status_code = CONNECTION_STATUS_CONNECTED - - if not self.connected_to_internet: - self.connection_status_code = CONNECTION_STATUS_NETWORK - @staticmethod def _already_shutting_down(sig_num, frame): log.info("Already shutting down") @@ -603,7 +588,6 @@ class Daemon(AuthJSONRPCServer): direction = pieces[0] return field, direction - def _get_single_peer_downloader(self): downloader = SinglePeerDownloader() downloader.setup(self.wallet) @@ -706,19 +690,16 @@ class Daemon(AuthJSONRPCServer): wallet_is_encrypted = has_wallet and self.wallet.wallet and \ self.wallet.wallet.use_encryption + connection_code = CONNECTION_STATUS_CONNECTED if utils.check_connection() else CONNECTION_STATUS_NETWORK response = { 'lbry_id': base58.b58encode(self.node_id), 'installation_id': conf.settings.installation_id, - 'is_running': self.announced_startup, + 'is_running': all(self.component_manager.get_components_status().values()), 'is_first_run': self.wallet.is_first_run if has_wallet else None, 'startup_status': self.component_manager.get_components_status(), 'connection_status': { - 'code': self.connection_status_code, - 'message': ( - CONNECTION_MESSAGES[self.connection_status_code] - if self.connection_status_code is not None - else '' - ), + 'code': connection_code, + 'message': CONNECTION_MESSAGES[connection_code], }, 'wallet_is_encrypted': wallet_is_encrypted, 'blocks_behind': remote_height - local_height, # deprecated. 
remove from UI, then here diff --git a/lbrynet/daemon/DaemonConsole.py b/lbrynet/daemon/DaemonConsole.py index 6210dfc0e..65442e751 100644 --- a/lbrynet/daemon/DaemonConsole.py +++ b/lbrynet/daemon/DaemonConsole.py @@ -10,7 +10,6 @@ from lbrynet import analytics from lbrynet import conf from lbrynet.core import utils from lbrynet.core import log_support -from lbrynet.daemon.DaemonServer import DaemonServer from lbrynet.daemon.auth.client import LBRYAPIClient from lbrynet.daemon.Daemon import Daemon @@ -175,18 +174,7 @@ def start_server_and_listen(use_auth, analytics_manager, quiet): logging.getLogger("requests").setLevel(logging.CRITICAL) analytics_manager.send_server_startup() - daemon_server = DaemonServer(analytics_manager) - try: - yield daemon_server.start(use_auth) - analytics_manager.send_server_startup_success() - if not quiet: - print "Started lbrynet-daemon!" - defer.returnValue(True) - except Exception as e: - log.exception('Failed to start lbrynet-daemon') - analytics_manager.send_server_startup_error(str(e)) - daemon_server.stop() - raise + yield Daemon().start_listening() def threaded_terminal(started_daemon, quiet): diff --git a/lbrynet/daemon/DaemonControl.py b/lbrynet/daemon/DaemonControl.py index 8d73c9ce0..8db0511b9 100644 --- a/lbrynet/daemon/DaemonControl.py +++ b/lbrynet/daemon/DaemonControl.py @@ -12,13 +12,12 @@ from lbrynet.core import log_support import argparse import logging.handlers -from twisted.internet import defer, reactor +from twisted.internet import reactor from jsonrpc.proxy import JSONRPCProxy -from lbrynet import analytics from lbrynet import conf from lbrynet.core import utils, system_info -from lbrynet.daemon.DaemonServer import DaemonServer +from lbrynet.daemon.Daemon import Daemon log = logging.getLogger(__name__) @@ -71,6 +70,7 @@ def start(): lbrynet_log = conf.settings.get_log_filename() log_support.configure_logging(lbrynet_log, not args.quiet, args.verbose) + log_support.configure_loggly_handler() log.debug('Final 
Settings: %s', conf.settings.get_current_settings_dict()) try: @@ -84,8 +84,8 @@ def start(): log.info("Starting lbrynet-daemon from command line") if test_internet_connection(): - analytics_manager = analytics.Manager.new_instance() - start_server_and_listen(analytics_manager) + daemon = Daemon() + daemon.start_listening() reactor.run() else: log.info("Not connected to internet, unable to start") @@ -101,24 +101,5 @@ def update_settings_from_args(args): }, data_types=(conf.TYPE_CLI,)) - -@defer.inlineCallbacks -def start_server_and_listen(analytics_manager): - """ - Args: - use_auth: set to true to enable http authentication - analytics_manager: to send analytics - """ - analytics_manager.send_server_startup() - daemon_server = DaemonServer(analytics_manager) - try: - yield daemon_server.start(conf.settings['use_auth_http']) - analytics_manager.send_server_startup_success() - except Exception as e: - log.exception('Failed to start lbrynet-daemon') - analytics_manager.send_server_startup_error(str(e)) - daemon_server.stop() - - if __name__ == "__main__": start() diff --git a/lbrynet/daemon/DaemonServer.py b/lbrynet/daemon/DaemonServer.py deleted file mode 100644 index e8c00606b..000000000 --- a/lbrynet/daemon/DaemonServer.py +++ /dev/null @@ -1,77 +0,0 @@ -import logging -import os - -from twisted.web import server, guard, resource -from twisted.internet import defer, reactor, error -from twisted.cred import portal - -from lbrynet import conf -from lbrynet.daemon.Daemon import Daemon -from lbrynet.daemon.auth.auth import PasswordChecker, HttpPasswordRealm -from lbrynet.daemon.auth.util import initialize_api_key_file - -log = logging.getLogger(__name__) - - -class IndexResource(resource.Resource): - def getChild(self, name, request): - request.setHeader('cache-control', 'no-cache, no-store, must-revalidate') - request.setHeader('expires', '0') - return self if name == '' else resource.Resource.getChild(self, name, request) - - -class DaemonServer(object): - def 
__init__(self, analytics_manager=None): - self._daemon = None - self.root = None - self.server_port = None - self.analytics_manager = analytics_manager - - def _setup_server(self, use_auth): - self.root = IndexResource() - self._daemon = Daemon(self.analytics_manager) - self.root.putChild("", self._daemon) - # TODO: DEPRECATED, remove this and just serve the API at the root - self.root.putChild(conf.settings['API_ADDRESS'], self._daemon) - - lbrynet_server = get_site_base(use_auth, self.root) - - try: - self.server_port = reactor.listenTCP( - conf.settings['api_port'], lbrynet_server, interface=conf.settings['api_host']) - log.info("lbrynet API listening on TCP %s:%i", conf.settings['api_host'], conf.settings['api_port']) - except error.CannotListenError: - log.info('Daemon already running, exiting app') - raise - - return defer.succeed(True) - - @defer.inlineCallbacks - def start(self, use_auth): - yield self._setup_server(use_auth) - yield self._daemon.setup() - - def stop(self): - if reactor.running: - log.info("Stopping the reactor") - reactor.fireSystemEvent("shutdown") - - -def get_site_base(use_auth, root): - if use_auth: - log.info("Using authenticated API") - root = create_auth_session(root) - else: - log.info("Using non-authenticated API") - return server.Site(root) - - -def create_auth_session(root): - pw_path = os.path.join(conf.settings['data_dir'], ".api_keys") - initialize_api_key_file(pw_path) - checker = PasswordChecker.load_file(pw_path) - realm = HttpPasswordRealm(root) - portal_to_realm = portal.Portal(realm, [checker, ]) - factory = guard.BasicCredentialFactory('Login to lbrynet api') - _lbrynet_server = guard.HTTPAuthSessionWrapper(portal_to_realm, [factory, ]) - return _lbrynet_server diff --git a/lbrynet/daemon/auth/factory.py b/lbrynet/daemon/auth/factory.py new file mode 100644 index 000000000..fed157cc0 --- /dev/null +++ b/lbrynet/daemon/auth/factory.py @@ -0,0 +1,38 @@ +import logging +import os + +from twisted.web import server, guard, 
resource +from twisted.cred import portal + +from lbrynet import conf +from .auth import PasswordChecker, HttpPasswordRealm +from .util import initialize_api_key_file + +log = logging.getLogger(__name__) + + +class AuthJSONRPCResource(resource.Resource): + def __init__(self, protocol): + resource.Resource.__init__(self) + self.putChild("", protocol) + self.putChild(conf.settings['API_ADDRESS'], protocol) + + def getChild(self, name, request): + request.setHeader('cache-control', 'no-cache, no-store, must-revalidate') + request.setHeader('expires', '0') + return self if name == '' else resource.Resource.getChild(self, name, request) + + def getServerFactory(self): + if conf.settings['use_auth_http']: + log.info("Using authenticated API") + pw_path = os.path.join(conf.settings['data_dir'], ".api_keys") + initialize_api_key_file(pw_path) + checker = PasswordChecker.load_file(pw_path) + realm = HttpPasswordRealm(self) + portal_to_realm = portal.Portal(realm, [checker, ]) + factory = guard.BasicCredentialFactory('Login to lbrynet api') + root = guard.HTTPAuthSessionWrapper(portal_to_realm, [factory, ]) + else: + log.info("Using non-authenticated API") + root = self + return server.Site(root) diff --git a/lbrynet/daemon/auth/server.py b/lbrynet/daemon/auth/server.py index af2461839..f71a1826c 100644 --- a/lbrynet/daemon/auth/server.py +++ b/lbrynet/daemon/auth/server.py @@ -13,14 +13,14 @@ from twisted.internet.error import ConnectionDone, ConnectionLost from txjsonrpc import jsonrpclib from traceback import format_exc -from lbrynet import conf +from lbrynet import conf, analytics from lbrynet.core.Error import InvalidAuthenticationToken from lbrynet.core import utils from lbrynet.core.Error import ComponentsNotStarted, ComponentStartConditionNotMet -from lbrynet.daemon.auth.util import APIKey, get_auth_message -from lbrynet.daemon.auth.client import LBRY_SECRET from lbrynet.undecorated import undecorated - +from .util import APIKey, get_auth_message +from .client import 
LBRY_SECRET +from .factory import AuthJSONRPCResource log = logging.getLogger(__name__) EMPTY_PARAMS = [{}] @@ -93,10 +93,6 @@ class UnknownAPIMethodError(Exception): pass -class NotAllowedDuringStartupError(Exception): - pass - - def trap(err, *to_trap): err.trap(*to_trap) @@ -197,13 +193,37 @@ class AuthJSONRPCServer(AuthorizedBase): isLeaf = True allowed_during_startup = [] - def __init__(self, use_authentication=None): + def __init__(self, analytics_manager, use_authentication=None): + self.analytics_manager = analytics_manager or analytics.Manager.new_instance() self._use_authentication = use_authentication or conf.settings['use_auth_http'] self.announced_startup = False self.sessions = {} + @defer.inlineCallbacks + def start_listening(self): + from twisted.internet import reactor, error as tx_error + + try: + reactor.listenTCP( + conf.settings['api_port'], self.get_server_factory(), interface=conf.settings['api_host'] + ) + log.info("lbrynet API listening on TCP %s:%i", conf.settings['api_host'], conf.settings['api_port']) + yield self.setup() + self.analytics_manager.send_server_startup_success() + except tx_error.CannotListenError: + log.error('lbrynet API failed to bind TCP %s:%i for listening', conf.settings['api_host'], + conf.settings['api_port']) + reactor.fireSystemEvent("shutdown") + except Exception as err: + self.analytics_manager.send_server_startup_error(str(err)) + log.exception('Failed to start lbrynet-daemon') + reactor.fireSystemEvent("shutdown") + def setup(self): - return NotImplementedError() + raise NotImplementedError() + + def get_server_factory(self): + return AuthJSONRPCResource(self).getServerFactory() def _set_headers(self, request, data, update_secret=False): if conf.settings['allowed_origin']: @@ -233,8 +253,9 @@ class AuthJSONRPCServer(AuthorizedBase): else: # last resort, just cast it as a string error = JSONRPCError(str(failure)) - log.warning("error processing api request: %s\ntraceback: %s", error.message, - 
"\n".join(error.traceback)) + if not failure.check(ComponentsNotStarted, ComponentStartConditionNotMet): + log.warning("error processing api request: %s\ntraceback: %s", error.message, + "\n".join(error.traceback)) response_content = jsonrpc_dumps_pretty(error, id=id_) self._set_headers(request, response_content) request.setResponseCode(200) @@ -330,14 +351,6 @@ class AuthJSONRPCServer(AuthorizedBase): request, request_id ) return server.NOT_DONE_YET - except NotAllowedDuringStartupError: - log.warning('Function not allowed during startup: %s', function_name) - self._render_error( - JSONRPCError("This method is unavailable until the daemon is fully started", - code=JSONRPCError.CODE_INVALID_REQUEST), - request, request_id - ) - return server.NOT_DONE_YET if args == EMPTY_PARAMS or args == []: _args, _kwargs = (), {} diff --git a/lbrynet/tests/unit/lbrynet_daemon/auth/test_server.py b/lbrynet/tests/unit/lbrynet_daemon/auth/test_server.py index 80fa4aa7c..bd1d5399e 100644 --- a/lbrynet/tests/unit/lbrynet_daemon/auth/test_server.py +++ b/lbrynet/tests/unit/lbrynet_daemon/auth/test_server.py @@ -11,7 +11,7 @@ class AuthJSONRPCServerTest(unittest.TestCase): # onto it. 
def setUp(self): conf.initialize_settings(False) - self.server = server.AuthJSONRPCServer(use_authentication=False) + self.server = server.AuthJSONRPCServer(True, use_authentication=False) def test_get_server_port(self): self.assertSequenceEqual( From a800f6ddf0a377918ea1b876bd8bdab25734a7c4 Mon Sep 17 00:00:00 2001 From: Jack Robison Date: Fri, 20 Jul 2018 17:22:10 -0400 Subject: [PATCH 24/31] update status command --- lbrynet/daemon/Daemon.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/lbrynet/daemon/Daemon.py b/lbrynet/daemon/Daemon.py index 11a6d91fc..1432bc45f 100644 --- a/lbrynet/daemon/Daemon.py +++ b/lbrynet/daemon/Daemon.py @@ -654,8 +654,7 @@ class Daemon(AuthJSONRPCServer): Returns: (dict) lbrynet-daemon status { - 'lbry_id': lbry peer id, base58, - 'installation_id': installation id, base58, + 'installation_id': installation id - base58, 'is_running': bool, 'is_first_run': bool, 'startup_status': { @@ -670,8 +669,11 @@ class Daemon(AuthJSONRPCServer): 'blocks_behind': remote_height - local_height, 'best_blockhash': block hash of most recent block, }, + 'dht_node_status': { + 'node_id': (str) lbry dht node id - hex encoded, + 'peers_in_routing_table': (int) the number of peers in the routing table, + }, 'wallet_is_encrypted': bool, - If given the session status option: 'session_status': { 'managed_blobs': count of blobs in the blob manager, @@ -692,7 +694,6 @@ class Daemon(AuthJSONRPCServer): connection_code = CONNECTION_STATUS_CONNECTED if utils.check_connection() else CONNECTION_STATUS_NETWORK response = { - 'lbry_id': base58.b58encode(self.node_id), 'installation_id': conf.settings.installation_id, 'is_running': all(self.component_manager.get_components_status().values()), 'is_first_run': self.wallet.is_first_run if has_wallet else None, @@ -707,6 +708,11 @@ class Daemon(AuthJSONRPCServer): 'blocks': local_height, 'blocks_behind': remote_height - local_height, 'best_blockhash': best_hash, + }, + 'dht_node_status': 
{ + 'node_id': conf.settings.node_id.encode('hex'), + 'peers_in_routing_table': 0 if not self.component_manager.all_components_running("dht") else + len(self.dht_node.contacts) } } if session_status: From b06dcf0a0d91e4713a8c14d411357bd768f13c70 Mon Sep 17 00:00:00 2001 From: Jack Robison Date: Mon, 23 Jul 2018 16:13:56 -0400 Subject: [PATCH 25/31] cancel starting components if the reactor is stopped before startup has finished -don't block starting the dht component on having found enough peers, only on setting up the protocol --- lbrynet/daemon/Component.py | 5 +++++ lbrynet/daemon/Components.py | 9 +++++++-- lbrynet/daemon/Daemon.py | 11 +++++++++-- lbrynet/daemon/auth/server.py | 3 +++ lbrynet/dht/node.py | 2 ++ 5 files changed, 26 insertions(+), 4 deletions(-) diff --git a/lbrynet/daemon/Component.py b/lbrynet/daemon/Component.py index e2f18c039..8909df65e 100644 --- a/lbrynet/daemon/Component.py +++ b/lbrynet/daemon/Component.py @@ -1,5 +1,6 @@ import logging from twisted.internet import defer +from twisted._threads import AlreadyQuit from ComponentManager import ComponentManager log = logging.getLogger(__name__) @@ -52,6 +53,8 @@ class Component(object): result = yield defer.maybeDeferred(self.start) self._running = True defer.returnValue(result) + except (defer.CancelledError, AlreadyQuit): + pass except Exception as err: log.exception("Error setting up %s", self.component_name or self.__class__.__name__) raise err @@ -62,6 +65,8 @@ class Component(object): result = yield defer.maybeDeferred(self.stop) self._running = False defer.returnValue(result) + except (defer.CancelledError, AlreadyQuit): + pass except Exception as err: log.exception("Error stopping %s", self.__class__.__name__) raise err diff --git a/lbrynet/daemon/Components.py b/lbrynet/daemon/Components.py index 5f328523e..acc216567 100644 --- a/lbrynet/daemon/Components.py +++ b/lbrynet/daemon/Components.py @@ -248,8 +248,13 @@ class DHTComponent(Component): externalIP=CS.get_external_ip(), 
peerPort=self.peer_port ) - yield self.dht_node.start(GCS('known_dht_nodes')) - log.info("Joined the dht") + + self.dht_node.start_listening() + yield self.dht_node._protocol._listening + d = self.dht_node.joinNetwork(GCS('known_dht_nodes')) + d.addCallback(lambda _: self.dht_node.start_looping_calls()) + d.addCallback(lambda _: log.info("Joined the dht")) + log.info("Started the dht") @defer.inlineCallbacks def stop(self): diff --git a/lbrynet/daemon/Daemon.py b/lbrynet/daemon/Daemon.py index 1432bc45f..0eec79093 100644 --- a/lbrynet/daemon/Daemon.py +++ b/lbrynet/daemon/Daemon.py @@ -161,6 +161,7 @@ class Daemon(AuthJSONRPCServer): analytics_manager=self.analytics_manager, skip_components=conf.settings['components_to_skip'] ) + self._component_setup_deferred = None # TODO: move this to a component self.connected_to_internet = True @@ -198,8 +199,9 @@ class Daemon(AuthJSONRPCServer): log.info("Starting lbrynet-daemon") log.info("Platform: %s", json.dumps(system_info.get_platform())) - yield self.component_manager.setup(**{n: lambda _, c: setattr(self, components[c.component_name], c.component) - for n in components.keys()}) + self._component_setup_deferred = self.component_manager.setup(**{ + n: lambda _, c: setattr(self, components[c.component_name], c.component) for n in components.keys()}) + yield self._component_setup_deferred log.info("Started lbrynet-daemon") @staticmethod @@ -223,6 +225,11 @@ class Daemon(AuthJSONRPCServer): if self.analytics_manager: self.analytics_manager.shutdown() + try: + self._component_setup_deferred.cancel() + except defer.CancelledError: + pass + if self.component_manager is not None: d = self.component_manager.stop() d.addErrback(log.fail(), 'Failure while shutting down') diff --git a/lbrynet/daemon/auth/server.py b/lbrynet/daemon/auth/server.py index f71a1826c..d5470fddd 100644 --- a/lbrynet/daemon/auth/server.py +++ b/lbrynet/daemon/auth/server.py @@ -214,6 +214,9 @@ class AuthJSONRPCServer(AuthorizedBase): log.error('lbrynet 
API failed to bind TCP %s:%i for listening', conf.settings['api_host'], conf.settings['api_port']) reactor.fireSystemEvent("shutdown") + except defer.CancelledError: + log.info("shutting down before finished starting") + reactor.fireSystemEvent("shutdown") except Exception as err: self.analytics_manager.send_server_startup_error(str(err)) log.exception('Failed to start lbrynet-daemon') diff --git a/lbrynet/dht/node.py b/lbrynet/dht/node.py index 935ba1264..9c3b0a5a2 100644 --- a/lbrynet/dht/node.py +++ b/lbrynet/dht/node.py @@ -281,7 +281,9 @@ class Node(MockKademliaHelper): yield self._protocol._listening # TODO: Refresh all k-buckets further away than this node's closest neighbour yield self.joinNetwork(known_node_addresses or []) + self.start_looping_calls() + def start_looping_calls(self): self.safe_start_looping_call(self._change_token_lc, constants.tokenSecretChangeInterval) # Start refreshing k-buckets periodically, if necessary self.safe_start_looping_call(self._refresh_node_lc, constants.checkRefreshInterval) From 13bf8125e3a468388682c9753c25e73c8649ea91 Mon Sep 17 00:00:00 2001 From: Jack Robison Date: Mon, 23 Jul 2018 16:15:12 -0400 Subject: [PATCH 26/31] status doc, better determination of is_first_run -add skipped_components to status response -re-add run_reflector_server to settings --- lbrynet/conf.py | 3 ++- lbrynet/daemon/Daemon.py | 58 +++++++++++++++++++++++++++++----------- 2 files changed, 44 insertions(+), 17 deletions(-) diff --git a/lbrynet/conf.py b/lbrynet/conf.py index 3a8871f1d..c0ec03293 100644 --- a/lbrynet/conf.py +++ b/lbrynet/conf.py @@ -285,6 +285,7 @@ ADJUSTABLE_SETTINGS = { 'peer_port': (int, 3333), 'pointtrader_server': (str, 'http://127.0.0.1:2424'), 'reflector_port': (int, 5566), + 'run_reflector_server': (bool, False), # adds `reflector` to components_to_skip unless True # if reflect_uploads is True, send files to reflector after publishing (as well as a periodic check in the # event the initial upload failed or was 
disconnected part way through, provided the auto_re_reflect_interval > 0) 'reflect_uploads': (bool, True), @@ -302,7 +303,7 @@ ADJUSTABLE_SETTINGS = { 'lbryum_servers': (list, [('lbryumx1.lbry.io', 50001), ('lbryumx2.lbry.io', 50001)], server_list, server_list_reverse), 's3_headers_depth': (int, 96 * 10), # download headers from s3 when the local height is more than 10 chunks behind - 'components_to_skip': (list, ['reflector']) # components which will be skipped during start-up of daemon + 'components_to_skip': (list, []) # components which will be skipped during start-up of daemon } diff --git a/lbrynet/daemon/Daemon.py b/lbrynet/daemon/Daemon.py index 0eec79093..57dbd8971 100644 --- a/lbrynet/daemon/Daemon.py +++ b/lbrynet/daemon/Daemon.py @@ -147,6 +147,16 @@ def sort_claim_results(claims): return claims +def is_first_run(): + if os.path.isfile(conf.settings.get_db_revision_filename()): + return False + if os.path.isfile(os.path.join(conf.settings['data_dir'], 'lbrynet.sqlite')): + return False + if os.path.isfile(os.path.join(conf.settings['lbryum_wallet_dir'], 'blockchain_headers')): + return False + return True + + class Daemon(AuthJSONRPCServer): """ LBRYnet daemon, a jsonrpc interface to lbry functions @@ -157,10 +167,14 @@ class Daemon(AuthJSONRPCServer): self.looping_call_manager = LoopingCallManager({ Checker.INTERNET_CONNECTION: LoopingCall(CheckInternetConnection(self)), }) + to_skip = list(conf.settings['components_to_skip']) + if 'reflector' not in to_skip and not conf.settings['run_reflector_server']: + to_skip.append('reflector') self.component_manager = component_manager or ComponentManager( analytics_manager=self.analytics_manager, - skip_components=conf.settings['components_to_skip'] + skip_components=to_skip ) + self.is_first_run = is_first_run() self._component_setup_deferred = None # TODO: move this to a component @@ -661,32 +675,43 @@ class Daemon(AuthJSONRPCServer): Returns: (dict) lbrynet-daemon status { - 'installation_id': installation 
id - base58, - 'is_running': bool, + 'installation_id': (str) installation id - base58, + 'is_running': (bool), 'is_first_run': bool, - 'startup_status': { - (str) component_name: (bool) True if running else False, + 'skipped_components': (list) [names of skipped components (str)], + 'startup_status': { Does not include components which have been skipped + 'database': (bool), + 'wallet': (bool), + 'session': (bool), + 'dht': (bool), + 'hash_announcer': (bool), + 'stream_identifier': (bool), + 'file_manager': (bool), + 'peer_protocol_server': (bool), + 'reflector': (bool), + 'upnp': (bool), + 'exchange_rate_manager': (bool), }, 'connection_status': { - 'code': connection status code, - 'message': connection status message + 'code': (str) connection status code, + 'message': (str) connection status message }, 'blockchain_status': { - 'blocks': local blockchain height, - 'blocks_behind': remote_height - local_height, - 'best_blockhash': block hash of most recent block, + 'blocks': (int) local blockchain height, + 'blocks_behind': (int) remote_height - local_height, + 'best_blockhash': (str) block hash of most recent block, }, 'dht_node_status': { 'node_id': (str) lbry dht node id - hex encoded, 'peers_in_routing_table': (int) the number of peers in the routing table, }, - 'wallet_is_encrypted': bool, + 'wallet_is_encrypted': (bool), If given the session status option: 'session_status': { - 'managed_blobs': count of blobs in the blob manager, - 'managed_streams': count of streams in the file manager - 'announce_queue_size': number of blobs currently queued to be announced - 'should_announce_blobs': number of blobs that should be announced + 'managed_blobs': (int) count of blobs in the blob manager, + 'managed_streams': (int) count of streams in the file manager, + 'announce_queue_size': (int) number of blobs currently queued to be announced, + 'should_announce_blobs': (int) number of blobs that should be announced, } } """ @@ -703,7 +728,8 @@ class 
Daemon(AuthJSONRPCServer): response = { 'installation_id': conf.settings.installation_id, 'is_running': all(self.component_manager.get_components_status().values()), - 'is_first_run': self.wallet.is_first_run if has_wallet else None, + 'is_first_run': self.is_first_run, + 'skipped_components': self.component_manager.skip_components, 'startup_status': self.component_manager.get_components_status(), 'connection_status': { 'code': connection_code, From a285db1b086b7a067c7caa15b93a0f92ee9555b5 Mon Sep 17 00:00:00 2001 From: Jack Robison Date: Tue, 24 Jul 2018 18:22:20 -0400 Subject: [PATCH 27/31] pylint --- lbrynet/conf.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/lbrynet/conf.py b/lbrynet/conf.py index c0ec03293..1d0020f89 100644 --- a/lbrynet/conf.py +++ b/lbrynet/conf.py @@ -285,13 +285,12 @@ ADJUSTABLE_SETTINGS = { 'peer_port': (int, 3333), 'pointtrader_server': (str, 'http://127.0.0.1:2424'), 'reflector_port': (int, 5566), - 'run_reflector_server': (bool, False), # adds `reflector` to components_to_skip unless True # if reflect_uploads is True, send files to reflector after publishing (as well as a periodic check in the # event the initial upload failed or was disconnected part way through, provided the auto_re_reflect_interval > 0) 'reflect_uploads': (bool, True), 'auto_re_reflect_interval': (int, 86400), # set to 0 to disable 'reflector_servers': (list, [('reflector2.lbry.io', 5566)], server_list, server_list_reverse), - 'run_reflector_server': (bool, False), + 'run_reflector_server': (bool, False), # adds `reflector` to components_to_skip unless True 'sd_download_timeout': (int, 3), 'share_usage_data': (bool, True), # whether to share usage stats and diagnostic info with LBRY 'peer_search_timeout': (int, 30), From a9c94ca22d7fdbbca23f8edcf04c23bf2f4c2224 Mon Sep 17 00:00:00 2001 From: Jack Robison Date: Tue, 24 Jul 2018 18:24:51 -0400 Subject: [PATCH 28/31] move setup and _shutdown to AuthJSONRPCServer --- lbrynet/daemon/Daemon.py | 76 
++++++++++------------------------- lbrynet/daemon/auth/server.py | 51 ++++++++++++++++++++++- 2 files changed, 70 insertions(+), 57 deletions(-) diff --git a/lbrynet/daemon/Daemon.py b/lbrynet/daemon/Daemon.py index 57dbd8971..d62b2b2c3 100644 --- a/lbrynet/daemon/Daemon.py +++ b/lbrynet/daemon/Daemon.py @@ -7,7 +7,6 @@ import requests import urllib import json import textwrap -import signal from copy import deepcopy from decimal import Decimal, InvalidOperation from twisted.web import server @@ -26,7 +25,6 @@ from lbryschema.decode import smart_decode from lbrynet.core.system_info import get_lbrynet_version from lbrynet import conf from lbrynet.reflector import reupload -from lbrynet.daemon.Component import ComponentManager from lbrynet.daemon.Components import WALLET_COMPONENT, DATABASE_COMPONENT, SESSION_COMPONENT, DHT_COMPONENT from lbrynet.daemon.Components import STREAM_IDENTIFIER_COMPONENT, FILE_MANAGER_COMPONENT from lbrynet.daemon.Components import EXCHANGE_RATE_MANAGER_COMPONENT @@ -36,7 +34,6 @@ from lbrynet.daemon.auth.server import AuthJSONRPCServer from lbrynet.core.PaymentRateManager import OnlyFreePaymentsManager from lbrynet.core import utils, system_info from lbrynet.core.StreamDescriptor import download_sd_blob -from lbrynet.core.looping_call_manager import LoopingCallManager from lbrynet.core.Error import InsufficientFundsError, UnknownNameError from lbrynet.core.Error import DownloadDataTimeout, DownloadSDTimeout from lbrynet.core.Error import NullFundsError, NegativeFundsError @@ -92,8 +89,8 @@ class IterableContainer(object): class Checker(object): """The looping calls the daemon runs""" - INTERNET_CONNECTION = 'internet_connection_checker' - CONNECTION_STATUS = 'connection_status_checker' + INTERNET_CONNECTION = 'internet_connection_checker', 3600 + # CONNECTION_STATUS = 'connection_status_checker' class _FileID(IterableContainer): @@ -162,20 +159,28 @@ class Daemon(AuthJSONRPCServer): LBRYnet daemon, a jsonrpc interface to lbry functions 
""" + component_attributes = { + EXCHANGE_RATE_MANAGER_COMPONENT: "exchange_rate_manager", + DATABASE_COMPONENT: "storage", + SESSION_COMPONENT: "session", + WALLET_COMPONENT: "wallet", + DHT_COMPONENT: "dht_node", + STREAM_IDENTIFIER_COMPONENT: "sd_identifier", + FILE_MANAGER_COMPONENT: "file_manager", + } + def __init__(self, analytics_manager=None, component_manager=None): - AuthJSONRPCServer.__init__(self, analytics_manager, conf.settings['use_auth_http']) - self.looping_call_manager = LoopingCallManager({ - Checker.INTERNET_CONNECTION: LoopingCall(CheckInternetConnection(self)), - }) to_skip = list(conf.settings['components_to_skip']) if 'reflector' not in to_skip and not conf.settings['run_reflector_server']: to_skip.append('reflector') - self.component_manager = component_manager or ComponentManager( - analytics_manager=self.analytics_manager, - skip_components=to_skip - ) + looping_calls = { + Checker.INTERNET_CONNECTION[0]: (LoopingCall(CheckInternetConnection(self)), + Checker.INTERNET_CONNECTION[1]) + } + AuthJSONRPCServer.__init__(self, analytics_manager=analytics_manager, component_manager=component_manager, + use_authentication=conf.settings['use_auth_http'], to_skip=to_skip, + looping_calls=looping_calls) self.is_first_run = is_first_run() - self._component_setup_deferred = None # TODO: move this to a component self.connected_to_internet = True @@ -196,58 +201,19 @@ class Daemon(AuthJSONRPCServer): @defer.inlineCallbacks def setup(self): - reactor.addSystemEventTrigger('before', 'shutdown', self._shutdown) - if not self.analytics_manager.is_started: - self.analytics_manager.start() - self.looping_call_manager.start(Checker.INTERNET_CONNECTION, 3600) - - components = { - EXCHANGE_RATE_MANAGER_COMPONENT: "exchange_rate_manager", - DATABASE_COMPONENT: "storage", - SESSION_COMPONENT: "session", - WALLET_COMPONENT: "wallet", - DHT_COMPONENT: "dht_node", - STREAM_IDENTIFIER_COMPONENT: "sd_identifier", - FILE_MANAGER_COMPONENT: "file_manager", - } - 
log.info("Starting lbrynet-daemon") log.info("Platform: %s", json.dumps(system_info.get_platform())) - self._component_setup_deferred = self.component_manager.setup(**{ - n: lambda _, c: setattr(self, components[c.component_name], c.component) for n in components.keys()}) - yield self._component_setup_deferred + yield super(Daemon, self).setup() log.info("Started lbrynet-daemon") - @staticmethod - def _already_shutting_down(sig_num, frame): - log.info("Already shutting down") - def _stop_streams(self): """stop pending GetStream downloads""" for sd_hash, stream in self.streams.iteritems(): stream.cancel(reason="daemon shutdown") def _shutdown(self): - # ignore INT/TERM signals once shutdown has started - signal.signal(signal.SIGINT, self._already_shutting_down) - signal.signal(signal.SIGTERM, self._already_shutting_down) - - log.info("Closing lbrynet session") - self._stop_streams() - self.looping_call_manager.shutdown() - if self.analytics_manager: - self.analytics_manager.shutdown() - - try: - self._component_setup_deferred.cancel() - except defer.CancelledError: - pass - - if self.component_manager is not None: - d = self.component_manager.stop() - d.addErrback(log.fail(), 'Failure while shutting down') - return d + return super(Daemon, self)._shutdown() def _download_blob(self, blob_hash, rate_manager=None, timeout=None): """ diff --git a/lbrynet/daemon/auth/server.py b/lbrynet/daemon/auth/server.py index d5470fddd..7a91c858b 100644 --- a/lbrynet/daemon/auth/server.py +++ b/lbrynet/daemon/auth/server.py @@ -2,6 +2,7 @@ import logging import urlparse import json import inspect +import signal from decimal import Decimal from functools import wraps @@ -17,6 +18,8 @@ from lbrynet import conf, analytics from lbrynet.core.Error import InvalidAuthenticationToken from lbrynet.core import utils from lbrynet.core.Error import ComponentsNotStarted, ComponentStartConditionNotMet +from lbrynet.core.looping_call_manager import LoopingCallManager +from 
lbrynet.daemon.ComponentManager import ComponentManager from lbrynet.undecorated import undecorated from .util import APIKey, get_auth_message from .client import LBRY_SECRET @@ -192,10 +195,19 @@ class AuthJSONRPCServer(AuthorizedBase): isLeaf = True allowed_during_startup = [] + component_attributes = {} - def __init__(self, analytics_manager, use_authentication=None): + def __init__(self, analytics_manager=None, component_manager=None, use_authentication=None, to_skip=None, + looping_calls=None): self.analytics_manager = analytics_manager or analytics.Manager.new_instance() + self.component_manager = component_manager or ComponentManager( + analytics_manager=self.analytics_manager, + skip_components=to_skip or [] + ) + self.looping_call_manager = LoopingCallManager({n: lc for n, (lc, t) in (looping_calls or {}).iteritems()}) + self._looping_call_times = {n: t for n, (lc, t) in (looping_calls or {}).iteritems()} self._use_authentication = use_authentication or conf.settings['use_auth_http'] + self._component_setup_deferred = None self.announced_startup = False self.sessions = {} @@ -223,7 +235,42 @@ class AuthJSONRPCServer(AuthorizedBase): reactor.fireSystemEvent("shutdown") def setup(self): - raise NotImplementedError() + from twisted.internet import reactor + + reactor.addSystemEventTrigger('before', 'shutdown', self._shutdown) + if not self.analytics_manager.is_started: + self.analytics_manager.start() + for lc_name, lc_time in self._looping_call_times.iteritems(): + self.looping_call_manager.start(lc_name, lc_time) + + def update_attribute(setup_result, component): + setattr(self, self.component_attributes[component.component_name], component.component) + + kwargs = {component: update_attribute for component in self.component_attributes.keys()} + self._component_setup_deferred = self.component_manager.setup(**kwargs) + return self._component_setup_deferred + + @staticmethod + def _already_shutting_down(sig_num, frame): + log.info("Already shutting down") + + 
def _shutdown(self): + # ignore INT/TERM signals once shutdown has started + signal.signal(signal.SIGINT, self._already_shutting_down) + signal.signal(signal.SIGTERM, self._already_shutting_down) + self.looping_call_manager.shutdown() + if self.analytics_manager: + self.analytics_manager.shutdown() + try: + self._component_setup_deferred.cancel() + except (AttributeError, defer.CancelledError): + pass + if self.component_manager is not None: + d = self.component_manager.stop() + d.addErrback(log.fail(), 'Failure while shutting down') + else: + d = defer.succeed(None) + return d def get_server_factory(self): return AuthJSONRPCResource(self).getServerFactory() From 5d9a18765936e3b3935b88a52f1624c06658a890 Mon Sep 17 00:00:00 2001 From: Jack Robison Date: Tue, 24 Jul 2018 18:35:18 -0400 Subject: [PATCH 29/31] refactor required conditions and @requires decorator --- lbrynet/daemon/ComponentManager.py | 38 +++++++++ lbrynet/daemon/Daemon.py | 128 +++++++++++++++++------------ lbrynet/daemon/auth/server.py | 32 ++++---- 3 files changed, 130 insertions(+), 68 deletions(-) diff --git a/lbrynet/daemon/ComponentManager.py b/lbrynet/daemon/ComponentManager.py index 17dcbcb57..cd4bb84fe 100644 --- a/lbrynet/daemon/ComponentManager.py +++ b/lbrynet/daemon/ComponentManager.py @@ -6,6 +6,32 @@ from lbrynet.core.Error import ComponentStartConditionNotMet log = logging.getLogger(__name__) +class RegisteredConditions(object): + conditions = {} + + +class RequiredConditionType(type): + def __new__(mcs, name, bases, newattrs): + klass = type.__new__(mcs, name, bases, newattrs) + if name != "RequiredCondition": + if klass.name in RegisteredConditions.conditions: + raise SyntaxError("already have a component registered for \"%s\"" % klass.name) + RegisteredConditions.conditions[klass.name] = klass + return klass + + +class RequiredCondition(object): + name = "" + component = "" + message = "" + + @staticmethod + def evaluate(component): + raise NotImplementedError() + + __metaclass__ = 
RequiredConditionType + + class ComponentManager(object): default_component_classes = {} @@ -29,6 +55,18 @@ class ComponentManager(object): for component_class in self.component_classes.itervalues(): self.components.add(component_class(self)) + @defer.inlineCallbacks + def evaluate_condition(self, condition_name): + if condition_name not in RegisteredConditions.conditions: + raise NameError(condition_name) + condition = RegisteredConditions.conditions[condition_name] + try: + component = self.get_component(condition.component) + result = yield defer.maybeDeferred(condition.evaluate, component) + except Exception as err: + result = False + defer.returnValue((result, "" if result else condition.message)) + def sort_components(self, reverse=False): """ Sort components by requirements diff --git a/lbrynet/daemon/Daemon.py b/lbrynet/daemon/Daemon.py index d62b2b2c3..b80ba3581 100644 --- a/lbrynet/daemon/Daemon.py +++ b/lbrynet/daemon/Daemon.py @@ -28,6 +28,7 @@ from lbrynet.reflector import reupload from lbrynet.daemon.Components import WALLET_COMPONENT, DATABASE_COMPONENT, SESSION_COMPONENT, DHT_COMPONENT from lbrynet.daemon.Components import STREAM_IDENTIFIER_COMPONENT, FILE_MANAGER_COMPONENT from lbrynet.daemon.Components import EXCHANGE_RATE_MANAGER_COMPONENT +from lbrynet.daemon.ComponentManager import RequiredCondition from lbrynet.daemon.Downloader import GetStream from lbrynet.daemon.Publisher import Publisher from lbrynet.daemon.auth.server import AuthJSONRPCServer @@ -43,6 +44,7 @@ from lbrynet.core.SinglePeerDownloader import SinglePeerDownloader from lbrynet.core.client.StandaloneBlobDownloader import StandaloneBlobDownloader log = logging.getLogger(__name__) +requires = AuthJSONRPCServer.requires INITIALIZING_CODE = 'initializing' @@ -154,6 +156,30 @@ def is_first_run(): return True +DHT_HAS_CONTACTS = "dht_has_contacts" +WALLET_IS_UNLOCKED = "wallet_is_unlocked" + + +class DHTHasContacts(RequiredCondition): + name = DHT_HAS_CONTACTS + component = 
DHT_COMPONENT + message = "your node is not connected to the dht" + + @staticmethod + def evaluate(component): + return len(component.contacts) > 0 + + +class WalletIsLocked(RequiredCondition): + name = WALLET_IS_UNLOCKED + component = WALLET_COMPONENT + message = "your wallet is locked" + + @staticmethod + def evaluate(component): + return component.check_locked() + + class Daemon(AuthJSONRPCServer): """ LBRYnet daemon, a jsonrpc interface to lbry functions @@ -710,7 +736,7 @@ class Daemon(AuthJSONRPCServer): }, 'dht_node_status': { 'node_id': conf.settings.node_id.encode('hex'), - 'peers_in_routing_table': 0 if not self.component_manager.all_components_running("dht") else + 'peers_in_routing_table': 0 if not self.component_manager.all_components_running(DHT_COMPONENT) else len(self.dht_node.contacts) } } @@ -931,7 +957,7 @@ class Daemon(AuthJSONRPCServer): """ return self._render_response(sorted([command for command in self.callable_methods.keys()])) - @AuthJSONRPCServer.requires("wallet") + @requires(WALLET_COMPONENT) def jsonrpc_wallet_balance(self, address=None, include_unconfirmed=False): """ Return the balance of the wallet @@ -953,7 +979,7 @@ class Daemon(AuthJSONRPCServer): return self._render_response(float( self.wallet.get_address_balance(address, include_unconfirmed))) - @AuthJSONRPCServer.requires("wallet") + @requires(WALLET_COMPONENT) @defer.inlineCallbacks def jsonrpc_wallet_unlock(self, password): """ @@ -980,7 +1006,7 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response(result) defer.returnValue(response) - @AuthJSONRPCServer.requires("wallet", wallet=lambda wallet: wallet.check_locked()) + @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED]) @defer.inlineCallbacks def jsonrpc_wallet_decrypt(self): """ @@ -1000,7 +1026,7 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response(result) defer.returnValue(response) - @AuthJSONRPCServer.requires("wallet", wallet=lambda wallet: wallet.check_locked()) + 
@requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED]) @defer.inlineCallbacks def jsonrpc_wallet_encrypt(self, new_password): """ @@ -1041,7 +1067,7 @@ class Daemon(AuthJSONRPCServer): reactor.callLater(0.1, reactor.fireSystemEvent, "shutdown") defer.returnValue(response) - @AuthJSONRPCServer.requires("file_manager") + @requires(FILE_MANAGER_COMPONENT) @defer.inlineCallbacks def jsonrpc_file_list(self, sort=None, **kwargs): """ @@ -1113,7 +1139,7 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response(result) defer.returnValue(response) - @AuthJSONRPCServer.requires("wallet") + @requires(WALLET_COMPONENT) @defer.inlineCallbacks def jsonrpc_resolve_name(self, name, force=False): """ @@ -1139,7 +1165,7 @@ class Daemon(AuthJSONRPCServer): else: defer.returnValue(metadata) - @AuthJSONRPCServer.requires("wallet") + @requires(WALLET_COMPONENT) @defer.inlineCallbacks def jsonrpc_claim_show(self, txid=None, nout=None, claim_id=None): """ @@ -1185,7 +1211,7 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response(claim_results) defer.returnValue(response) - @AuthJSONRPCServer.requires("wallet") + @requires(WALLET_COMPONENT) @defer.inlineCallbacks def jsonrpc_resolve(self, force=False, uri=None, uris=[]): """ @@ -1276,7 +1302,7 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response(results) defer.returnValue(response) - @AuthJSONRPCServer.requires("wallet", "file_manager", "session", wallet=lambda wallet: wallet.check_locked()) + @requires(WALLET_COMPONENT, FILE_MANAGER_COMPONENT, SESSION_COMPONENT, conditions=[WALLET_IS_UNLOCKED]) @defer.inlineCallbacks def jsonrpc_get(self, uri, file_name=None, timeout=None): """ @@ -1365,7 +1391,7 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response(result) defer.returnValue(response) - @AuthJSONRPCServer.requires("file_manager") + @requires(FILE_MANAGER_COMPONENT) @defer.inlineCallbacks def jsonrpc_file_set_status(self, status, **kwargs): """ @@ -1406,7 
+1432,7 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response(msg) defer.returnValue(response) - @AuthJSONRPCServer.requires("file_manager") + @requires(FILE_MANAGER_COMPONENT) @defer.inlineCallbacks def jsonrpc_file_delete(self, delete_from_download_dir=False, delete_all=False, **kwargs): """ @@ -1467,7 +1493,7 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response(result) defer.returnValue(response) - @AuthJSONRPCServer.requires("wallet", "session", wallet=lambda wallet: wallet.check_locked()) + @requires(WALLET_COMPONENT, SESSION_COMPONENT, conditions=[WALLET_IS_UNLOCKED]) @defer.inlineCallbacks def jsonrpc_stream_cost_estimate(self, uri, size=None): """ @@ -1488,7 +1514,7 @@ class Daemon(AuthJSONRPCServer): cost = yield self.get_est_cost(uri, size) defer.returnValue(cost) - @AuthJSONRPCServer.requires("wallet", wallet=lambda wallet: wallet.check_locked()) + @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED]) @defer.inlineCallbacks def jsonrpc_channel_new(self, channel_name, amount): """ @@ -1545,7 +1571,7 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response(result) defer.returnValue(response) - @AuthJSONRPCServer.requires("wallet") + @requires(WALLET_COMPONENT) @defer.inlineCallbacks def jsonrpc_channel_list(self): """ @@ -1566,7 +1592,7 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response(result) defer.returnValue(response) - @AuthJSONRPCServer.requires("wallet") + @requires(WALLET_COMPONENT) @AuthJSONRPCServer.deprecated("channel_list") def jsonrpc_channel_list_mine(self): """ @@ -1584,7 +1610,7 @@ class Daemon(AuthJSONRPCServer): return self.jsonrpc_channel_list() - @AuthJSONRPCServer.requires("wallet") + @requires(WALLET_COMPONENT) @defer.inlineCallbacks def jsonrpc_channel_export(self, claim_id): """ @@ -1603,7 +1629,7 @@ class Daemon(AuthJSONRPCServer): result = yield self.wallet.export_certificate_info(claim_id) defer.returnValue(result) - 
@AuthJSONRPCServer.requires("wallet") + @requires(WALLET_COMPONENT) @defer.inlineCallbacks def jsonrpc_channel_import(self, serialized_certificate_info): """ @@ -1622,7 +1648,7 @@ class Daemon(AuthJSONRPCServer): result = yield self.wallet.import_certificate_info(serialized_certificate_info) defer.returnValue(result) - @AuthJSONRPCServer.requires("wallet", "file_manager", "session", wallet=lambda wallet: wallet.check_locked()) + @requires(WALLET_COMPONENT, FILE_MANAGER_COMPONENT, SESSION_COMPONENT, conditions=[WALLET_IS_UNLOCKED]) @defer.inlineCallbacks def jsonrpc_publish(self, name, bid, metadata=None, file_path=None, fee=None, title=None, description=None, author=None, language=None, license=None, @@ -1834,7 +1860,7 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response(result) defer.returnValue(response) - @AuthJSONRPCServer.requires("wallet", wallet=lambda wallet: wallet.check_locked()) + @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED]) @defer.inlineCallbacks def jsonrpc_claim_abandon(self, claim_id=None, txid=None, nout=None): """ @@ -1867,7 +1893,7 @@ class Daemon(AuthJSONRPCServer): self.analytics_manager.send_claim_action('abandon') defer.returnValue(result) - @AuthJSONRPCServer.requires("wallet", wallet=lambda wallet: wallet.check_locked()) + @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED]) @defer.inlineCallbacks def jsonrpc_claim_new_support(self, name, claim_id, amount): """ @@ -1895,7 +1921,7 @@ class Daemon(AuthJSONRPCServer): self.analytics_manager.send_claim_action('new_support') defer.returnValue(result) - @AuthJSONRPCServer.requires("wallet", wallet=lambda wallet: wallet.check_locked()) + @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED]) @defer.inlineCallbacks def jsonrpc_claim_renew(self, outpoint=None, height=None): """ @@ -1938,7 +1964,7 @@ class Daemon(AuthJSONRPCServer): result = yield self.wallet.claim_renew_all_before_expiration(height) defer.returnValue(result) - 
@AuthJSONRPCServer.requires("wallet", wallet=lambda wallet: wallet.check_locked()) + @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED]) @defer.inlineCallbacks def jsonrpc_claim_send_to_address(self, claim_id, address, amount=None): """ @@ -1971,7 +1997,7 @@ class Daemon(AuthJSONRPCServer): defer.returnValue(response) # TODO: claim_list_mine should be merged into claim_list, but idk how to authenticate it -Grin - @AuthJSONRPCServer.requires("wallet") + @requires(WALLET_COMPONENT) def jsonrpc_claim_list_mine(self): """ List my name claims @@ -2009,7 +2035,7 @@ class Daemon(AuthJSONRPCServer): d.addCallback(lambda claims: self._render_response(claims)) return d - @AuthJSONRPCServer.requires("wallet") + @requires(WALLET_COMPONENT) @defer.inlineCallbacks def jsonrpc_claim_list(self, name): """ @@ -2048,7 +2074,7 @@ class Daemon(AuthJSONRPCServer): sort_claim_results(claims['claims']) defer.returnValue(claims) - @AuthJSONRPCServer.requires("wallet") + @requires(WALLET_COMPONENT) @defer.inlineCallbacks def jsonrpc_claim_list_by_channel(self, page=0, page_size=10, uri=None, uris=[]): """ @@ -2138,7 +2164,7 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response(results) defer.returnValue(response) - @AuthJSONRPCServer.requires("wallet") + @requires(WALLET_COMPONENT) def jsonrpc_transaction_list(self): """ List transactions belonging to wallet @@ -2200,7 +2226,7 @@ class Daemon(AuthJSONRPCServer): d.addCallback(lambda r: self._render_response(r)) return d - @AuthJSONRPCServer.requires("wallet") + @requires(WALLET_COMPONENT) def jsonrpc_transaction_show(self, txid): """ Get a decoded transaction from a txid @@ -2219,7 +2245,7 @@ class Daemon(AuthJSONRPCServer): d.addCallback(lambda r: self._render_response(r)) return d - @AuthJSONRPCServer.requires("wallet") + @requires(WALLET_COMPONENT) def jsonrpc_wallet_is_address_mine(self, address): """ Checks if an address is associated with the current wallet. 
@@ -2238,7 +2264,7 @@ class Daemon(AuthJSONRPCServer): d.addCallback(lambda is_mine: self._render_response(is_mine)) return d - @AuthJSONRPCServer.requires("wallet") + @requires(WALLET_COMPONENT) def jsonrpc_wallet_public_key(self, address): """ Get public key from wallet address @@ -2258,7 +2284,7 @@ class Daemon(AuthJSONRPCServer): d.addCallback(lambda r: self._render_response(r)) return d - @AuthJSONRPCServer.requires("wallet") + @requires(WALLET_COMPONENT) @defer.inlineCallbacks def jsonrpc_wallet_list(self): """ @@ -2278,7 +2304,7 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response(addresses) defer.returnValue(response) - @AuthJSONRPCServer.requires("wallet") + @requires(WALLET_COMPONENT) def jsonrpc_wallet_new_address(self): """ Generate a new wallet address @@ -2302,7 +2328,7 @@ class Daemon(AuthJSONRPCServer): d.addCallback(lambda address: self._render_response(address)) return d - @AuthJSONRPCServer.requires("wallet") + @requires(WALLET_COMPONENT) def jsonrpc_wallet_unused_address(self): """ Return an address containing no balance, will create @@ -2327,7 +2353,7 @@ class Daemon(AuthJSONRPCServer): d.addCallback(lambda address: self._render_response(address)) return d - @AuthJSONRPCServer.requires("wallet", wallet=lambda wallet: wallet.check_locked()) + @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED]) @AuthJSONRPCServer.deprecated("wallet_send") @defer.inlineCallbacks def jsonrpc_send_amount_to_address(self, amount, address): @@ -2357,7 +2383,7 @@ class Daemon(AuthJSONRPCServer): self.analytics_manager.send_credits_sent() defer.returnValue(True) - @AuthJSONRPCServer.requires("wallet", wallet=lambda wallet: wallet.check_locked()) + @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED]) @defer.inlineCallbacks def jsonrpc_wallet_send(self, amount, address=None, claim_id=None): """ @@ -2406,7 +2432,7 @@ class Daemon(AuthJSONRPCServer): self.analytics_manager.send_claim_action('new_support') defer.returnValue(result) - 
@AuthJSONRPCServer.requires("wallet", wallet=lambda wallet: wallet.check_locked()) + @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED]) @defer.inlineCallbacks def jsonrpc_wallet_prefill_addresses(self, num_addresses, amount, no_broadcast=False): """ @@ -2437,7 +2463,7 @@ class Daemon(AuthJSONRPCServer): tx['broadcast'] = broadcast defer.returnValue(tx) - @AuthJSONRPCServer.requires("wallet") + @requires(WALLET_COMPONENT) @defer.inlineCallbacks def jsonrpc_utxo_list(self): """ @@ -2477,7 +2503,7 @@ class Daemon(AuthJSONRPCServer): defer.returnValue(unspent) - @AuthJSONRPCServer.requires("wallet") + @requires(WALLET_COMPONENT) def jsonrpc_block_show(self, blockhash=None, height=None): """ Get contents of a block @@ -2505,7 +2531,7 @@ class Daemon(AuthJSONRPCServer): d.addCallback(lambda r: self._render_response(r)) return d - @AuthJSONRPCServer.requires("wallet", "session", wallet=lambda wallet: wallet.check_locked()) + @requires(WALLET_COMPONENT, SESSION_COMPONENT, conditions=[WALLET_IS_UNLOCKED]) @defer.inlineCallbacks def jsonrpc_blob_get(self, blob_hash, timeout=None, encoding=None, payment_rate_manager=None): """ @@ -2549,7 +2575,7 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response(result) defer.returnValue(response) - @AuthJSONRPCServer.requires("session") + @requires(SESSION_COMPONENT) @defer.inlineCallbacks def jsonrpc_blob_delete(self, blob_hash): """ @@ -2577,7 +2603,7 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response("Deleted %s" % blob_hash) defer.returnValue(response) - @AuthJSONRPCServer.requires("dht") + @requires(DHT_COMPONENT) @defer.inlineCallbacks def jsonrpc_peer_list(self, blob_hash, timeout=None): """ @@ -2616,7 +2642,7 @@ class Daemon(AuthJSONRPCServer): ] defer.returnValue(results) - @AuthJSONRPCServer.requires("database") + @requires(SESSION_COMPONENT, DHT_COMPONENT, conditions=[DHT_HAS_CONTACTS]) @defer.inlineCallbacks def jsonrpc_blob_announce(self, blob_hash=None, stream_hash=None, 
sd_hash=None): """ @@ -2653,7 +2679,7 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response(True) defer.returnValue(response) - @AuthJSONRPCServer.requires("file_manager") + @requires(FILE_MANAGER_COMPONENT) @defer.inlineCallbacks def jsonrpc_file_reflect(self, **kwargs): """ @@ -2689,7 +2715,7 @@ class Daemon(AuthJSONRPCServer): results = yield reupload.reflect_file(lbry_file, reflector_server=reflector_server) defer.returnValue(results) - @AuthJSONRPCServer.requires("database", "session", "wallet") + @requires(SESSION_COMPONENT, WALLET_COMPONENT) @defer.inlineCallbacks def jsonrpc_blob_list(self, uri=None, stream_hash=None, sd_hash=None, needed=None, finished=None, page_size=None, page=None): @@ -2753,7 +2779,7 @@ class Daemon(AuthJSONRPCServer): response = yield self._render_response(blob_hashes_for_return) defer.returnValue(response) - @AuthJSONRPCServer.requires("session") + @requires(SESSION_COMPONENT) def jsonrpc_blob_reflect(self, blob_hashes, reflector_server=None): """ Reflects specified blobs @@ -2772,7 +2798,7 @@ class Daemon(AuthJSONRPCServer): d.addCallback(lambda r: self._render_response(r)) return d - @AuthJSONRPCServer.requires("session") + @requires(SESSION_COMPONENT) def jsonrpc_blob_reflect_all(self): """ Reflects all saved blobs @@ -2792,7 +2818,7 @@ class Daemon(AuthJSONRPCServer): d.addCallback(lambda r: self._render_response(r)) return d - @AuthJSONRPCServer.requires("dht") + @requires(DHT_COMPONENT) @defer.inlineCallbacks def jsonrpc_peer_ping(self, node_id): """ @@ -2822,7 +2848,7 @@ class Daemon(AuthJSONRPCServer): result = {'error': 'ping timeout'} defer.returnValue(result) - @AuthJSONRPCServer.requires("dht") + @requires(DHT_COMPONENT) def jsonrpc_routing_table_get(self): """ Get DHT routing information @@ -2899,7 +2925,7 @@ class Daemon(AuthJSONRPCServer): return self._render_response(result) # the single peer downloader needs wallet access - @AuthJSONRPCServer.requires("dht", "wallet", wallet=lambda wallet: 
wallet.check_locked()) + @requires(DHT_COMPONENT, WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED]) def jsonrpc_blob_availability(self, blob_hash, search_timeout=None, blob_timeout=None): """ Get blob availability @@ -2924,7 +2950,7 @@ class Daemon(AuthJSONRPCServer): return self._blob_availability(blob_hash, search_timeout, blob_timeout) - @AuthJSONRPCServer.requires("session", "wallet", "dht", wallet=lambda wallet: wallet.check_locked()) + @requires(SESSION_COMPONENT, WALLET_COMPONENT, DHT_COMPONENT, conditions=[WALLET_IS_UNLOCKED]) @AuthJSONRPCServer.deprecated("stream_availability") def jsonrpc_get_availability(self, uri, sd_timeout=None, peer_timeout=None): """ @@ -2945,7 +2971,7 @@ class Daemon(AuthJSONRPCServer): return self.jsonrpc_stream_availability(uri, peer_timeout, sd_timeout) - @AuthJSONRPCServer.requires("session", "wallet", "dht", wallet=lambda wallet: wallet.check_locked()) + @requires(SESSION_COMPONENT, WALLET_COMPONENT, DHT_COMPONENT, conditions=[WALLET_IS_UNLOCKED]) @defer.inlineCallbacks def jsonrpc_stream_availability(self, uri, search_timeout=None, blob_timeout=None): """ diff --git a/lbrynet/daemon/auth/server.py b/lbrynet/daemon/auth/server.py index 7a91c858b..1190a58de 100644 --- a/lbrynet/daemon/auth/server.py +++ b/lbrynet/daemon/auth/server.py @@ -143,27 +143,25 @@ class AuthorizedBase(object): return _deprecated_wrapper @staticmethod - def requires(*components, **component_conditionals): + def requires(*components, **conditions): + if conditions and ["conditions"] != conditions.keys(): + raise SyntaxError("invalid conditions argument") + condition_names = conditions.get("conditions", []) + def _wrap(fn): @defer.inlineCallbacks @wraps(fn) def _inner(*args, **kwargs): - if component_conditionals: - for component_name, condition in component_conditionals.iteritems(): - if not callable(condition): - raise SyntaxError("The specified condition is invalid/not callable") - if args[0].component_manager.all_components_running(component_name): - 
if not (yield condition(args[0].component_manager.get_component(component_name))): - raise ComponentStartConditionNotMet( - "Not all conditions required to do this operation are met") - else: - raise ComponentsNotStarted("%s component is not setup.\nConditional cannot be checked" - % component_name) - if args[0].component_manager.all_components_running(*components): - result = yield fn(*args, **kwargs) - defer.returnValue(result) - else: - raise ComponentsNotStarted("Not all required components are set up: %s" % json.dumps(components)) + component_manager = args[0].component_manager + for condition_name in condition_names: + condition_result, err_msg = yield component_manager.evaluate_condition(condition_name) + if not condition_result: + raise ComponentStartConditionNotMet(err_msg) + if not component_manager.all_components_running(*components): + raise ComponentsNotStarted("the following required components have not yet started: " + "%s" % json.dumps(components)) + result = yield fn(*args, **kwargs) + defer.returnValue(result) return _inner return _wrap From 903cd86cdd981cadca827d47ee0ce5bba5a00902 Mon Sep 17 00:00:00 2001 From: hackrush Date: Tue, 24 Jul 2018 12:47:15 -0400 Subject: [PATCH 30/31] changelog --- CHANGELOG.md | 21 ++++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1f513039e..7c410402e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -19,24 +19,31 @@ at anytime. * ### Fixed - * + * loggly error reporting not following `share_usage_data` * ### Deprecated - * + * automatic claim renew, this is no longer needed * ### Changed - * - * + * api server class to use components, and for all JSONRPC API commands to be callable so long as the required components are available. 
+ * return error messages when required conditions on components are not met for API calls + * `status` to no longer return a base58 encoded `lbry_id`, instead return this as the hex encoded `node_id` in a new `dht_node_status` field. + * `startup_status` field in the response to `status` to be a dict of component names to status booleans + * moved wallet, upnp and dht startup code from `Session` to `Components` ### Added + * `skipped_components` list to the response from `status` + * `skipped_components` config setting, accepts a list of names of components to not run + * `ComponentManager` for managing the lifecycles of dependencies + * `requires` decorator to register the components required by a `jsonrpc_` command, to facilitate commands registering asynchronously + * unittests for `ComponentManager` * script to generate docs/api.json file (https://github.com/lbryio/lbry.tech/issues/42) - + ### Removed - * - * + * most of the internal attributes from `Daemon` ## [0.20.4] - 2018-07-18 From 5984ae7ce90805f8383c3c8b1a88506de913592a Mon Sep 17 00:00:00 2001 From: Jack Robison Date: Tue, 24 Jul 2018 21:10:53 -0400 Subject: [PATCH 31/31] fix --- CHANGELOG.md | 8 +------- lbrynet/__init__.py | 1 - lbrynet/core/__init__.py | 2 ++ lbrynet/{customLogger.py => custom_logger.py} | 0 lbrynet/daemon/__init__.py | 1 + lbrynet/tests/unit/test_customLogger.py | 4 ++-- 6 files changed, 6 insertions(+), 10 deletions(-) rename lbrynet/{customLogger.py => custom_logger.py} (100%) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7c410402e..ca2f70285 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,12 +8,6 @@ can and probably will change functionality and break backwards compatability at anytime. ## [Unreleased] - -## [0.20.3] - 2018-07-20 -### Changed -* Additional information added to the balance error message when editing a claim. -(https://github.com/lbryio/lbry/pull/1309) - ### Security * *
* `requires` decorator to register the components required by a `jsonrpc_` command, to facilitate commands registering asynchronously * unittests for `ComponentManager` * script to generate docs/api.json file (https://github.com/lbryio/lbry.tech/issues/42) - + * additional information to the balance error message when editing a claim (https://github.com/lbryio/lbry/pull/1309) ### Removed * most of the internal attributes from `Daemon` diff --git a/lbrynet/__init__.py b/lbrynet/__init__.py index a93812309..0a9c7f041 100644 --- a/lbrynet/__init__.py +++ b/lbrynet/__init__.py @@ -1,5 +1,4 @@ import logging -import customLogger __version__ = "0.20.4" version = tuple(__version__.split('.')) diff --git a/lbrynet/core/__init__.py b/lbrynet/core/__init__.py index 6ac1f3432..df7d37558 100644 --- a/lbrynet/core/__init__.py +++ b/lbrynet/core/__init__.py @@ -5,3 +5,5 @@ This includes classes for connecting to other peers and downloading blobs from t connections from peers and responding to their requests, managing locally stored blobs, sending and receiving payments, and locating peers in the DHT. 
""" + +from lbrynet import custom_logger diff --git a/lbrynet/customLogger.py b/lbrynet/custom_logger.py similarity index 100% rename from lbrynet/customLogger.py rename to lbrynet/custom_logger.py diff --git a/lbrynet/daemon/__init__.py b/lbrynet/daemon/__init__.py index 8e0f5feca..c428bbb3b 100644 --- a/lbrynet/daemon/__init__.py +++ b/lbrynet/daemon/__init__.py @@ -1,3 +1,4 @@ +from lbrynet import custom_logger import Components # register Component classes from lbrynet.daemon.auth.client import LBRYAPIClient get_client = LBRYAPIClient.get_client diff --git a/lbrynet/tests/unit/test_customLogger.py b/lbrynet/tests/unit/test_customLogger.py index 8648b7068..74cfbb8e6 100644 --- a/lbrynet/tests/unit/test_customLogger.py +++ b/lbrynet/tests/unit/test_customLogger.py @@ -6,7 +6,7 @@ import unittest from twisted.internet import defer from twisted import trial -from lbrynet import customLogger +from lbrynet import custom_logger from lbrynet.tests.util import is_android @@ -22,7 +22,7 @@ class TestLogger(trial.unittest.TestCase): return d def setUp(self): - self.log = customLogger.Logger('test') + self.log = custom_logger.Logger('test') self.stream = StringIO.StringIO() handler = logging.StreamHandler(self.stream) handler.setFormatter(logging.Formatter("%(filename)s:%(lineno)d - %(message)s"))