refactor lbrynet-daemon into modular components (#1164)

* add daemon Component and ComponentManager classes

* convert directory and SQLiteStorage setup to be a Component

* support callbacks to component setups

* Fixed typo in ComponentManager

* convert wallet to be Component

* Use storage from session.

* Remove create_session internal function and PEP8

* Starting to convert session to its own component. Removed ref to `self.storage` from Daemon.py

* Making DHT component(broken)

* Refactored classes to reduce redundancy in getting config setting

* DHT is now its own component

* Fixed `test_streamify` test

* Fixed regression caused by removing `peer_manager` from session

* refactor ComponentManager and Component to use instance instead of class methods

* Hash announcer, file manager, stream identifier components

* Query Handler and server components

* Reflector Component

* Fixed test_streamify(well Jack did, but ¯\_(ツ)_/¯)

* All tests now passing

* Pylint fixes

* Oops(That's all you're gonna get :-P)

* Making decorators(WIP, commit so that I don't lose work)

* Decorator made and decorating of functions done(some other changes)

* import fixes and removed temporary test function

* Fixed new broken tests from daemon refactor

* Sanitization of modules

* Reworded errors

* wallet unlock condition checks, fixed breaking changes

* Rebased on master and other crazy stuff

* Started writing tests

* Tests for component manager

* Fix Daemon Tests

* Fixed passing mutable args in init

* Using constants instead of strings. Added CHANGELOG.md

* Now components can be skipped by setting relevant config in file.

* P-Y-L-I-N-T #angry_emoji
This commit is contained in:
Jack Robison 2018-07-05 15:21:52 -04:00 committed by Lex Berezhny
parent 148cc96025
commit 75a6ff269e
19 changed files with 1326 additions and 662 deletions

View file

@ -23,14 +23,19 @@ at anytime.
### Changed ### Changed
* *
* *
* `publish` to accept bid as a decimal string
* all JSONRPC API commands are now registered asynchronously and are available to be called as soon as they are ready
### Added ### Added
* * Component Manager for managing the dependencies, startup and stopping of components
* `requires` decorator to register the components required by a `jsonrpc_` command, to facilitate commands registering asynchronously
* added unittests for Component Manager
* *
### Removed ### Removed
* * `STARTUP_STAGES` from `status` API and CLI call, it instead returns a dictionary of components along with their running status(this is a **potentially breaking change** if `STARTUP_STAGES` is relied upon)
* * all component startup code from `Daemon.py`
* wallet, upnp and dht startup code from `session.py`, the code now resides in `Components.py`
## [0.20.3] - 2018-07-03 ## [0.20.3] - 2018-07-03

View file

@ -168,9 +168,11 @@ def server_port(server_and_port):
def server_list(servers): def server_list(servers):
return [server_port(server) for server in servers] return [server_port(server) for server in servers]
def server_list_reverse(servers): def server_list_reverse(servers):
return ["%s:%s" % (server, port) for server, port in servers] return ["%s:%s" % (server, port) for server, port in servers]
class Env(envparse.Env): class Env(envparse.Env):
"""An Env parser that automatically namespaces the variables with LBRY""" """An Env parser that automatically namespaces the variables with LBRY"""
@ -299,7 +301,8 @@ ADJUSTABLE_SETTINGS = {
'blockchain_name': (str, 'lbrycrd_main'), 'blockchain_name': (str, 'lbrycrd_main'),
'lbryum_servers': (list, [('lbryumx1.lbry.io', 50001), ('lbryumx2.lbry.io', 'lbryum_servers': (list, [('lbryumx1.lbry.io', 50001), ('lbryumx2.lbry.io',
50001)], server_list, server_list_reverse), 50001)], server_list, server_list_reverse),
's3_headers_depth': (int, 96 * 10) # download headers from s3 when the local height is more than 10 chunks behind 's3_headers_depth': (int, 96 * 10), # download headers from s3 when the local height is more than 10 chunks behind
'components_to_skip': (list, []) # components which will be skipped during start-up of daemon
} }

View file

@ -155,13 +155,23 @@ class InvalidAuthenticationToken(Exception):
class NegotiationError(Exception): class NegotiationError(Exception):
pass pass
class InvalidCurrencyError(Exception): class InvalidCurrencyError(Exception):
def __init__(self, currency): def __init__(self, currency):
self.currency = currency self.currency = currency
Exception.__init__( Exception.__init__(
self, 'Invalid currency: {} is not a supported currency.'.format(currency)) self, 'Invalid currency: {} is not a supported currency.'.format(currency))
class NoSuchDirectoryError(Exception): class NoSuchDirectoryError(Exception):
def __init__(self, directory): def __init__(self, directory):
self.directory = directory self.directory = directory
Exception.__init__(self, 'No such directory {}'.format(directory)) Exception.__init__(self, 'No such directory {}'.format(directory))
class ComponentStartConditionNotMet(Exception):
pass
class ComponentsNotStarted(Exception):
pass

View file

@ -1,11 +1,8 @@
import logging import logging
import miniupnpc from twisted.internet import defer
from twisted.internet import threads, defer
from lbrynet.core.BlobManager import DiskBlobManager from lbrynet.core.BlobManager import DiskBlobManager
from lbrynet.dht import node, hashannouncer
from lbrynet.database.storage import SQLiteStorage from lbrynet.database.storage import SQLiteStorage
from lbrynet.core.RateLimiter import RateLimiter from lbrynet.core.RateLimiter import RateLimiter
from lbrynet.core.utils import generate_id
from lbrynet.core.PaymentRateManager import BasePaymentRateManager, OnlyFreePaymentsManager from lbrynet.core.PaymentRateManager import BasePaymentRateManager, OnlyFreePaymentsManager
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
@ -32,11 +29,11 @@ class Session(object):
peers can connect to this peer. peers can connect to this peer.
""" """
def __init__(self, blob_data_payment_rate, db_dir=None, node_id=None, peer_manager=None, dht_node_port=None, def __init__(self, blob_data_payment_rate, db_dir=None, node_id=None, dht_node_port=None,
known_dht_nodes=None, peer_finder=None, hash_announcer=None, blob_dir=None, blob_manager=None, known_dht_nodes=None, peer_finder=None, hash_announcer=None, blob_dir=None, blob_manager=None,
peer_port=None, use_upnp=True, rate_limiter=None, wallet=None, dht_node_class=node.Node, peer_port=None, use_upnp=True, rate_limiter=None, wallet=None, blob_tracker_class=None,
blob_tracker_class=None, payment_rate_manager_class=None, is_generous=True, external_ip=None, payment_rate_manager_class=None, is_generous=True, external_ip=None, storage=None,
storage=None): dht_node=None, peer_manager=None):
"""@param blob_data_payment_rate: The default payment rate for blob data """@param blob_data_payment_rate: The default payment rate for blob data
@param db_dir: The directory in which levelDB files should be stored @param db_dir: The directory in which levelDB files should be stored
@ -111,8 +108,7 @@ class Session(object):
self.external_ip = external_ip self.external_ip = external_ip
self.upnp_redirects = [] self.upnp_redirects = []
self.wallet = wallet self.wallet = wallet
self.dht_node_class = dht_node_class self.dht_node = dht_node
self.dht_node = None
self.base_payment_rate_manager = BasePaymentRateManager(blob_data_payment_rate) self.base_payment_rate_manager = BasePaymentRateManager(blob_data_payment_rate)
self.payment_rate_manager = OnlyFreePaymentsManager() self.payment_rate_manager = OnlyFreePaymentsManager()
# self.payment_rate_manager_class = payment_rate_manager_class or NegotiatedPaymentRateManager # self.payment_rate_manager_class = payment_rate_manager_class or NegotiatedPaymentRateManager
@ -124,15 +120,14 @@ class Session(object):
log.debug("Starting session.") log.debug("Starting session.")
if self.node_id is None: if self.dht_node is not None:
self.node_id = generate_id() if self.peer_manager is None:
self.peer_manager = self.dht_node.peer_manager
if self.use_upnp is True: if self.peer_finder is None:
d = self._try_upnp() self.peer_finder = self.dht_node.peer_finder
else:
d = defer.succeed(True) d = self.storage.setup()
d.addCallback(lambda _: self.storage.setup())
d.addCallback(lambda _: self._setup_dht())
d.addCallback(lambda _: self._setup_other_components()) d.addCallback(lambda _: self._setup_other_components())
return d return d
@ -140,97 +135,16 @@ class Session(object):
"""Stop all services""" """Stop all services"""
log.info('Stopping session.') log.info('Stopping session.')
ds = [] ds = []
if self.hash_announcer:
self.hash_announcer.stop()
# if self.blob_tracker is not None: # if self.blob_tracker is not None:
# ds.append(defer.maybeDeferred(self.blob_tracker.stop)) # ds.append(defer.maybeDeferred(self.blob_tracker.stop))
if self.dht_node is not None:
ds.append(defer.maybeDeferred(self.dht_node.stop))
if self.rate_limiter is not None: if self.rate_limiter is not None:
ds.append(defer.maybeDeferred(self.rate_limiter.stop)) ds.append(defer.maybeDeferred(self.rate_limiter.stop))
if self.wallet is not None:
ds.append(defer.maybeDeferred(self.wallet.stop))
if self.blob_manager is not None: if self.blob_manager is not None:
ds.append(defer.maybeDeferred(self.blob_manager.stop)) ds.append(defer.maybeDeferred(self.blob_manager.stop))
if self.use_upnp is True: # if self.use_upnp is True:
ds.append(defer.maybeDeferred(self._unset_upnp)) # ds.append(defer.maybeDeferred(self._unset_upnp))
return defer.DeferredList(ds) return defer.DeferredList(ds)
def _try_upnp(self):
log.debug("In _try_upnp")
def get_free_port(upnp, port, protocol):
# returns an existing mapping if it exists
mapping = upnp.getspecificportmapping(port, protocol)
if not mapping:
return port
if upnp.lanaddr == mapping[0]:
return mapping[1]
return get_free_port(upnp, port + 1, protocol)
def get_port_mapping(upnp, port, protocol, description):
# try to map to the requested port, if there is already a mapping use the next external
# port available
if protocol not in ['UDP', 'TCP']:
raise Exception("invalid protocol")
port = get_free_port(upnp, port, protocol)
if isinstance(port, tuple):
log.info("Found existing UPnP redirect %s:%i (%s) to %s:%i, using it",
self.external_ip, port, protocol, upnp.lanaddr, port)
return port
upnp.addportmapping(port, protocol, upnp.lanaddr, port,
description, '')
log.info("Set UPnP redirect %s:%i (%s) to %s:%i", self.external_ip, port,
protocol, upnp.lanaddr, port)
return port
def threaded_try_upnp():
if self.use_upnp is False:
log.debug("Not using upnp")
return False
u = miniupnpc.UPnP()
num_devices_found = u.discover()
if num_devices_found > 0:
u.selectigd()
external_ip = u.externalipaddress()
if external_ip != '0.0.0.0' and not self.external_ip:
# best not to rely on this external ip, the router can be behind layers of NATs
self.external_ip = external_ip
if self.peer_port:
self.peer_port = get_port_mapping(u, self.peer_port, 'TCP', 'LBRY peer port')
self.upnp_redirects.append((self.peer_port, 'TCP'))
if self.dht_node_port:
self.dht_node_port = get_port_mapping(u, self.dht_node_port, 'UDP', 'LBRY DHT port')
self.upnp_redirects.append((self.dht_node_port, 'UDP'))
return True
return False
def upnp_failed(err):
log.warning("UPnP failed. Reason: %s", err.getErrorMessage())
return False
d = threads.deferToThread(threaded_try_upnp)
d.addErrback(upnp_failed)
return d
def _setup_dht(self): # does not block startup, the dht will re-attempt if necessary
self.dht_node = self.dht_node_class(
node_id=self.node_id,
udpPort=self.dht_node_port,
externalIP=self.external_ip,
peerPort=self.peer_port,
peer_manager=self.peer_manager,
peer_finder=self.peer_finder,
)
if not self.hash_announcer:
self.hash_announcer = hashannouncer.DHTHashAnnouncer(self.dht_node, self.storage)
self.peer_manager = self.dht_node.peer_manager
self.peer_finder = self.dht_node.peer_finder
d = self.dht_node.start(self.known_dht_nodes)
d.addCallback(lambda _: log.info("Joined the dht"))
d.addCallback(lambda _: self.hash_announcer.start())
def _setup_other_components(self): def _setup_other_components(self):
log.debug("Setting up the rest of the components") log.debug("Setting up the rest of the components")
@ -255,28 +169,4 @@ class Session(object):
self.rate_limiter.start() self.rate_limiter.start()
d = self.blob_manager.setup() d = self.blob_manager.setup()
d.addCallback(lambda _: self.wallet.start())
# d.addCallback(lambda _: self.blob_tracker.start())
return d
def _unset_upnp(self):
log.info("Unsetting upnp for session")
def threaded_unset_upnp():
u = miniupnpc.UPnP()
num_devices_found = u.discover()
if num_devices_found > 0:
u.selectigd()
for port, protocol in self.upnp_redirects:
if u.getspecificportmapping(port, protocol) is None:
log.warning(
"UPnP redirect for %s %d was removed by something else.",
protocol, port)
else:
u.deleteportmapping(port, protocol)
log.info("Removed UPnP redirect for %s %d.", protocol, port)
self.upnp_redirects = []
d = threads.deferToThread(threaded_unset_upnp)
d.addErrback(lambda err: str(err))
return d return d

View file

@ -938,9 +938,7 @@ class LBRYumWallet(Wallet):
self._lag_counter = 0 self._lag_counter = 0
self.blocks_behind = 0 self.blocks_behind = 0
self.catchup_progress = 0 self.catchup_progress = 0
self.is_wallet_unlocked = None
# fired when the wallet actually unlocks (wallet_unlocked_d can be called multiple times)
self.wallet_unlock_success = defer.Deferred()
def _is_first_run(self): def _is_first_run(self):
return (not self.printed_retrieving_headers and return (not self.printed_retrieving_headers and
@ -953,21 +951,23 @@ class LBRYumWallet(Wallet):
return self._cmd_runner return self._cmd_runner
def check_locked(self): def check_locked(self):
if not self.wallet.use_encryption: """
log.info("Wallet is not encrypted") Checks if the wallet is encrypted(locked) or not
self.wallet_unlock_success.callback(True)
elif not self._cmd_runner: :return: (boolean) indicating whether the wallet is locked or not
"""
if not self._cmd_runner:
raise Exception("Command runner hasn't been initialized yet") raise Exception("Command runner hasn't been initialized yet")
elif self._cmd_runner.locked: elif self._cmd_runner.locked:
log.info("Waiting for wallet password") log.info("Waiting for wallet password")
self.wallet_unlocked_d.addCallback(self.unlock) self.wallet_unlocked_d.addCallback(self.unlock)
return self.wallet_unlock_success return self.is_wallet_unlocked
def unlock(self, password): def unlock(self, password):
if self._cmd_runner and self._cmd_runner.locked: if self._cmd_runner and self._cmd_runner.locked:
try: try:
self._cmd_runner.unlock_wallet(password) self._cmd_runner.unlock_wallet(password)
self.wallet_unlock_success.callback(True) self.is_wallet_unlocked = True
log.info("Unlocked the wallet!") log.info("Unlocked the wallet!")
except InvalidPassword: except InvalidPassword:
log.warning("Incorrect password, try again") log.warning("Incorrect password, try again")
@ -1054,6 +1054,7 @@ class LBRYumWallet(Wallet):
wallet.create_main_account() wallet.create_main_account()
wallet.synchronize() wallet.synchronize()
self.wallet = wallet self.wallet = wallet
self.is_wallet_unlocked = not self.wallet.use_encryption
self._check_large_wallet() self._check_large_wallet()
return defer.succeed(True) return defer.succeed(True)

View file

@ -0,0 +1,63 @@
import logging
from twisted.internet import defer
from ComponentManager import ComponentManager
log = logging.getLogger(__name__)
class ComponentType(type):
    """Metaclass that auto-registers Component subclasses.

    Every class created with this metaclass — except the abstract base
    named exactly "Component" — is recorded in
    ComponentManager.default_component_classes under its component_name,
    so defining a subclass is all that is needed to make it available
    to the component manager.
    """

    def __new__(mcs, name, bases, newattrs):
        cls = super(ComponentType, mcs).__new__(mcs, name, bases, newattrs)
        if name != "Component":
            # register everything except the abstract base class
            ComponentManager.default_component_classes[cls.component_name] = cls
        return cls
class Component(object):
    """
    lbrynet-daemon component helper

    Inheriting classes will be automatically registered with the ComponentManager and must implement setup and stop
    methods
    """

    __metaclass__ = ComponentType
    depends_on = []  # component_name strings that must be started before this one
    component_name = None  # unique name; used as the registration key by ComponentType

    def __init__(self, component_manager):
        self.component_manager = component_manager
        self._running = False

    @property
    def running(self):
        # True only between a successful _setup() and the next _stop()
        return self._running

    def start(self):
        raise NotImplementedError()

    def stop(self):
        raise NotImplementedError()

    def component(self):
        raise NotImplementedError()

    @defer.inlineCallbacks
    def _setup(self):
        """Run start(), marking the component running on success."""
        try:
            result = yield defer.maybeDeferred(self.start)
            self._running = True
            defer.returnValue(result)
        except Exception:
            log.exception("Error setting up %s", self.component_name or self.__class__.__name__)
            # bare raise preserves the original traceback (py2), unlike `raise err`
            raise

    @defer.inlineCallbacks
    def _stop(self):
        """Run stop(), marking the component not running on success."""
        try:
            result = yield defer.maybeDeferred(self.stop)
            self._running = False
            defer.returnValue(result)
        except Exception:
            log.exception("Error stopping %s", self.component_name or self.__class__.__name__)
            raise

View file

@ -0,0 +1,139 @@
import logging
from twisted.internet import defer
from lbrynet import conf
from lbrynet.core.Error import ComponentStartConditionNotMet
log = logging.getLogger(__name__)
class ComponentManager(object):
    """Resolves dependencies between Components and starts/stops them in stages."""

    default_component_classes = {}  # populated by the ComponentType metaclass

    def __init__(self, reactor=None, analytics_manager=None, skip_components=None, **override_components):
        """
        :param reactor: twisted reactor (injectable for tests)
        :param analytics_manager: analytics manager shared with components
        :param skip_components: (list) component names that must not be instantiated
        :param override_components: component_name -> replacement Component class
        :raises SyntaxError: when an override names an unknown component
        """
        # copy so we never mutate a list the caller passed in
        self.skip_components = list(skip_components or [])
        self.skip_components.extend(conf.settings['components_to_skip'])
        self.reactor = reactor
        self.component_classes = {}
        self.components = set()
        self.analytics_manager = analytics_manager

        for component_name, component_class in self.default_component_classes.iteritems():
            if component_name in override_components:
                component_class = override_components.pop(component_name)
            if component_name not in self.skip_components:
                self.component_classes[component_name] = component_class

        if override_components:
            raise SyntaxError("unexpected components: %s" % override_components)

        for component_class in self.component_classes.itervalues():
            self.components.add(component_class(self))

    def sort_components(self, reverse=False):
        """
        Sort components by requirements

        :param reverse: (bool) reversed order, used for stopping
        :return: (list) stages; each stage is a list of components whose
                 dependencies are satisfied by earlier stages
        :raises ComponentStartConditionNotMet: on unresolvable dependencies
        """
        steps = []
        staged = set()
        components = set(self.components)

        # components with no requirements
        step = []
        for component in set(components):
            if not component.depends_on:
                step.append(component)
                staged.add(component.component_name)
                components.remove(component)
        if step:
            steps.append(step)

        while components:
            step = []
            to_stage = set()
            for component in set(components):
                reqs_met = 0
                for needed in component.depends_on:
                    if needed in staged:
                        reqs_met += 1
                if reqs_met == len(component.depends_on):
                    step.append(component)
                    to_stage.add(component.component_name)
                    components.remove(component)
            if step:
                staged.update(to_stage)
                steps.append(step)
            elif components:
                # no progress this pass: remaining components have missing or
                # circular dependencies
                raise ComponentStartConditionNotMet("Unresolved dependencies for: %s" % components)
        if reverse:
            steps.reverse()
        return steps

    @defer.inlineCallbacks
    def setup(self, **callbacks):
        """
        Start Components in sequence sorted by requirements

        :param callbacks: component_name -> callable fired with that
                          component's setup result as soon as it is ready
        :return: (defer.Deferred)
        :raises NameError: when a callback names an unknown component
        :raises ValueError: when a callback is not callable
        """
        for component_name, cb in callbacks.iteritems():
            if component_name not in self.component_classes:
                raise NameError("unknown component: %s" % component_name)
            if not callable(cb):
                raise ValueError("%s is not callable" % cb)

        def _setup_component(component):
            # chain the user-supplied callback (if any) onto the component setup
            if component.component_name in callbacks:
                d = component._setup()
                d.addCallback(callbacks[component.component_name])
                return d
            return component._setup()

        stages = self.sort_components()
        for stage in stages:
            yield defer.DeferredList([_setup_component(component) for component in stage])

    @defer.inlineCallbacks
    def stop(self):
        """
        Stop Components in reversed startup order

        :return: (defer.Deferred)
        """
        stages = self.sort_components(reverse=True)
        for stage in stages:
            yield defer.DeferredList([component._stop() for component in stage])

    def all_components_running(self, *component_names):
        """
        Check if components are running

        :return: (bool) True if all specified components are running
        :raises NameError: when a given name is not a known component
        """
        components = {component.component_name: component for component in self.components}
        for component in component_names:
            if component not in components:
                raise NameError("%s is not a known Component" % component)
            if not components[component].running:
                return False
        return True

    def get_components_status(self):
        """
        List status of all the components, whether they are running or not

        :return: (dict) {(str) component_name: (bool) True is running else False}
        """
        return {
            component.component_name: component.running
            for component in self.components
        }

    def get_component(self, component_name):
        """Return the underlying component object registered as component_name.

        :raises NameError: when component_name is not known
        """
        for component in self.components:
            if component.component_name == component_name:
                return component.component
        raise NameError(component_name)

View file

@ -0,0 +1,523 @@
import os
import logging
import miniupnpc
from twisted.internet import defer, threads, reactor, error
from lbrynet import conf
from lbrynet.core.Session import Session
from lbrynet.core.StreamDescriptor import StreamDescriptorIdentifier, EncryptedFileStreamType
from lbrynet.core.Wallet import LBRYumWallet
from lbrynet.core.server.BlobRequestHandler import BlobRequestHandlerFactory
from lbrynet.core.server.ServerProtocol import ServerProtocolFactory
from lbrynet.daemon.Component import Component
from lbrynet.database.storage import SQLiteStorage
from lbrynet.dht import node, hashannouncer
from lbrynet.file_manager.EncryptedFileManager import EncryptedFileManager
from lbrynet.lbry_file.client.EncryptedFileDownloader import EncryptedFileSaverFactory
from lbrynet.lbry_file.client.EncryptedFileOptions import add_lbry_file_to_sd_identifier
from lbrynet.reflector import ServerFactory as reflector_server_factory
from lbrynet.core.utils import generate_id
log = logging.getLogger(__name__)
# settings must be initialized before this file is imported
DATABASE_COMPONENT = "database"
WALLET_COMPONENT = "wallet"
SESSION_COMPONENT = "session"
DHT_COMPONENT = "dht"
HASH_ANNOUNCER_COMPONENT = "hashAnnouncer"
STREAM_IDENTIFIER_COMPONENT = "streamIdentifier"
FILE_MANAGER_COMPONENT = "fileManager"
PEER_PROTOCOL_SERVER_COMPONENT = "peerProtocolServer"
REFLECTOR_COMPONENT = "reflector"
UPNP_COMPONENT = "upnp"
class ConfigSettings(object):
    """Small static helpers around conf.settings used by the components below."""

    @staticmethod
    def get_conf_setting(setting_name):
        """Return the value of a single configuration setting."""
        return conf.settings[setting_name]

    @staticmethod
    def get_blobfiles_dir():
        """Return the blobfiles directory, honoring a non-default override."""
        blobfiles_dir = conf.settings['BLOBFILES_DIR']
        if blobfiles_dir != "blobfiles":
            log.info("Using non-default blobfiles directory: %s", blobfiles_dir)
            return blobfiles_dir
        return os.path.join(GCS("data_dir"), "blobfiles")

    @staticmethod
    def get_node_id():
        """Return the configured node id (may be None)."""
        return conf.settings.node_id

    @staticmethod
    def get_external_ip():
        """Return this node's external IP as reported by the platform info."""
        from lbrynet.core.system_info import get_platform
        platform_info = get_platform(get_ip=True)
        return platform_info['ip']
# Shorthand for common ConfigSettings methods
# CS: the helper class itself; GCS(name): read a single conf setting
CS = ConfigSettings
GCS = ConfigSettings.get_conf_setting
class DatabaseComponent(Component):
    """Creates lbrynet directories, runs pending db migrations, and opens SQLiteStorage."""
    component_name = DATABASE_COMPONENT

    def __init__(self, component_manager):
        Component.__init__(self, component_manager)
        self.storage = None  # SQLiteStorage instance once start() completes

    @property
    def component(self):
        return self.storage

    @staticmethod
    def get_current_db_revision():
        # schema revision this build of lbrynet expects
        return 9

    @staticmethod
    def get_revision_filename():
        return conf.settings.get_db_revision_filename()

    @staticmethod
    def _write_db_revision_file(version_num):
        with open(conf.settings.get_db_revision_filename(), mode='w') as db_revision:
            db_revision.write(str(version_num))

    @defer.inlineCallbacks
    def start(self):
        """Ensure directories exist, migrate the database if needed, open storage.

        :raises Exception: when the on-disk db revision is newer than this build
        """
        # check directories exist, create them if they don't
        log.info("Loading databases")
        if not os.path.exists(GCS('download_directory')):
            os.mkdir(GCS('download_directory'))

        if not os.path.exists(GCS('data_dir')):
            os.mkdir(GCS('data_dir'))
            # fresh data dir: stamp it with the current revision so no migration runs
            self._write_db_revision_file(self.get_current_db_revision())
            log.debug("Created the db revision file: %s", self.get_revision_filename())

        if not os.path.exists(CS.get_blobfiles_dir()):
            os.mkdir(CS.get_blobfiles_dir())
            log.debug("Created the blobfile directory: %s", str(CS.get_blobfiles_dir()))

        if not os.path.exists(self.get_revision_filename()):
            log.warning("db_revision file not found. Creating it")
            self._write_db_revision_file(self.get_current_db_revision())

        # check the db migration and run any needed migrations
        with open(self.get_revision_filename(), "r") as revision_read_handle:
            old_revision = int(revision_read_handle.read().strip())

        if old_revision > self.get_current_db_revision():
            raise Exception('This version of lbrynet is not compatible with the database\n'
                            'Your database is revision %i, expected %i' %
                            (old_revision, self.get_current_db_revision()))
        if old_revision < self.get_current_db_revision():
            from lbrynet.database.migrator import dbmigrator
            log.info("Upgrading your databases (revision %i to %i)", old_revision, self.get_current_db_revision())
            yield threads.deferToThread(
                dbmigrator.migrate_db, GCS('data_dir'), old_revision, self.get_current_db_revision()
            )
            self._write_db_revision_file(self.get_current_db_revision())
            log.info("Finished upgrading the databases.")

        # start SQLiteStorage
        self.storage = SQLiteStorage(GCS('data_dir'))
        yield self.storage.setup()

    @defer.inlineCallbacks
    def stop(self):
        yield self.storage.stop()
        self.storage = None
class WalletComponent(Component):
    """Starts the LBRYum wallet once the database component is available."""
    component_name = WALLET_COMPONENT
    depends_on = [DATABASE_COMPONENT]

    def __init__(self, component_manager):
        Component.__init__(self, component_manager)
        self.wallet = None  # LBRYumWallet once start() completes

    @property
    def component(self):
        return self.wallet

    @defer.inlineCallbacks
    def start(self):
        """Build the wallet config from settings and start the wallet.

        :raises ValueError: for the removed lbrycrd wallet or an unknown wallet type
        """
        storage = self.component_manager.get_component(DATABASE_COMPONENT)
        wallet_type = GCS('wallet')

        if wallet_type == conf.LBRYCRD_WALLET:
            raise ValueError('LBRYcrd Wallet is no longer supported')
        elif wallet_type == conf.LBRYUM_WALLET:
            log.info("Using lbryum wallet")
            # lbryum expects {address: {'t': tcp_port_str}} for its server map
            lbryum_servers = {address: {'t': str(port)}
                              for address, port in GCS('lbryum_servers')}
            config = {
                'auto_connect': True,
                'chain': GCS('blockchain_name'),
                'default_servers': lbryum_servers
            }
            if 'use_keyring' in conf.settings:
                config['use_keyring'] = GCS('use_keyring')
            if conf.settings['lbryum_wallet_dir']:
                config['lbryum_path'] = GCS('lbryum_wallet_dir')
            self.wallet = LBRYumWallet(storage, config)
            yield self.wallet.start()
        else:
            raise ValueError('Wallet Type {} is not valid'.format(wallet_type))

    @defer.inlineCallbacks
    def stop(self):
        yield self.wallet.stop()
        self.wallet = None
class SessionComponent(Component):
    """Builds the Session from the already-started storage, wallet, dht and announcer."""
    component_name = SESSION_COMPONENT
    depends_on = [DATABASE_COMPONENT, WALLET_COMPONENT, DHT_COMPONENT, HASH_ANNOUNCER_COMPONENT]

    def __init__(self, component_manager):
        Component.__init__(self, component_manager)
        self.session = None  # Session instance once start() completes

    @property
    def component(self):
        return self.session

    @defer.inlineCallbacks
    def start(self):
        """Construct the Session, wiring in sibling components, and set it up."""
        self.session = Session(
            GCS('data_rate'),
            db_dir=GCS('data_dir'),
            node_id=CS.get_node_id(),
            blob_dir=CS.get_blobfiles_dir(),
            dht_node=self.component_manager.get_component(DHT_COMPONENT),
            hash_announcer=self.component_manager.get_component(HASH_ANNOUNCER_COMPONENT),
            dht_node_port=GCS('dht_node_port'),
            known_dht_nodes=GCS('known_dht_nodes'),
            peer_port=GCS('peer_port'),
            use_upnp=GCS('use_upnp'),
            wallet=self.component_manager.get_component(WALLET_COMPONENT),
            is_generous=GCS('is_generous_host'),
            external_ip=CS.get_external_ip(),
            storage=self.component_manager.get_component(DATABASE_COMPONENT)
        )
        yield self.session.setup()

    @defer.inlineCallbacks
    def stop(self):
        yield self.session.shut_down()
class DHTComponent(Component):
    """Starts the DHT node using the ports resolved by the UPnP component."""
    component_name = DHT_COMPONENT
    depends_on = [UPNP_COMPONENT]

    def __init__(self, component_manager):
        Component.__init__(self, component_manager)
        self.dht_node = None  # node.Node once start() completes
        self.upnp_component = None
        self.udp_port, self.peer_port = None, None

    @property
    def component(self):
        return self.dht_node

    @defer.inlineCallbacks
    def start(self):
        """Create the DHT node and join the network via the known nodes."""
        self.upnp_component = self.component_manager.get_component(UPNP_COMPONENT)
        self.udp_port, self.peer_port = self.upnp_component.get_redirects()
        node_id = CS.get_node_id()
        if node_id is None:
            # no configured node id: generate a fresh one
            node_id = generate_id()

        self.dht_node = node.Node(
            node_id=node_id,
            udpPort=self.udp_port,
            externalIP=CS.get_external_ip(),
            peerPort=self.peer_port
        )
        yield self.dht_node.start(GCS('known_dht_nodes'))
        log.info("Joined the dht")

    @defer.inlineCallbacks
    def stop(self):
        yield self.dht_node.stop()
class HashAnnouncer(Component):
    """Announces locally stored blob hashes to the DHT."""
    component_name = HASH_ANNOUNCER_COMPONENT
    depends_on = [DHT_COMPONENT, DATABASE_COMPONENT]

    def __init__(self, component_manager):
        Component.__init__(self, component_manager)
        self.hash_announcer = None  # DHTHashAnnouncer once start() completes

    @property
    def component(self):
        return self.hash_announcer

    @defer.inlineCallbacks
    def start(self):
        storage = self.component_manager.get_component(DATABASE_COMPONENT)
        dht_node = self.component_manager.get_component(DHT_COMPONENT)
        self.hash_announcer = hashannouncer.DHTHashAnnouncer(dht_node, storage)
        yield self.hash_announcer.start()

    @defer.inlineCallbacks
    def stop(self):
        yield self.hash_announcer.stop()
class StreamIdentifier(Component):
    """Sets up the stream descriptor identifier with the encrypted-file downloader factory."""
    component_name = STREAM_IDENTIFIER_COMPONENT
    depends_on = [SESSION_COMPONENT]

    def __init__(self, component_manager):
        Component.__init__(self, component_manager)
        self.sd_identifier = StreamDescriptorIdentifier()

    @property
    def component(self):
        return self.sd_identifier

    @defer.inlineCallbacks
    def start(self):
        """Register the file-saver factory for the encrypted file stream type."""
        session = self.component_manager.get_component(SESSION_COMPONENT)
        add_lbry_file_to_sd_identifier(self.sd_identifier)
        file_saver_factory = EncryptedFileSaverFactory(
            session.peer_finder,
            session.rate_limiter,
            session.blob_manager,
            session.storage,
            session.wallet,
            GCS('download_directory')
        )
        yield self.sd_identifier.add_stream_downloader_factory(EncryptedFileStreamType, file_saver_factory)

    def stop(self):
        # nothing to tear down: the identifier holds no external resources
        pass
class FileManager(Component):
    """Runs the EncryptedFileManager on top of the session and stream identifier."""
    component_name = FILE_MANAGER_COMPONENT
    depends_on = [SESSION_COMPONENT, STREAM_IDENTIFIER_COMPONENT]

    def __init__(self, component_manager):
        Component.__init__(self, component_manager)
        self.file_manager = None  # EncryptedFileManager once start() completes

    @property
    def component(self):
        return self.file_manager

    @defer.inlineCallbacks
    def start(self):
        session = self.component_manager.get_component(SESSION_COMPONENT)
        sd_identifier = self.component_manager.get_component(STREAM_IDENTIFIER_COMPONENT)
        log.info('Starting the file manager')
        self.file_manager = EncryptedFileManager(session, sd_identifier)
        yield self.file_manager.setup()
        log.info('Done setting up file manager')

    @defer.inlineCallbacks
    def stop(self):
        yield self.file_manager.stop()
class PeerProtocolServer(Component):
    """Listens on the peer port and serves blob/wallet-info queries to other peers."""
    component_name = PEER_PROTOCOL_SERVER_COMPONENT
    depends_on = [SESSION_COMPONENT]

    def __init__(self, component_manager):
        Component.__init__(self, component_manager)
        self.lbry_server_port = None  # twisted listening port once bound

    @property
    def component(self):
        return self.lbry_server_port

    @defer.inlineCallbacks
    def start(self):
        """Build the query handlers and bind the TCP peer port (if configured).

        :raises ValueError: when the configured peer port is already in use
        """
        query_handlers = {}
        peer_port = GCS('peer_port')
        session = self.component_manager.get_component(SESSION_COMPONENT)

        handlers = [
            BlobRequestHandlerFactory(
                session.blob_manager,
                session.wallet,
                session.payment_rate_manager,
                self.component_manager.analytics_manager
            ),
            session.wallet.get_wallet_info_query_handler_factory(),
        ]

        for handler in handlers:
            query_id = handler.get_primary_query_identifier()
            query_handlers[query_id] = handler

        if peer_port is not None:
            server_factory = ServerProtocolFactory(session.rate_limiter, query_handlers, session.peer_manager)

            try:
                log.info("Peer protocol listening on TCP %d", peer_port)
                self.lbry_server_port = yield reactor.listenTCP(peer_port, server_factory)
            except error.CannotListenError as e:
                import traceback
                log.error("Couldn't bind to port %d. Visit lbry.io/faq/how-to-change-port for"
                          " more details.", peer_port)
                log.error("%s", traceback.format_exc())
                raise ValueError("%s lbrynet may already be running on your computer." % str(e))

    @defer.inlineCallbacks
    def stop(self):
        if self.lbry_server_port is not None:
            # clear the attribute before the (async) close so re-entrant stops are no-ops
            self.lbry_server_port, old_port = None, self.lbry_server_port
            log.info('Stop listening on port %s', old_port.port)
            yield old_port.stopListening()
class ReflectorComponent(Component):
    """Optionally runs a reflector TCP server so other nodes can reflect blobs here."""
    component_name = REFLECTOR_COMPONENT
    depends_on = [SESSION_COMPONENT, FILE_MANAGER_COMPONENT]

    def __init__(self, component_manager):
        Component.__init__(self, component_manager)
        # holds the configured port number before start(); after a successful
        # start it is rebound to the twisted listening-port object
        self.reflector_server_port = GCS('reflector_port')
        self.run_reflector_server = GCS('run_reflector_server')

    @property
    def component(self):
        return self

    @defer.inlineCallbacks
    def start(self):
        """Bind the reflector port when reflecting is enabled.

        :raises ValueError: when the configured reflector port is already in use
        """
        session = self.component_manager.get_component(SESSION_COMPONENT)
        file_manager = self.component_manager.get_component(FILE_MANAGER_COMPONENT)

        if self.run_reflector_server and self.reflector_server_port is not None:
            log.info("Starting reflector server")
            reflector_factory = reflector_server_factory(session.peer_manager, session.blob_manager, file_manager)
            try:
                self.reflector_server_port = yield reactor.listenTCP(self.reflector_server_port, reflector_factory)
                log.info('Started reflector on port %s', self.reflector_server_port)
            except error.CannotListenError as e:
                log.exception("Couldn't bind reflector to port %d", self.reflector_server_port)
                raise ValueError("{} lbrynet may already be running on your computer.".format(e))

    @defer.inlineCallbacks
    def stop(self):
        """Close the reflector listening port if one was opened.

        Fix: the original method contained ``yield p.stopListening`` inside a
        plain ``def``, turning ``stop`` into a generator that was never
        iterated — ``stopListening()`` was never actually invoked (and the
        parentheses were missing), so the port stayed open on shutdown.
        """
        if self.run_reflector_server and self.reflector_server_port is not None:
            log.info("Stopping reflector server")
            if self.reflector_server_port is not None:
                self.reflector_server_port, p = None, self.reflector_server_port
                yield p.stopListening()
class UPnPComponent(Component):
    """Sets up (and tears down) UPnP port redirects for the peer and DHT ports."""

    component_name = UPNP_COMPONENT

    def __init__(self, component_manager):
        Component.__init__(self, component_manager)
        # ports we want forwarded; start() may rewrite them if the router
        # already maps the requested external port to a different host
        self.peer_port = GCS('peer_port')
        self.dht_node_port = GCS('dht_node_port')
        self.use_upnp = GCS('use_upnp')
        # externally visible IP from settings; start() may replace it with the
        # router-reported address when no value was configured
        self.external_ip = CS.get_external_ip()
        # (port, protocol) pairs successfully mapped by start(); consumed by stop()
        self.upnp_redirects = []

    @property
    def component(self):
        return self

    def get_redirects(self):
        """Return the (possibly remapped) peer and DHT node ports."""
        return self.peer_port, self.dht_node_port

    def start(self):
        """Try to set up UPnP redirects in a worker thread.

        Returns a deferred that fires True when redirects were created,
        False when UPnP is disabled, no gateway device was discovered, or
        the attempt failed.
        """
        log.debug("In _try_upnp")

        def get_free_port(upnp, port, protocol):
            # returns an existing mapping if it exists
            mapping = upnp.getspecificportmapping(port, protocol)
            if not mapping:
                return port
            if upnp.lanaddr == mapping[0]:
                return mapping[1]
            # port already mapped to another host: probe the next port up
            return get_free_port(upnp, port + 1, protocol)

        def get_port_mapping(upnp, port, protocol, description):
            # try to map to the requested port, if there is already a mapping use the next external
            # port available
            if protocol not in ['UDP', 'TCP']:
                raise Exception("invalid protocol")
            port = get_free_port(upnp, port, protocol)
            if isinstance(port, tuple):
                log.info("Found existing UPnP redirect %s:%i (%s) to %s:%i, using it",
                         self.external_ip, port, protocol, upnp.lanaddr, port)
                return port
            upnp.addportmapping(port, protocol, upnp.lanaddr, port,
                                description, '')
            log.info("Set UPnP redirect %s:%i (%s) to %s:%i", self.external_ip, port,
                     protocol, upnp.lanaddr, port)
            return port

        def threaded_try_upnp():
            # executed via deferToThread below; returns True on success
            if self.use_upnp is False:
                log.debug("Not using upnp")
                return False
            u = miniupnpc.UPnP()
            num_devices_found = u.discover()
            if num_devices_found > 0:
                u.selectigd()
                external_ip = u.externalipaddress()
                if external_ip != '0.0.0.0' and not self.external_ip:
                    # best not to rely on this external ip, the router can be behind layers of NATs
                    self.external_ip = external_ip
                if self.peer_port:
                    self.peer_port = get_port_mapping(u, self.peer_port, 'TCP', 'LBRY peer port')
                    self.upnp_redirects.append((self.peer_port, 'TCP'))
                if self.dht_node_port:
                    self.dht_node_port = get_port_mapping(u, self.dht_node_port, 'UDP', 'LBRY DHT port')
                    self.upnp_redirects.append((self.dht_node_port, 'UDP'))
                return True
            return False

        def upnp_failed(err):
            # swallow the failure: UPnP is best-effort, the daemon keeps running
            log.warning("UPnP failed. Reason: %s", err.getErrorMessage())
            return False

        d = threads.deferToThread(threaded_try_upnp)
        d.addErrback(upnp_failed)
        return d

    def stop(self):
        """Remove the UPnP redirects created by start(), in a worker thread."""
        log.info("Unsetting upnp for session")

        def threaded_unset_upnp():
            u = miniupnpc.UPnP()
            num_devices_found = u.discover()
            if num_devices_found > 0:
                u.selectigd()
                for port, protocol in self.upnp_redirects:
                    if u.getspecificportmapping(port, protocol) is None:
                        log.warning(
                            "UPnP redirect for %s %d was removed by something else.",
                            protocol, port)
                    else:
                        u.deleteportmapping(port, protocol)
                        log.info("Removed UPnP redirect for %s %d.", protocol, port)
                self.upnp_redirects = []

        d = threads.deferToThread(threaded_unset_upnp)
        # best-effort teardown: errors are stringified and discarded
        d.addErrback(lambda err: str(err))
        return d

File diff suppressed because it is too large Load diff

View file

@ -7,7 +7,7 @@ from collections import OrderedDict
from lbrynet import conf from lbrynet import conf
from lbrynet.core import utils from lbrynet.core import utils
from lbrynet.daemon.auth.client import JSONRPCException, LBRYAPIClient, AuthAPIClient from lbrynet.daemon.auth.client import JSONRPCException, LBRYAPIClient, AuthAPIClient
from lbrynet.daemon.Daemon import LOADING_WALLET_CODE, Daemon from lbrynet.daemon.Daemon import Daemon
from lbrynet.core.system_info import get_platform from lbrynet.core.system_info import get_platform
from jsonrpc.common import RPCError from jsonrpc.common import RPCError
from requests.exceptions import ConnectionError from requests.exceptions import ConnectionError
@ -21,17 +21,13 @@ def remove_brackets(key):
return key return key
def set_flag_vals(flag_names, parsed_args): def set_kwargs(parsed_args):
kwargs = OrderedDict() kwargs = OrderedDict()
for key, arg in parsed_args.iteritems(): for key, arg in parsed_args.iteritems():
if arg is None: if arg is None:
continue continue
elif key.startswith("--"): elif key.startswith("--") and remove_brackets(key[2:]) not in kwargs:
if remove_brackets(key[2:]) not in kwargs: k = remove_brackets(key[2:])
k = remove_brackets(key[2:])
elif key in flag_names:
if remove_brackets(flag_names[key]) not in kwargs:
k = remove_brackets(flag_names[key])
elif remove_brackets(key) not in kwargs: elif remove_brackets(key) not in kwargs:
k = remove_brackets(key) k = remove_brackets(key)
kwargs[k] = guess_type(arg, k) kwargs[k] = guess_type(arg, k)
@ -79,26 +75,22 @@ def main():
method = new_method method = new_method
fn = Daemon.callable_methods[method] fn = Daemon.callable_methods[method]
if hasattr(fn, "_flags"):
flag_names = fn._flags
else:
flag_names = {}
parsed = docopt(fn.__doc__, args) parsed = docopt(fn.__doc__, args)
kwargs = set_flag_vals(flag_names, parsed) kwargs = set_kwargs(parsed)
colorama.init() colorama.init()
conf.initialize_settings() conf.initialize_settings()
try: try:
api = LBRYAPIClient.get_client() api = LBRYAPIClient.get_client()
status = api.status() api.status()
except (URLError, ConnectionError) as err: except (URLError, ConnectionError) as err:
if isinstance(err, HTTPError) and err.code == UNAUTHORIZED: if isinstance(err, HTTPError) and err.code == UNAUTHORIZED:
api = AuthAPIClient.config() api = AuthAPIClient.config()
# this can happen if the daemon is using auth with the --http-auth flag # this can happen if the daemon is using auth with the --http-auth flag
# when the config setting is to not use it # when the config setting is to not use it
try: try:
status = api.status() api.status()
except: except:
print_error("Daemon requires authentication, but none was provided.", print_error("Daemon requires authentication, but none was provided.",
suggest_help=False) suggest_help=False)
@ -108,20 +100,6 @@ def main():
suggest_help=False) suggest_help=False)
return 1 return 1
status_code = status['startup_status']['code']
if status_code != "started" and method not in Daemon.allowed_during_startup:
print "Daemon is in the process of starting. Please try again in a bit."
message = status['startup_status']['message']
if message:
if (
status['startup_status']['code'] == LOADING_WALLET_CODE
and status['blockchain_status']['blocks_behind'] > 0
):
message += '. Blocks left: ' + str(status['blockchain_status']['blocks_behind'])
print " Status: " + message
return 1
# TODO: check if port is bound. Error if its not # TODO: check if port is bound. Error if its not
try: try:

View file

@ -1,3 +1,3 @@
import Components # register Component classes
from lbrynet.daemon.auth.client import LBRYAPIClient from lbrynet.daemon.auth.client import LBRYAPIClient
get_client = LBRYAPIClient.get_client get_client = LBRYAPIClient.get_client

View file

@ -4,6 +4,7 @@ import json
import inspect import inspect
from decimal import Decimal from decimal import Decimal
from functools import wraps
from zope.interface import implements from zope.interface import implements
from twisted.web import server, resource from twisted.web import server, resource
from twisted.internet import defer from twisted.internet import defer
@ -15,6 +16,7 @@ from traceback import format_exc
from lbrynet import conf from lbrynet import conf
from lbrynet.core.Error import InvalidAuthenticationToken from lbrynet.core.Error import InvalidAuthenticationToken
from lbrynet.core import utils from lbrynet.core import utils
from lbrynet.core.Error import ComponentsNotStarted, ComponentStartConditionNotMet
from lbrynet.daemon.auth.util import APIKey, get_auth_message from lbrynet.daemon.auth.util import APIKey, get_auth_message
from lbrynet.daemon.auth.client import LBRY_SECRET from lbrynet.daemon.auth.client import LBRY_SECRET
from lbrynet.undecorated import undecorated from lbrynet.undecorated import undecorated
@ -141,6 +143,27 @@ class AuthorizedBase(object):
return f return f
return _deprecated_wrapper return _deprecated_wrapper
@staticmethod
def requires(*components, **component_conditionals):
    """Decorator factory gating an API method on component availability.

    The wrapped method only runs when every named component is running
    (per ``component_manager.all_components_running``); otherwise
    ``ComponentsNotStarted`` is raised. Keyword arguments map a component
    name to a callable condition that is evaluated (and may return a
    deferred) against that component; a falsy result raises
    ``ComponentStartConditionNotMet``.

    NOTE(review): assumes ``args[0]`` is the instance carrying
    ``component_manager`` — i.e. the decorator is applied to bound methods.
    """
    def _wrap(fn):
        @defer.inlineCallbacks
        @wraps(fn)
        def _inner(*args, **kwargs):
            if component_conditionals:
                for component_name, condition in component_conditionals.iteritems():
                    if not callable(condition):
                        raise SyntaxError("The specified condition is invalid/not callable")
                    # condition may return a deferred; yield resolves it
                    if not (yield condition(args[0].component_manager.get_component(component_name))):
                        raise ComponentStartConditionNotMet(
                            "Not all conditions required to start component are met")
            if args[0].component_manager.all_components_running(*components):
                result = yield fn(*args, **kwargs)
                defer.returnValue(result)
            else:
                raise ComponentsNotStarted("Not all required components are set up:", components)
        return _inner
    return _wrap
class AuthJSONRPCServer(AuthorizedBase): class AuthJSONRPCServer(AuthorizedBase):
""" """
@ -149,7 +172,6 @@ class AuthJSONRPCServer(AuthorizedBase):
API methods are named with a leading "jsonrpc_" API methods are named with a leading "jsonrpc_"
Attributes: Attributes:
allowed_during_startup (list): list of api methods that are callable before the server has finished startup
sessions (dict): (dict): {<session id>: <lbrynet.daemon.auth.util.APIKey>} sessions (dict): (dict): {<session id>: <lbrynet.daemon.auth.util.APIKey>}
callable_methods (dict): {<api method name>: <api method>} callable_methods (dict): {<api method name>: <api method>}
@ -416,9 +438,6 @@ class AuthJSONRPCServer(AuthorizedBase):
def _verify_method_is_callable(self, function_path): def _verify_method_is_callable(self, function_path):
if function_path not in self.callable_methods: if function_path not in self.callable_methods:
raise UnknownAPIMethodError(function_path) raise UnknownAPIMethodError(function_path)
if not self.announced_startup:
if function_path not in self.allowed_during_startup:
raise NotAllowedDuringStartupError(function_path)
def _get_jsonrpc_method(self, function_path): def _get_jsonrpc_method(self, function_path):
if function_path in self.deprecated_methods: if function_path in self.deprecated_methods:

View file

@ -39,6 +39,7 @@ DummyBlobAvailabilityTracker = mocks.BlobAvailabilityTracker
log_format = "%(funcName)s(): %(message)s" log_format = "%(funcName)s(): %(message)s"
logging.basicConfig(level=logging.CRITICAL, format=log_format) logging.basicConfig(level=logging.CRITICAL, format=log_format)
TEST_SKIP_STRING_ANDROID = "Test cannot pass on Android because multiprocessing is not supported at the OS level."
def require_system(system): def require_system(system):
def wrapper(fn): def wrapper(fn):
@ -103,13 +104,15 @@ class LbryUploader(object):
rate_limiter = RateLimiter() rate_limiter = RateLimiter()
self.sd_identifier = StreamDescriptorIdentifier() self.sd_identifier = StreamDescriptorIdentifier()
self.db_dir, self.blob_dir = mk_db_and_blob_dir() self.db_dir, self.blob_dir = mk_db_and_blob_dir()
dht_node = FakeNode(peer_finder=peer_finder, peer_manager=peer_manager, udpPort=4445, peerPort=5553,
node_id="abcd", externalIP="127.0.0.1")
self.session = Session( self.session = Session(
conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=self.db_dir, blob_dir=self.blob_dir, conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=self.db_dir, blob_dir=self.blob_dir,
node_id="abcd", peer_finder=peer_finder, hash_announcer=hash_announcer, node_id="abcd", peer_finder=peer_finder, hash_announcer=hash_announcer,
peer_port=5553, dht_node_port=4445, use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, peer_port=5553, dht_node_port=4445, use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
blob_tracker_class=DummyBlobAvailabilityTracker, blob_tracker_class=DummyBlobAvailabilityTracker,
dht_node_class=FakeNode, is_generous=self.is_generous, external_ip="127.0.0.1") dht_node=dht_node, is_generous=self.is_generous, external_ip="127.0.0.1")
self.lbry_file_manager = EncryptedFileManager(self.session, self.sd_identifier) self.lbry_file_manager = EncryptedFileManager(self.session, self.sd_identifier)
if self.ul_rate_limit is not None: if self.ul_rate_limit is not None:
self.session.rate_limiter.set_ul_limit(self.ul_rate_limit) self.session.rate_limiter.set_ul_limit(self.ul_rate_limit)
@ -197,7 +200,7 @@ def start_lbry_reuploader(sd_hash, kill_event, dead_event,
db_dir, blob_dir = mk_db_and_blob_dir() db_dir, blob_dir = mk_db_and_blob_dir()
session = Session(conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, session = Session(conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir,
node_id="abcd" + str(n), dht_node_port=4446, dht_node_class=FakeNode, node_id="abcd" + str(n), dht_node_port=4446,
peer_finder=peer_finder, hash_announcer=hash_announcer, peer_finder=peer_finder, hash_announcer=hash_announcer,
blob_dir=blob_dir, peer_port=peer_port, blob_dir=blob_dir, peer_port=peer_port,
use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
@ -303,13 +306,16 @@ def start_blob_uploader(blob_hash_queue, kill_event, dead_event, slow, is_genero
db_dir, blob_dir = mk_db_and_blob_dir() db_dir, blob_dir = mk_db_and_blob_dir()
dht_node = FakeNode(peer_finder=peer_finder, peer_manager=peer_manager, udpPort=4445, peerPort=5553,
node_id="abcd", externalIP="127.0.0.1")
session = Session(conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, node_id="efgh", session = Session(conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, node_id="efgh",
peer_finder=peer_finder, hash_announcer=hash_announcer, dht_node_class=FakeNode, peer_finder=peer_finder, hash_announcer=hash_announcer,
blob_dir=blob_dir, peer_port=peer_port, dht_node_port=4446, blob_dir=blob_dir, peer_port=peer_port, dht_node_port=4446,
use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
blob_tracker_class=DummyBlobAvailabilityTracker, blob_tracker_class=DummyBlobAvailabilityTracker,
is_generous=conf.ADJUSTABLE_SETTINGS['is_generous_host'][1], is_generous=conf.ADJUSTABLE_SETTINGS['is_generous_host'][1],
external_ip="127.0.0.1") external_ip="127.0.0.1", dht_node=dht_node)
if slow is True: if slow is True:
session.rate_limiter.set_ul_limit(2 ** 11) session.rate_limiter.set_ul_limit(2 ** 11)
@ -478,6 +484,8 @@ class TestTransfer(TestCase):
hash_announcer = FakeAnnouncer() hash_announcer = FakeAnnouncer()
rate_limiter = DummyRateLimiter() rate_limiter = DummyRateLimiter()
sd_identifier = StreamDescriptorIdentifier() sd_identifier = StreamDescriptorIdentifier()
dht_node = FakeNode(peer_finder=peer_finder, peer_manager=peer_manager, udpPort=4445, peerPort=5553,
node_id="abcd", externalIP="127.0.0.1")
db_dir, blob_dir = mk_db_and_blob_dir() db_dir, blob_dir = mk_db_and_blob_dir()
self.session = Session( self.session = Session(
@ -486,7 +494,7 @@ class TestTransfer(TestCase):
blob_dir=blob_dir, peer_port=5553, dht_node_port=4445, blob_dir=blob_dir, peer_port=5553, dht_node_port=4445,
use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
blob_tracker_class=DummyBlobAvailabilityTracker, blob_tracker_class=DummyBlobAvailabilityTracker,
dht_node_class=FakeNode, is_generous=self.is_generous, external_ip="127.0.0.1") dht_node=dht_node, is_generous=self.is_generous, external_ip="127.0.0.1")
self.lbry_file_manager = EncryptedFileManager( self.lbry_file_manager = EncryptedFileManager(
self.session, sd_identifier) self.session, sd_identifier)
@ -566,14 +574,16 @@ class TestTransfer(TestCase):
peer_finder = FakePeerFinder(5553, peer_manager, 2) peer_finder = FakePeerFinder(5553, peer_manager, 2)
hash_announcer = FakeAnnouncer() hash_announcer = FakeAnnouncer()
rate_limiter = DummyRateLimiter() rate_limiter = DummyRateLimiter()
dht_node = FakeNode(peer_finder=peer_finder, peer_manager=peer_manager, udpPort=4445, peerPort=5553,
node_id="abcd", externalIP="127.0.0.1")
db_dir, blob_dir = mk_db_and_blob_dir() db_dir, blob_dir = mk_db_and_blob_dir()
self.session = Session( self.session = Session(
conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, node_id="abcd", conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, node_id="abcd",
peer_finder=peer_finder, hash_announcer=hash_announcer, peer_finder=peer_finder, hash_announcer=hash_announcer,
blob_dir=blob_dir, peer_port=5553, dht_node_port=4445, dht_node_class=FakeNode, blob_dir=blob_dir, peer_port=5553, dht_node_port=4445,
use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
blob_tracker_class=DummyBlobAvailabilityTracker, blob_tracker_class=DummyBlobAvailabilityTracker, dht_node=dht_node,
is_generous=conf.ADJUSTABLE_SETTINGS['is_generous_host'][1], external_ip="127.0.0.1") is_generous=conf.ADJUSTABLE_SETTINGS['is_generous_host'][1], external_ip="127.0.0.1")
d1 = self.wait_for_hash_from_queue(blob_hash_queue_1) d1 = self.wait_for_hash_from_queue(blob_hash_queue_1)
@ -646,17 +656,19 @@ class TestTransfer(TestCase):
hash_announcer = FakeAnnouncer() hash_announcer = FakeAnnouncer()
rate_limiter = DummyRateLimiter() rate_limiter = DummyRateLimiter()
sd_identifier = StreamDescriptorIdentifier() sd_identifier = StreamDescriptorIdentifier()
dht_node = FakeNode(peer_finder=peer_finder, peer_manager=peer_manager, udpPort=4445, peerPort=5553,
node_id="abcd", externalIP="127.0.0.1")
downloaders = [] downloaders = []
db_dir, blob_dir = mk_db_and_blob_dir() db_dir, blob_dir = mk_db_and_blob_dir()
self.session = Session(conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, self.session = Session(conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir,
node_id="abcd", peer_finder=peer_finder, dht_node_port=4445, dht_node_class=FakeNode, node_id="abcd", peer_finder=peer_finder, dht_node_port=4445,
hash_announcer=hash_announcer, blob_dir=blob_dir, peer_port=5553, hash_announcer=hash_announcer, blob_dir=blob_dir, peer_port=5553,
use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
blob_tracker_class=DummyBlobAvailabilityTracker, blob_tracker_class=DummyBlobAvailabilityTracker,
is_generous=conf.ADJUSTABLE_SETTINGS['is_generous_host'][1], is_generous=conf.ADJUSTABLE_SETTINGS['is_generous_host'][1],
external_ip="127.0.0.1") external_ip="127.0.0.1", dht_node=dht_node)
self.lbry_file_manager = EncryptedFileManager(self.session, sd_identifier) self.lbry_file_manager = EncryptedFileManager(self.session, sd_identifier)
@ -758,7 +770,7 @@ class TestTransfer(TestCase):
sd_identifier = StreamDescriptorIdentifier() sd_identifier = StreamDescriptorIdentifier()
db_dir, blob_dir = mk_db_and_blob_dir() db_dir, blob_dir = mk_db_and_blob_dir()
self.session = Session(conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, dht_node_class=FakeNode, self.session = Session(conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir,
node_id="abcd", peer_finder=peer_finder, dht_node_port=4445, node_id="abcd", peer_finder=peer_finder, dht_node_port=4445,
hash_announcer=hash_announcer, blob_dir=blob_dir, hash_announcer=hash_announcer, blob_dir=blob_dir,
peer_port=5553, use_upnp=False, rate_limiter=rate_limiter, peer_port=5553, use_upnp=False, rate_limiter=rate_limiter,
@ -842,3 +854,10 @@ class TestTransfer(TestCase):
d.addBoth(stop) d.addBoth(stop)
return d return d
if is_android():
test_lbry_transfer.skip = TEST_SKIP_STRING_ANDROID
test_last_blob_retrieval.skip = TEST_SKIP_STRING_ANDROID
test_double_download.skip = TEST_SKIP_STRING_ANDROID
test_multiple_uploaders.skip = TEST_SKIP_STRING_ANDROID

View file

@ -53,6 +53,7 @@ class TestReflector(unittest.TestCase):
db_dir=self.db_dir, db_dir=self.db_dir,
node_id="abcd", node_id="abcd",
peer_finder=peer_finder, peer_finder=peer_finder,
peer_manager=peer_manager,
blob_dir=self.blob_dir, blob_dir=self.blob_dir,
peer_port=5553, peer_port=5553,
dht_node_port=4444, dht_node_port=4444,
@ -60,8 +61,8 @@ class TestReflector(unittest.TestCase):
wallet=wallet, wallet=wallet,
blob_tracker_class=mocks.BlobAvailabilityTracker, blob_tracker_class=mocks.BlobAvailabilityTracker,
external_ip="127.0.0.1", external_ip="127.0.0.1",
dht_node_class=mocks.Node, dht_node=mocks.Node,
hash_announcer=mocks.Announcer() hash_announcer=mocks.Announcer(),
) )
self.lbry_file_manager = EncryptedFileManager.EncryptedFileManager(self.session, self.lbry_file_manager = EncryptedFileManager.EncryptedFileManager(self.session,
@ -74,6 +75,7 @@ class TestReflector(unittest.TestCase):
db_dir=self.server_db_dir, db_dir=self.server_db_dir,
node_id="abcd", node_id="abcd",
peer_finder=peer_finder, peer_finder=peer_finder,
peer_manager=peer_manager,
blob_dir=self.server_blob_dir, blob_dir=self.server_blob_dir,
peer_port=5554, peer_port=5554,
dht_node_port=4443, dht_node_port=4443,
@ -81,8 +83,8 @@ class TestReflector(unittest.TestCase):
wallet=wallet, wallet=wallet,
blob_tracker_class=mocks.BlobAvailabilityTracker, blob_tracker_class=mocks.BlobAvailabilityTracker,
external_ip="127.0.0.1", external_ip="127.0.0.1",
dht_node_class=mocks.Node, dht_node=mocks.Node,
hash_announcer=mocks.Announcer() hash_announcer=mocks.Announcer(),
) )
self.server_blob_manager = BlobManager.DiskBlobManager(self.server_blob_dir, self.server_blob_manager = BlobManager.DiskBlobManager(self.server_blob_dir,

View file

@ -30,6 +30,7 @@ DummyBlobAvailabilityTracker = mocks.BlobAvailabilityTracker
class TestStreamify(TestCase): class TestStreamify(TestCase):
maxDiff = 5000 maxDiff = 5000
def setUp(self): def setUp(self):
mocks.mock_conf_settings(self) mocks.mock_conf_settings(self)
self.session = None self.session = None
@ -37,6 +38,12 @@ class TestStreamify(TestCase):
self.is_generous = True self.is_generous = True
self.db_dir = tempfile.mkdtemp() self.db_dir = tempfile.mkdtemp()
self.blob_dir = os.path.join(self.db_dir, "blobfiles") self.blob_dir = os.path.join(self.db_dir, "blobfiles")
self.dht_node = FakeNode()
self.wallet = FakeWallet()
self.peer_manager = PeerManager()
self.peer_finder = FakePeerFinder(5553, self.peer_manager, 2)
self.rate_limiter = DummyRateLimiter()
self.sd_identifier = StreamDescriptorIdentifier()
os.mkdir(self.blob_dir) os.mkdir(self.blob_dir)
@defer.inlineCallbacks @defer.inlineCallbacks
@ -54,26 +61,17 @@ class TestStreamify(TestCase):
os.remove("test_file") os.remove("test_file")
def test_create_stream(self): def test_create_stream(self):
wallet = FakeWallet()
peer_manager = PeerManager()
peer_finder = FakePeerFinder(5553, peer_manager, 2)
hash_announcer = FakeAnnouncer()
rate_limiter = DummyRateLimiter()
sd_identifier = StreamDescriptorIdentifier()
self.session = Session( self.session = Session(
conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=self.db_dir, node_id="abcd", conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=self.db_dir, node_id="abcd", peer_finder=self.peer_finder,
peer_finder=peer_finder, hash_announcer=hash_announcer, blob_dir=self.blob_dir, peer_port=5553, use_upnp=False, rate_limiter=self.rate_limiter, wallet=self.wallet,
blob_dir=self.blob_dir, peer_port=5553, blob_tracker_class=DummyBlobAvailabilityTracker, external_ip="127.0.0.1", dht_node=self.dht_node
use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
blob_tracker_class=DummyBlobAvailabilityTracker,
is_generous=self.is_generous, external_ip="127.0.0.1", dht_node_class=mocks.Node
) )
self.lbry_file_manager = EncryptedFileManager(self.session, sd_identifier) self.lbry_file_manager = EncryptedFileManager(self.session, self.sd_identifier)
d = self.session.setup() d = self.session.setup()
d.addCallback(lambda _: add_lbry_file_to_sd_identifier(sd_identifier)) d.addCallback(lambda _: add_lbry_file_to_sd_identifier(self.sd_identifier))
d.addCallback(lambda _: self.lbry_file_manager.setup()) d.addCallback(lambda _: self.lbry_file_manager.setup())
def verify_equal(sd_info): def verify_equal(sd_info):
@ -102,22 +100,14 @@ class TestStreamify(TestCase):
return d return d
def test_create_and_combine_stream(self): def test_create_and_combine_stream(self):
wallet = FakeWallet()
peer_manager = PeerManager()
peer_finder = FakePeerFinder(5553, peer_manager, 2)
hash_announcer = FakeAnnouncer()
rate_limiter = DummyRateLimiter()
sd_identifier = StreamDescriptorIdentifier()
self.session = Session( self.session = Session(
conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=self.db_dir, node_id="abcd", conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=self.db_dir, node_id="abcd", peer_finder=self.peer_finder,
peer_finder=peer_finder, hash_announcer=hash_announcer, blob_dir=self.blob_dir, peer_port=5553, use_upnp=False, rate_limiter=self.rate_limiter, wallet=self.wallet,
blob_dir=self.blob_dir, peer_port=5553, dht_node_class=mocks.Node, blob_tracker_class=DummyBlobAvailabilityTracker, external_ip="127.0.0.1", dht_node=self.dht_node
use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
blob_tracker_class=DummyBlobAvailabilityTracker, external_ip="127.0.0.1"
) )
self.lbry_file_manager = EncryptedFileManager(self.session, sd_identifier) self.lbry_file_manager = EncryptedFileManager(self.session, self.sd_identifier)
@defer.inlineCallbacks @defer.inlineCallbacks
def create_stream(): def create_stream():
@ -132,7 +122,7 @@ class TestStreamify(TestCase):
self.assertEqual(hashsum.hexdigest(), "68959747edc73df45e45db6379dd7b3b") self.assertEqual(hashsum.hexdigest(), "68959747edc73df45e45db6379dd7b3b")
d = self.session.setup() d = self.session.setup()
d.addCallback(lambda _: add_lbry_file_to_sd_identifier(sd_identifier)) d.addCallback(lambda _: add_lbry_file_to_sd_identifier(self.sd_identifier))
d.addCallback(lambda _: self.lbry_file_manager.setup()) d.addCallback(lambda _: self.lbry_file_manager.setup())
d.addCallback(lambda _: create_stream()) d.addCallback(lambda _: create_stream())
return d return d

View file

@ -1,5 +1,6 @@
import base64 import base64
import io import io
import mock
from cryptography.hazmat.backends import default_backend from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import rsa from cryptography.hazmat.primitives.asymmetric import rsa
@ -10,6 +11,7 @@ from twisted.python.failure import Failure
from lbrynet.core.client.ClientRequest import ClientRequest from lbrynet.core.client.ClientRequest import ClientRequest
from lbrynet.core.Error import RequestCanceledError from lbrynet.core.Error import RequestCanceledError
from lbrynet.core import BlobAvailability from lbrynet.core import BlobAvailability
from lbrynet.file_manager.EncryptedFileManager import EncryptedFileManager
from lbrynet.dht.node import Node as RealNode from lbrynet.dht.node import Node as RealNode
from lbrynet.daemon import ExchangeRateManager as ERM from lbrynet.daemon import ExchangeRateManager as ERM
from lbrynet import conf from lbrynet import conf
@ -63,6 +65,7 @@ class BTCLBCFeed(ERM.MarketFeed):
0.0 0.0
) )
class USDBTCFeed(ERM.MarketFeed): class USDBTCFeed(ERM.MarketFeed):
def __init__(self): def __init__(self):
ERM.MarketFeed.__init__( ERM.MarketFeed.__init__(
@ -74,6 +77,7 @@ class USDBTCFeed(ERM.MarketFeed):
0.0 0.0
) )
class ExchangeRateManager(ERM.ExchangeRateManager): class ExchangeRateManager(ERM.ExchangeRateManager):
def __init__(self, market_feeds, rates): def __init__(self, market_feeds, rates):
self.market_feeds = market_feeds self.market_feeds = market_feeds
@ -360,6 +364,96 @@ class BlobAvailabilityTracker(BlobAvailability.BlobAvailabilityTracker):
pass pass
# The components below viz. FakeWallet, FakeSession, FakeFileManager are just for testing Component Manager's
# startup and stop
class FakeComponent(object):
    """Minimal stand-in for a daemon Component, used by ComponentManager tests."""

    depends_on = []
    component_name = None

    def __init__(self, component_manager):
        self.component_manager = component_manager
        self._running = False

    @property
    def running(self):
        return self._running

    @property
    def component(self):
        return self

    def start(self):
        # subclasses must override this
        raise NotImplementedError

    def stop(self):
        return defer.succeed(None)

    @defer.inlineCallbacks
    def _setup(self):
        started = yield defer.maybeDeferred(self.start)
        self._running = True
        defer.returnValue(started)

    @defer.inlineCallbacks
    def _stop(self):
        stopped = yield defer.maybeDeferred(self.stop)
        self._running = False
        defer.returnValue(stopped)
class FakeDelayedWallet(FakeComponent):
    """Wallet mock: starts immediately, takes one reactor-second to stop."""

    component_name = "wallet"
    depends_on = []

    def start(self):
        return defer.succeed(True)

    def stop(self):
        deferred = defer.Deferred()
        self.component_manager.reactor.callLater(1, deferred.callback, True)
        return deferred
class FakeDelayedSession(FakeComponent):
    """Session mock: both start and stop take one reactor-second."""

    component_name = "session"
    depends_on = [FakeDelayedWallet.component_name]

    def start(self):
        return self._fire_after_one_second()

    def stop(self):
        return self._fire_after_one_second()

    def _fire_after_one_second(self):
        # deferred that fires True one reactor-second from now
        deferred = defer.Deferred()
        self.component_manager.reactor.callLater(1, deferred.callback, True)
        return deferred
class FakeDelayedFileManager(FakeComponent):
    """File manager mock: start takes one reactor-second, stop is immediate."""

    component_name = "file_manager"
    depends_on = [FakeDelayedSession.component_name]

    def start(self):
        deferred = defer.Deferred()
        self.component_manager.reactor.callLater(1, deferred.callback, True)
        return deferred

    def stop(self):
        return defer.succeed(True)
class FakeFileManager(FakeComponent):
    """File manager mock whose exposed component is a Mock speccing EncryptedFileManager."""

    component_name = "fileManager"
    depends_on = []

    @property
    def component(self):
        # NOTE(review): returns a fresh Mock on every access — callers get
        # different objects each time; confirm tests do not rely on identity
        return mock.Mock(spec=EncryptedFileManager)

    def start(self):
        return defer.succeed(True)

    def stop(self):
        # returns None; FakeComponent._stop wraps it with maybeDeferred
        pass
create_stream_sd_file = { create_stream_sd_file = {
'stream_name': '746573745f66696c65', 'stream_name': '746573745f66696c65',

View file

@ -0,0 +1,131 @@
from twisted.internet.task import Clock
from twisted.trial import unittest
from lbrynet.daemon.ComponentManager import ComponentManager
from lbrynet.daemon.Components import DATABASE_COMPONENT, DHT_COMPONENT, STREAM_IDENTIFIER_COMPONENT
from lbrynet.daemon.Components import HASH_ANNOUNCER_COMPONENT, REFLECTOR_COMPONENT, UPNP_COMPONENT
from lbrynet.daemon.Components import PEER_PROTOCOL_SERVER_COMPONENT
from lbrynet.daemon import Components
from lbrynet.tests import mocks
class TestComponentManager(unittest.TestCase):
    """Checks dependency-ordered sorting and lookup in ComponentManager."""

    def setUp(self):
        mocks.mock_conf_settings(self)
        self.default_components_sort = [
            [Components.DatabaseComponent,
             Components.UPnPComponent],
            [Components.DHTComponent,
             Components.WalletComponent],
            [Components.HashAnnouncer],
            [Components.SessionComponent],
            [Components.PeerProtocolServer,
             Components.StreamIdentifier],
            [Components.FileManager],
            [Components.ReflectorComponent]
        ]
        self.component_manager = ComponentManager()

    def tearDown(self):
        pass

    def test_sort_components(self):
        # components must sort into the expected dependency stages
        for actual_stage, expected_stage in zip(self.component_manager.sort_components(),
                                                self.default_components_sort):
            self.assertSetEqual({type(component) for component in actual_stage}, set(expected_stage))

    def test_sort_components_reverse(self):
        # reverse=True must yield the same stages in opposite order
        for actual_stage, expected_stage in zip(self.component_manager.sort_components(reverse=True),
                                                reversed(self.default_components_sort)):
            self.assertSetEqual({type(component) for component in actual_stage}, set(expected_stage))

    def test_get_component_not_exists(self):
        with self.assertRaises(NameError):
            self.component_manager.get_component("random_component")
class TestComponentManagerOverrides(unittest.TestCase):
    """Checks ComponentManager keyword overrides of built-in components."""

    def setUp(self):
        mocks.mock_conf_settings(self)

    def test_init_with_overrides(self):
        class FakeWallet(object):
            component_name = "wallet"
            depends_on = []

            def __init__(self, component_manager):
                self.component_manager = component_manager

            @property
            def component(self):
                return self

        manager = ComponentManager(wallet=FakeWallet)
        wallet_component = manager.get_component("wallet")
        # the override must win over the built-in WalletComponent
        self.assertEquals(type(wallet_component), FakeWallet)
        self.assertNotEquals(type(wallet_component), Components.WalletComponent)

    def test_init_with_wrong_overrides(self):
        class FakeRandomComponent(object):
            component_name = "someComponent"
            depends_on = []

        with self.assertRaises(SyntaxError):
            ComponentManager(randomComponent=FakeRandomComponent)
class TestComponentManagerProperStart(unittest.TestCase):
    """Exercise staged startup/shutdown ordering using a fake clock and delayed mock components."""

    def setUp(self):
        self.reactor = Clock()
        mocks.mock_conf_settings(self)
        self.component_manager = ComponentManager(
            skip_components=[DATABASE_COMPONENT, DHT_COMPONENT, HASH_ANNOUNCER_COMPONENT, STREAM_IDENTIFIER_COMPONENT,
                             PEER_PROTOCOL_SERVER_COMPONENT, REFLECTOR_COMPONENT, UPNP_COMPONENT],
            reactor=self.reactor,
            wallet=mocks.FakeDelayedWallet,
            session=mocks.FakeDelayedSession,
            fileManager=mocks.FakeDelayedFileManager
        )

    def tearDown(self):
        pass

    def _assert_running(self, wallet, session, file_manager):
        # Check the running flag of each delayed component in one place.
        expectations = (('wallet', wallet), ('session', session), ('file_manager', file_manager))
        for name, expected in expectations:
            component = self.component_manager.get_component(name)
            if expected:
                self.assertTrue(component.running)
            else:
                self.assertFalse(component.running)

    def test_proper_starting_of_components(self):
        self.component_manager.setup()
        # Only the wallet (first stage) starts immediately.
        self._assert_running(wallet=True, session=False, file_manager=False)
        self.reactor.advance(1)
        # After one tick the session (next stage) is up as well.
        self._assert_running(wallet=True, session=True, file_manager=False)
        self.reactor.advance(1)
        # After another tick the file manager (final stage) is up too.
        self._assert_running(wallet=True, session=True, file_manager=True)

    def test_proper_stopping_of_components(self):
        self.component_manager.setup()
        self.reactor.advance(1)
        self.reactor.advance(1)
        self.component_manager.stop()
        # Shutdown proceeds in reverse dependency order: file manager first...
        self._assert_running(wallet=True, session=True, file_manager=False)
        self.reactor.advance(1)
        # ...then the session...
        self._assert_running(wallet=True, session=False, file_manager=False)
        self.reactor.advance(1)
        # ...and finally the wallet.
        self._assert_running(wallet=False, session=False, file_manager=False)

View file

@ -1,11 +1,10 @@
import mock import mock
import json import json
import unittest
import random import random
from os import path from os import path
from twisted.internet import defer from twisted.internet import defer
from twisted import trial from twisted.trial import unittest
from faker import Faker from faker import Faker
@ -14,12 +13,15 @@ from lbryum.wallet import NewWallet
from lbrynet import conf from lbrynet import conf
from lbrynet.core import Session, PaymentRateManager, Wallet from lbrynet.core import Session, PaymentRateManager, Wallet
from lbrynet.database.storage import SQLiteStorage from lbrynet.database.storage import SQLiteStorage
from lbrynet.daemon.ComponentManager import ComponentManager
from lbrynet.daemon.Components import DATABASE_COMPONENT, DHT_COMPONENT, WALLET_COMPONENT, STREAM_IDENTIFIER_COMPONENT
from lbrynet.daemon.Components import HASH_ANNOUNCER_COMPONENT, REFLECTOR_COMPONENT, UPNP_COMPONENT, SESSION_COMPONENT
from lbrynet.daemon.Components import PEER_PROTOCOL_SERVER_COMPONENT
from lbrynet.daemon.Daemon import Daemon as LBRYDaemon from lbrynet.daemon.Daemon import Daemon as LBRYDaemon
from lbrynet.file_manager.EncryptedFileManager import EncryptedFileManager
from lbrynet.file_manager.EncryptedFileDownloader import ManagedEncryptedFileDownloader from lbrynet.file_manager.EncryptedFileDownloader import ManagedEncryptedFileDownloader
from lbrynet.tests import util from lbrynet.tests import util
from lbrynet.tests.mocks import mock_conf_settings, FakeNetwork from lbrynet.tests.mocks import mock_conf_settings, FakeNetwork, FakeFileManager
from lbrynet.tests.mocks import BlobAvailabilityTracker as DummyBlobAvailabilityTracker from lbrynet.tests.mocks import BlobAvailabilityTracker as DummyBlobAvailabilityTracker
from lbrynet.tests.mocks import ExchangeRateManager as DummyExchangeRateManager from lbrynet.tests.mocks import ExchangeRateManager as DummyExchangeRateManager
from lbrynet.tests.mocks import BTCLBCFeed, USDBTCFeed from lbrynet.tests.mocks import BTCLBCFeed, USDBTCFeed
@ -40,10 +42,10 @@ def get_test_daemon(data_rate=None, generous=True, with_fee=False):
} }
daemon = LBRYDaemon(None) daemon = LBRYDaemon(None)
daemon.session = mock.Mock(spec=Session.Session) daemon.session = mock.Mock(spec=Session.Session)
daemon.session.wallet = mock.Mock(spec=Wallet.LBRYumWallet) daemon.wallet = mock.Mock(spec=Wallet.LBRYumWallet)
daemon.session.wallet.wallet = mock.Mock(spec=NewWallet) daemon.wallet.wallet = mock.Mock(spec=NewWallet)
daemon.session.wallet.wallet.use_encryption = False daemon.wallet.wallet.use_encryption = False
daemon.session.wallet.network = FakeNetwork() daemon.wallet.network = FakeNetwork()
daemon.session.storage = mock.Mock(spec=SQLiteStorage) daemon.session.storage = mock.Mock(spec=SQLiteStorage)
market_feeds = [BTCLBCFeed(), USDBTCFeed()] market_feeds = [BTCLBCFeed(), USDBTCFeed()]
daemon.exchange_rate_manager = DummyExchangeRateManager(market_feeds, rates) daemon.exchange_rate_manager = DummyExchangeRateManager(market_feeds, rates)
@ -73,12 +75,12 @@ def get_test_daemon(data_rate=None, generous=True, with_fee=False):
{"fee": {"USD": {"address": "bQ6BGboPV2SpTMEP7wLNiAcnsZiH8ye6eA", "amount": 0.75}}}) {"fee": {"USD": {"address": "bQ6BGboPV2SpTMEP7wLNiAcnsZiH8ye6eA", "amount": 0.75}}})
daemon._resolve_name = lambda _: defer.succeed(metadata) daemon._resolve_name = lambda _: defer.succeed(metadata)
migrated = smart_decode(json.dumps(metadata)) migrated = smart_decode(json.dumps(metadata))
daemon.session.wallet.resolve = lambda *_: defer.succeed( daemon.wallet.resolve = lambda *_: defer.succeed(
{"test": {'claim': {'value': migrated.claim_dict}}}) {"test": {'claim': {'value': migrated.claim_dict}}})
return daemon return daemon
class TestCostEst(trial.unittest.TestCase): class TestCostEst(unittest.TestCase):
def setUp(self): def setUp(self):
mock_conf_settings(self) mock_conf_settings(self)
util.resetTime(self) util.resetTime(self)
@ -111,7 +113,8 @@ class TestCostEst(trial.unittest.TestCase):
self.assertEquals(daemon.get_est_cost("test", size).result, correct_result) self.assertEquals(daemon.get_est_cost("test", size).result, correct_result)
class TestJsonRpc(trial.unittest.TestCase): class TestJsonRpc(unittest.TestCase):
def setUp(self): def setUp(self):
def noop(): def noop():
return None return None
@ -119,30 +122,39 @@ class TestJsonRpc(trial.unittest.TestCase):
mock_conf_settings(self) mock_conf_settings(self)
util.resetTime(self) util.resetTime(self)
self.test_daemon = get_test_daemon() self.test_daemon = get_test_daemon()
self.test_daemon.session.wallet.is_first_run = False self.test_daemon.wallet.is_first_run = False
self.test_daemon.session.wallet.get_best_blockhash = noop self.test_daemon.wallet.get_best_blockhash = noop
def test_status(self): def test_status(self):
d = defer.maybeDeferred(self.test_daemon.jsonrpc_status) d = defer.maybeDeferred(self.test_daemon.jsonrpc_status)
d.addCallback(lambda status: self.assertDictContainsSubset({'is_running': False}, status)) d.addCallback(lambda status: self.assertDictContainsSubset({'is_running': False}, status))
@unittest.skipIf(is_android(),
'Test cannot pass on Android because PYTHONOPTIMIZE removes the docstrings.')
def test_help(self): def test_help(self):
d = defer.maybeDeferred(self.test_daemon.jsonrpc_help, command='status') d = defer.maybeDeferred(self.test_daemon.jsonrpc_help, command='status')
d.addCallback(lambda result: self.assertSubstring('daemon status', result['help'])) d.addCallback(lambda result: self.assertSubstring('daemon status', result['help']))
# self.assertSubstring('daemon status', d.result) # self.assertSubstring('daemon status', d.result)
if is_android():
test_help.skip = "Test cannot pass on Android because PYTHONOPTIMIZE removes the docstrings."
class TestFileListSorting(trial.unittest.TestCase):
class TestFileListSorting(unittest.TestCase):
def setUp(self): def setUp(self):
mock_conf_settings(self) mock_conf_settings(self)
util.resetTime(self) util.resetTime(self)
self.faker = Faker('en_US') self.faker = Faker('en_US')
self.faker.seed(66410) self.faker.seed(66410)
self.test_daemon = get_test_daemon() self.test_daemon = get_test_daemon()
self.test_daemon.lbry_file_manager = mock.Mock(spec=EncryptedFileManager) component_manager = ComponentManager(
self.test_daemon.lbry_file_manager.lbry_files = self._get_fake_lbry_files() skip_components=[DATABASE_COMPONENT, DHT_COMPONENT, WALLET_COMPONENT, SESSION_COMPONENT, UPNP_COMPONENT,
PEER_PROTOCOL_SERVER_COMPONENT, REFLECTOR_COMPONENT, HASH_ANNOUNCER_COMPONENT,
STREAM_IDENTIFIER_COMPONENT],
fileManager=FakeFileManager
)
component_manager.setup()
self.test_daemon.component_manager = component_manager
self.test_daemon.file_manager = component_manager.get_component("fileManager")
self.test_daemon.file_manager.lbry_files = self._get_fake_lbry_files()
# Pre-sorted lists of prices and file names in ascending order produced by # Pre-sorted lists of prices and file names in ascending order produced by
# faker with seed 66410. This seed was chosen becacuse it produces 3 results # faker with seed 66410. This seed was chosen becacuse it produces 3 results