diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 5defdb8df..09788c583 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.6.9 +current_version = 0.6.10 commit = True tag = True diff --git a/INSTALL.md b/INSTALL.md index ccce3802e..4d67c9bce 100644 --- a/INSTALL.md +++ b/INSTALL.md @@ -33,7 +33,7 @@ On Ubuntu or Mint you can install the prerequisites by running ``` sudo apt-get install libgmp3-dev build-essential python2.7 python2.7-dev \ - python-pip git python-virtualenv + python-pip git python-virtualenv libssl-dev libffi-dev ``` ##### OSX and Linux Installation diff --git a/RUNNING.md b/RUNNING.md deleted file mode 100644 index d2f1842b4..000000000 --- a/RUNNING.md +++ /dev/null @@ -1,146 +0,0 @@ -How to watch It's a Wonderful Life via LBRY - -## Quickest quick guide - -Create a directory called lbry, and go into that directory - -Download the file https://raw.githubusercontent.com/lbryio/lbry-setup/master/lbry_setup.sh and run it in that directory - -Once it's done building, type: - -``` -lbrynet-console -``` - -A console application will load, and after a moment you will be presented with a ">" signifying -that the console is ready for commands. - -If it's your first time running lbrynet-console, you should be given some credits to test the -network. If they don't show up within a minute or two, let us know, and we'll send you some. - -After your credits have shown up, type - -``` -get wonderfullife -``` - -into the prompt and hit enter. - -You will be asked if you want to cancel, change options, save, and perhaps stream the file. - -Enter 's' for save and then hit enter. - -The file should start downloading. Enter the command 'status' to check on the status of files -that are in the process of being downloaded. - -To stop lbrynet-console, enter the command 'exit'. 
- - -## Slightly longer install guide - -### Installing lbrycrd, the full blockchain client - -Note: this process takes upwards of an hour and is not necessary to use lbrynet. - -``` -sudo apt-get install build-essential libtool autotools-dev autoconf pkg-config libssl-dev libboost-all-dev libdb-dev libdb++-dev libqt4-dev libprotobuf-dev protobuf-compiler git -git clone --depth=1 -b alpha https://github.com/lbryio/lbrycrd.git -cd lbrycrd -./autogen.sh -./configure --with-incompatible-bdb --without-gui - -make -``` - -When make has completed, create the directory where LBRYcrd data will be stored. ~/.lbrycrd is where LBRYcrd will look by default and so is recommended. - -``` -mkdir ~/.lbrycrd -echo 'rpcuser=rpcuser -rpcpassword=rpcpassword' > ~/.lbrycrd/lbrycrd.conf -# (use a long random password if your computer is on a network anyone else has access to. e.g, pwgen -s 20) -cd .. - -``` - -### Installing lbrynet from source - -Acquire the LBRYnet source code from https://github.com/lbryio/lbry - -``` -cd lbry -sudo apt-get install libgmp3-dev build-essential python-dev python-pip -``` - -(with virtualenv) - -``` -python-virtualenv - -virtualenv . - -source bin/activate - -python setup.py install - -``` - -to deactivate the virtualenv later: - -``` -deactivate -``` - -to reactivate it, go to the directory in which you created it and: - -``` -source bin/activate -``` - -(without virtualenv) - -``` -python setup.py build bdist_egg - -sudo python setup.py install -``` - -## Slightly longer running guide - -### lbrynet-console can be set to use lbrycrdd instead of the built in lightweight client. - -To run lbrynet-console with lbrycrdd: - -``` -lbrynet-console -``` - -If lbrycrdd is not already running, lbrynet will launch it at that path, and will shut it down -when lbrynet exits. If lbrycrdd is already running, lbrynet will not launch it and will not -shut it down, but it will connect to it and use it. 
- -### Running lbrycrdd manually - -From the lbrycrd directory, run: - -``` -./src/lbrycrdd -server -daemon -``` - -If you want to mine LBC, also use the flag '-gen', so: - -``` -./src/lbrycrdd -server -daemon -gen -``` - -Warning: This will put a heavy load on your CPU - -It will take a few minutes for your client to download the whole block chain. - -To shut lbrycrdd down: from the lbrycrd directory, run: - -``` -./src/lbrycrd-cli stop -``` - -Any questions or problems, email jimmy@lbry.io diff --git a/lbrynet/__init__.py b/lbrynet/__init__.py index a6f811b94..ed341cbb3 100644 --- a/lbrynet/__init__.py +++ b/lbrynet/__init__.py @@ -1,6 +1,6 @@ import logging -__version__ = "0.6.9" +__version__ = "0.6.10" version = tuple(__version__.split('.')) -logging.getLogger(__name__).addHandler(logging.NullHandler()) \ No newline at end of file +logging.getLogger(__name__).addHandler(logging.NullHandler()) diff --git a/lbrynet/analytics/__init__.py b/lbrynet/analytics/__init__.py index 598751034..0b39a5e8f 100644 --- a/lbrynet/analytics/__init__.py +++ b/lbrynet/analytics/__init__.py @@ -1,2 +1,6 @@ +from constants import * from events import * -from api import AnalyticsApi as Api \ No newline at end of file +from api import Api +from track import Track +from manager import Manager + diff --git a/lbrynet/analytics/api.py b/lbrynet/analytics/api.py index 2b8a3344c..50c20212d 100644 --- a/lbrynet/analytics/api.py +++ b/lbrynet/analytics/api.py @@ -5,7 +5,7 @@ import logging from requests import auth from requests_futures import sessions -from lbrynet import conf +from lbrynet.conf import settings from lbrynet.analytics import utils @@ -28,7 +28,7 @@ def log_response(fn): return wrapper -class AnalyticsApi(object): +class Api(object): def __init__(self, session, url, write_key): self.session = session self.url = url @@ -61,11 +61,11 @@ class AnalyticsApi(object): @classmethod def load(cls, session=None): - """Initialize an instance using values from lbry.io.""" + """Initialize 
an instance using values from the configuration""" if not session: session = sessions.FuturesSession() return cls( session, - conf.ANALYTICS_ENDPOINT, - utils.deobfuscate(conf.ANALYTICS_TOKEN) + settings.ANALYTICS_ENDPOINT, + utils.deobfuscate(settings.ANALYTICS_TOKEN) ) diff --git a/lbrynet/analytics/constants.py b/lbrynet/analytics/constants.py new file mode 100644 index 000000000..280b51a72 --- /dev/null +++ b/lbrynet/analytics/constants.py @@ -0,0 +1,4 @@ +"""Constants for metrics""" + +BLOB_BYTES_UPLOADED = 'Blob Bytes Uploaded' +BLOB_BYTES_AVAILABLE = 'Blob Bytes Available' diff --git a/lbrynet/analytics/events.py b/lbrynet/analytics/events.py index 8b1c63287..3b2e879be 100644 --- a/lbrynet/analytics/events.py +++ b/lbrynet/analytics/events.py @@ -1,6 +1,6 @@ import logging -from lbrynet.analytics import utils +from lbrynet.core import utils log = logging.getLogger(__name__) @@ -23,31 +23,39 @@ class Events(object): self.session_id = session_id def heartbeat(self): - return { - 'userId': 'lbry', - 'event': 'Heartbeat', - 'properties': { - 'lbry_id': self.lbry_id, - 'session_id': self.session_id - }, - 'context': self.context, - 'timestamp': utils.now() - } + return self._event('Heartbeat') def download_started(self, name, stream_info=None): + properties = { + 'name': name, + 'stream_info': get_sd_hash(stream_info) + } + return self._event('Download Started', properties) + + def metric_observed(self, metric_name, value): + properties = { + 'value': value, + } + return self._event(metric_name, properties) + + def _event(self, event, event_properties=None): return { 'userId': 'lbry', - 'event': 'Download Started', - 'properties': { - 'lbry_id': self.lbry_id, - 'session_id': self.session_id, - 'name': name, - 'stream_info': get_sd_hash(stream_info) - }, + 'event': event, + 'properties': self._properties(event_properties), 'context': self.context, - 'timestamp': utils.now() + 'timestamp': utils.isonow() } + def _properties(self, event_properties=None): + 
event_properties = event_properties or {} + properties = { + 'lbry_id': self.lbry_id, + 'session_id': self.session_id, + } + properties.update(event_properties) + return properties + def make_context(platform, wallet, is_dev=False): # TODO: distinguish between developer and release instances diff --git a/lbrynet/analytics/manager.py b/lbrynet/analytics/manager.py new file mode 100644 index 000000000..fecc1a0d2 --- /dev/null +++ b/lbrynet/analytics/manager.py @@ -0,0 +1,67 @@ +from lbrynet.core import looping_call_manager + +from twisted.internet import defer +from twisted.internet import task + +import constants + + +class Manager(object): + def __init__(self, analytics_api, events_generator, track): + self.analytics_api = analytics_api + self.events_generator = events_generator + self.track = track + self.looping_call_manager = self.setup_looping_calls() + + def setup_looping_calls(self): + call_manager = looping_call_manager.LoopingCallManager() + looping_calls = [ + ('send_heartbeat', self._send_heartbeat), + ('update_tracked_metrics', self._update_tracked_metrics), + ] + for name, fn in looping_calls: + call_manager.register_looping_call(name, task.LoopingCall(fn)) + return call_manager + + def start(self): + self.looping_call_manager.start('send_heartbeat', 60) + self.looping_call_manager.start('update_tracked_metrics', 300) + + def shutdown(self): + self.looping_call_manager.shutdown() + + def send_download_started(self, name, stream_info=None): + event = self.events_generator.download_started(name, stream_info) + self.analytics_api.track(event) + + def register_repeating_metric(self, event_name, value_generator, frequency=300): + lcall = task.LoopingCall(self._send_repeating_metric, event_name, value_generator) + self.looping_call_manager.register_looping_call(event_name, lcall) + lcall.start(frequency) + + def _send_heartbeat(self): + heartbeat = self.events_generator.heartbeat() + self.analytics_api.track(heartbeat) + + def _update_tracked_metrics(self): + 
should_send, value = self.track.summarize_and_reset(constants.BLOB_BYTES_UPLOADED) + if should_send: + event = self.events_generator.metric_observed(constants.BLOB_BYTES_UPLOADED, value) + self.analytics_api.track(event) + + def _send_repeating_metric(self, event_name, value_generator): + result = value_generator() + if_deferred(result, self._send_repeating_metric_value, event_name) + + def _send_repeating_metric_value(self, result, event_name): + should_send, value = result + if should_send: + event = self.events_generator.metric_observed(event_name, value) + self.analytics_api.track(event) + + +def if_deferred(maybe_deferred, callback, *args, **kwargs): + if isinstance(maybe_deferred, defer.Deferred): + maybe_deferred.addCallback(callback, *args, **kwargs) + else: + callback(maybe_deferred, *args, **kwargs) diff --git a/lbrynet/analytics/track.py b/lbrynet/analytics/track.py new file mode 100644 index 000000000..7643ebce9 --- /dev/null +++ b/lbrynet/analytics/track.py @@ -0,0 +1,24 @@ +import collections + + +class Track(object): + """Track and summarize observations of metrics.""" + def __init__(self): + self.data = collections.defaultdict(list) + + def add_observation(self, metric, value): + self.data[metric].append(value) + + def summarize_and_reset(self, metric, op=sum): + """Apply `op` on the current values for `metric`. + + This operation also resets the metric. 
+ + Returns: + a tuple (should_send, value) + """ + try: + values = self.data.pop(metric) + return True, op(values) + except KeyError: + return False, None diff --git a/lbrynet/analytics/utils.py b/lbrynet/analytics/utils.py deleted file mode 100644 index d147f8c34..000000000 --- a/lbrynet/analytics/utils.py +++ /dev/null @@ -1,8 +0,0 @@ -import datetime - -from lbrynet.core.utils import * - - -def now(): - """Return utc now in isoformat with timezone""" - return datetime.datetime.utcnow().isoformat() + 'Z' diff --git a/lbrynet/conf.py b/lbrynet/conf.py index 4f895d69b..9f46406e4 100644 --- a/lbrynet/conf.py +++ b/lbrynet/conf.py @@ -1,77 +1,236 @@ -""" -Some network wide and also application specific parameters -""" +import copy import os +import sys +from appdirs import user_data_dir -is_generous_host = True -IS_DEVELOPMENT_VERSION = False +LINUX = 1 +DARWIN = 2 +WINDOWS = 3 -MAX_HANDSHAKE_SIZE = 2**16 -MAX_REQUEST_SIZE = 2**16 -MAX_BLOB_REQUEST_SIZE = 2**16 -MAX_RESPONSE_INFO_SIZE = 2**16 -MAX_BLOB_INFOS_TO_REQUEST = 20 -BLOBFILES_DIR = ".blobfiles" -BLOB_SIZE = 2**21 - -MIN_BLOB_DATA_PAYMENT_RATE = .0001 # points/megabyte -MIN_BLOB_INFO_PAYMENT_RATE = .02 # points/1000 infos -MIN_VALUABLE_BLOB_INFO_PAYMENT_RATE = .05 # points/1000 infos -MIN_VALUABLE_BLOB_HASH_PAYMENT_RATE = .05 # points/1000 infos -MAX_CONNECTIONS_PER_STREAM = 5 - -KNOWN_DHT_NODES = [('104.236.42.182', 4000), - ('lbrynet1.lbry.io', 4444), - ('lbrynet2.lbry.io', 4444), - ('lbrynet3.lbry.io', 4444)] - -POINTTRADER_SERVER = 'http://ec2-54-187-192-68.us-west-2.compute.amazonaws.com:2424' -#POINTTRADER_SERVER = 'http://127.0.0.1:2424' - - -SEARCH_SERVERS = ["http://lighthouse1.lbry.io:50005", - "http://lighthouse2.lbry.io:50005", - "http://lighthouse3.lbry.io:50005"] - -REFLECTOR_SERVERS = [("reflector.lbry.io", 5566)] - -LOG_FILE_NAME = "lbrynet.log" -LOG_POST_URL = "https://lbry.io/log-upload" - -CRYPTSD_FILE_EXTENSION = ".cryptsd" - -API_INTERFACE = "localhost" -API_ADDRESS = "lbryapi" -API_PORT 
= 5279 -if os.name == "nt": - ICON_PATH = "icons" +if sys.platform.startswith("darwin"): + platform = DARWIN + default_download_directory = os.path.join(os.path.expanduser("~"), 'Downloads') + default_data_dir = user_data_dir("LBRY") + default_lbryum_dir = os.path.join(os.path.expanduser("~"), ".lbryum") +elif sys.platform.startswith("win"): + platform = WINDOWS + from lbrynet.winhelpers.knownpaths import get_path, FOLDERID, UserHandle + default_download_directory = get_path(FOLDERID.Downloads, UserHandle.current) + default_data_dir = os.path.join( + get_path(FOLDERID.RoamingAppData, UserHandle.current), "lbrynet") + default_lbryum_dir = os.path.join( + get_path(FOLDERID.RoamingAppData, UserHandle.current), "lbryum") else: - ICON_PATH = "app.icns" -APP_NAME = "LBRY" -API_CONNECTION_STRING = "http://%s:%i/%s" % (API_INTERFACE, API_PORT, API_ADDRESS) -UI_ADDRESS = "http://%s:%i" % (API_INTERFACE, API_PORT) -PROTOCOL_PREFIX = "lbry" + platform = LINUX + default_download_directory = os.path.join(os.path.expanduser("~"), 'Downloads') + default_data_dir = os.path.join(os.path.expanduser("~"), ".lbrynet") + default_lbryum_dir = os.path.join(os.path.expanduser("~"), ".lbryum") -DEFAULT_WALLET = "lbryum" -WALLET_TYPES = ["lbryum", "lbrycrd"] -DEFAULT_TIMEOUT = 30 -DEFAULT_MAX_SEARCH_RESULTS = 25 -DEFAULT_MAX_KEY_FEE = {'USD': {'amount': 25.0, 'address': ''}} -DEFAULT_SEARCH_TIMEOUT = 3.0 -DEFAULT_SD_DOWNLOAD_TIMEOUT = 3 -DEFAULT_CACHE_TIME = 150 -DEFAULT_UI_BRANCH = "master" -SOURCE_TYPES = ['lbry_sd_hash', 'url', 'btih'] -CURRENCIES = { - 'BTC': {'type': 'crypto'}, - 'LBC': {'type': 'crypto'}, - 'USD': {'type': 'fiat'}, - } +def convert_setting(env_val, current_val): + new_type = env_val.__class__ + current_type = current_val.__class__ + if current_type is bool: + if new_type is bool: + return env_val + elif str(env_val).lower() == "false": + return False + elif str(env_val).lower() == "true": + return True + else: + raise ValueError + elif current_type is int: + return 
int(env_val) + elif current_type is float: + return float(env_val) + elif current_type is str: + return str(env_val) + elif current_type is dict: + return dict(env_val) + elif current_type is list: + return list(env_val) + elif current_type is tuple: + return tuple(env_val) + else: + raise ValueError('Type {} cannot be converted'.format(current_type)) -LOGGLY_TOKEN = 'LJEzATH4AzRgAwxjAP00LwZ2YGx3MwVgZTMuBQZ3MQuxLmOv' -ANALYTICS_ENDPOINT = 'https://api.segment.io/v1' -ANALYTICS_TOKEN = 'Ax5LZzR1o3q3Z3WjATASDwR5rKyHH0qOIRIbLmMXn2H=' +def convert_env_setting(setting, value): + try: + env_val = os.environ[setting] + except KeyError: + return value + else: + return convert_setting(env_val, value) -LBRYUM_WALLET_DIR = os.environ.get('LBRYUM_WALLET_DIR') + +def get_env_settings(settings): + for setting, value in settings.iteritems(): + setting = 'LBRY_' + setting.upper() + yield convert_env_setting(setting, value) + + +def add_env_settings_to_dict(settings_dict): + for setting, env_setting in zip(settings_dict, get_env_settings(settings_dict)): + settings_dict.update({setting: env_setting}) + return settings_dict + + +class Setting(object): + __fixed = [] + __excluded = ['get_dict', 'update'] + + def __iter__(self): + for k in self.__dict__.iterkeys(): + if k.startswith('_') or k in self.__excluded: + continue + yield k + + def __getitem__(self, item): + assert item in self, IndexError + return self.__dict__[item] + + def __setitem__(self, key, value): + assert key in self and key not in self.__fixed, KeyError(key) + _value = convert_setting(value, self[key]) + self.__dict__.update({key: _value}) + + def __contains__(self, item): + return item in iter(self) + + def get_dict(self): + return {k: self[k] for k in self} + + def update(self, other): + for k, v in other.iteritems(): + try: + self.__setitem__(k, v) + except KeyError: + pass + except AssertionError: + pass + + +class AdjustableSettings(Setting): + def __init__(self): + self.is_generous_host = True + 
self.run_on_startup = False + self.download_directory = default_download_directory + self.max_upload = 0.0 + self.max_download = 0.0 + self.upload_log = True + self.delete_blobs_on_remove = True + self.use_upnp = True + self.start_lbrycrdd = True + self.run_reflector_server = False + self.startup_scripts = [] + self.last_version = {'lbrynet': '0.0.1', 'lbryum': '0.0.1'} + self.peer_port = 3333 + self.dht_node_port = 4444 + self.reflector_port = 5566 + self.download_timeout = 30 + self.max_search_results = 25 + self.search_timeout = 3.0 + self.cache_time = 150 + self.host_ui = True + self.check_ui_requirements = True + self.local_ui_path = False + self.api_port = 5279 + self.search_servers = ['lighthouse1.lbry.io:50005'] + self.data_rate = .0001 # points/megabyte + self.min_info_rate = .02 # points/1000 infos + self.min_valuable_info_rate = .05 # points/1000 infos + self.min_valuable_hash_rate = .05 # points/1000 infos + self.max_connections_per_stream = 5 + self.known_dht_nodes = [ + ('104.236.42.182', 4000), + ('lbrynet1.lbry.io', 4444), + ('lbrynet2.lbry.io', 4444), + ('lbrynet3.lbry.io', 4444) + ] + self.pointtrader_server = 'http://127.0.0.1:2424' + self.reflector_servers = [("reflector.lbry.io", 5566)] + self.wallet = "lbryum" + self.ui_branch = "master" + self.default_ui_branch = 'master' + self.data_dir = default_data_dir + self.lbryum_wallet_dir = default_lbryum_dir + self.use_auth_http = False + self.sd_download_timeout = 3 + self.max_key_fee = {'USD': {'amount': 25.0, 'address': ''}} + + +class ApplicationSettings(Setting): + def __init__(self): + self.MAX_HANDSHAKE_SIZE = 2**16 + self.MAX_REQUEST_SIZE = 2**16 + self.MAX_BLOB_REQUEST_SIZE = 2**16 + self.MAX_RESPONSE_INFO_SIZE = 2**16 + self.MAX_BLOB_INFOS_TO_REQUEST = 20 + self.BLOBFILES_DIR = "blobfiles" + self.BLOB_SIZE = 2**21 + self.LOG_FILE_NAME = "lbrynet.log" + self.LOG_POST_URL = "https://lbry.io/log-upload" + self.CRYPTSD_FILE_EXTENSION = ".cryptsd" + self.API_INTERFACE = "localhost" + 
self.API_ADDRESS = "lbryapi" + self.ICON_PATH = "icons" if platform is WINDOWS else "app.icns" + self.APP_NAME = "LBRY" + self.PROTOCOL_PREFIX = "lbry" + self.wallet_TYPES = ["lbryum", "lbrycrd"] + self.SOURCE_TYPES = ['lbry_sd_hash', 'url', 'btih'] + self.CURRENCIES = { + 'BTC': {'type': 'crypto'}, + 'LBC': {'type': 'crypto'}, + 'USD': {'type': 'fiat'}, + } + self.LOGGLY_TOKEN = 'LJEzATH4AzRgAwxjAP00LwZ2YGx3MwVgZTMuBQZ3MQuxLmOv' + self.ANALYTICS_ENDPOINT = 'https://api.segment.io/v1' + self.ANALYTICS_TOKEN = 'Ax5LZzR1o3q3Z3WjATASDwR5rKyHH0qOIRIbLmMXn2H=' + + +APPLICATION_SETTINGS = AdjustableSettings() +ADJUSTABLE_SETTINGS = AdjustableSettings() + + +class DefaultSettings(ApplicationSettings, AdjustableSettings): + __fixed = APPLICATION_SETTINGS.get_dict().keys() + + def __init__(self): + ApplicationSettings.__init__(self) + AdjustableSettings.__init__(self) + + +DEFAULT_SETTINGS = DefaultSettings() + + +class Config(DefaultSettings): + __shared_state = copy.deepcopy(DEFAULT_SETTINGS.get_dict()) + + def __init__(self): + self.__dict__ = add_env_settings_to_dict(self.__shared_state) + + @property + def ORIGIN(self): + return "http://%s:%i" % (DEFAULT_SETTINGS.API_INTERFACE, self.api_port) + + @property + def REFERER(self): + return "http://%s:%i/" % (DEFAULT_SETTINGS.API_INTERFACE, self.api_port) + + @property + def API_CONNECTION_STRING(self): + return "http://%s:%i/%s" % ( + DEFAULT_SETTINGS.API_INTERFACE, self.api_port, DEFAULT_SETTINGS.API_ADDRESS) + + @property + def UI_ADDRESS(self): + return "http://%s:%i" % (DEFAULT_SETTINGS.API_INTERFACE, self.api_port) + + +# TODO: don't load the configuration automatically. The configuration +# should be loaded at runtime, not at module import time. Module +# import should have no side-effects. This is also bad because +# it means that settings are read from the environment even for +# tests, which is rarely what you want to happen. 
+settings = Config() diff --git a/lbrynet/core/Error.py b/lbrynet/core/Error.py index dfc9bfe98..8714a30fe 100644 --- a/lbrynet/core/Error.py +++ b/lbrynet/core/Error.py @@ -90,5 +90,17 @@ class InvalidBlobHashError(Exception): pass +class InvalidHeaderError(Exception): + pass + + +class InvalidAuthenticationToken(Exception): + pass + + +class SubhandlerError(Exception): + pass + + class NegotiationError(Exception): - pass \ No newline at end of file + pass diff --git a/lbrynet/core/HashBlob.py b/lbrynet/core/HashBlob.py index 072d2c5a6..a0e95acfb 100644 --- a/lbrynet/core/HashBlob.py +++ b/lbrynet/core/HashBlob.py @@ -8,7 +8,7 @@ from twisted.internet import interfaces, defer, threads from twisted.protocols.basic import FileSender from twisted.python.failure import Failure from zope.interface import implements -from lbrynet.conf import BLOB_SIZE +from lbrynet.conf import settings from lbrynet.core.Error import DownloadCanceledError, InvalidDataError from lbrynet.core.cryptoutils import get_lbry_hash_obj @@ -87,7 +87,7 @@ class HashBlob(object): def set_length(self, length): if self.length is not None and length == self.length: return True - if self.length is None and 0 <= length <= BLOB_SIZE: + if self.length is None and 0 <= length <= settings.BLOB_SIZE: self.length = length return True log.warning("Got an invalid length. 
Previous length: %s, Invalid length: %s", str(self.length), str(length)) diff --git a/lbrynet/core/PaymentRateManager.py b/lbrynet/core/PaymentRateManager.py index 6e7c02047..726f3ba19 100644 --- a/lbrynet/core/PaymentRateManager.py +++ b/lbrynet/core/PaymentRateManager.py @@ -1,10 +1,10 @@ from lbrynet.core.Strategy import get_default_strategy -from lbrynet.conf import MIN_BLOB_DATA_PAYMENT_RATE, MIN_BLOB_INFO_PAYMENT_RATE, is_generous_host +from lbrynet.conf import settings from decimal import Decimal class BasePaymentRateManager(object): - def __init__(self, rate=MIN_BLOB_DATA_PAYMENT_RATE, info_rate=MIN_BLOB_INFO_PAYMENT_RATE): + def __init__(self, rate=settings.data_rate, info_rate=settings.min_info_rate): self.min_blob_data_payment_rate = rate self.min_blob_info_payment_rate = info_rate @@ -36,7 +36,7 @@ class PaymentRateManager(object): class NegotiatedPaymentRateManager(object): - def __init__(self, base, availability_tracker, generous=is_generous_host): + def __init__(self, base, availability_tracker, generous=settings.is_generous_host): """ @param base: a BasePaymentRateManager @param availability_tracker: a BlobAvailabilityTracker diff --git a/lbrynet/core/PriceModel.py b/lbrynet/core/PriceModel.py index 71ec87769..70e57ebd0 100644 --- a/lbrynet/core/PriceModel.py +++ b/lbrynet/core/PriceModel.py @@ -2,7 +2,7 @@ from zope.interface import implementer from decimal import Decimal from lbrynet.interfaces import IBlobPriceModel -from lbrynet.conf import MIN_BLOB_DATA_PAYMENT_RATE +from lbrynet.conf import settings def get_default_price_model(blob_tracker, base_price, **kwargs): @@ -21,7 +21,7 @@ class MeanAvailabilityWeightedPrice(object): """ implementer(IBlobPriceModel) - def __init__(self, tracker, base_price=MIN_BLOB_DATA_PAYMENT_RATE, alpha=1.0): + def __init__(self, tracker, base_price=settings.data_rate, alpha=1.0): self.blob_tracker = tracker self.base_price = Decimal(base_price) self.alpha = Decimal(alpha) diff --git a/lbrynet/core/Strategy.py 
b/lbrynet/core/Strategy.py index f25531652..6a3fc41c9 100644 --- a/lbrynet/core/Strategy.py +++ b/lbrynet/core/Strategy.py @@ -1,6 +1,6 @@ from zope.interface import implementer from decimal import Decimal -from lbrynet.conf import is_generous_host +from lbrynet.conf import settings from lbrynet.interfaces import INegotiationStrategy from lbrynet.core.Offer import Offer from lbrynet.core.PriceModel import MeanAvailabilityWeightedPrice @@ -16,7 +16,7 @@ class Strategy(object): """ implementer(INegotiationStrategy) - def __init__(self, price_model, max_rate, min_rate, is_generous=is_generous_host): + def __init__(self, price_model, max_rate, min_rate, is_generous=settings.is_generous_host): self.price_model = price_model self.is_generous = is_generous self.accepted_offers = {} @@ -101,7 +101,7 @@ class BasicAvailabilityWeightedStrategy(Strategy): implementer(INegotiationStrategy) def __init__(self, blob_tracker, acceleration=1.25, deceleration=0.9, max_rate=None, min_rate=0.0, - is_generous=is_generous_host, base_price=0.0001, alpha=1.0): + is_generous=settings.is_generous_host, base_price=0.0001, alpha=1.0): price_model = MeanAvailabilityWeightedPrice(blob_tracker, base_price=base_price, alpha=alpha) Strategy.__init__(self, price_model, max_rate, min_rate, is_generous) self._acceleration = Decimal(acceleration) # rate of how quickly to ramp offer diff --git a/lbrynet/core/Wallet.py b/lbrynet/core/Wallet.py index 528ad983f..1d00e9b13 100644 --- a/lbrynet/core/Wallet.py +++ b/lbrynet/core/Wallet.py @@ -153,7 +153,12 @@ class Wallet(object): log.info("Got a new balance: %s", str(balance)) self.wallet_balance = balance - d.addCallback(set_wallet_balance) + def log_error(err): + if isinstance(err, AttributeError): + log.warning("Failed to get an updated balance") + log.warning("Last balance update: %s", str(self.wallet_balance)) + + d.addCallbacks(set_wallet_balance, log_error) return d d.addCallback(lambda should_run: do_manage() if should_run else None) @@ -1169,7 
+1174,8 @@ class LBRYumWallet(Wallet): self._start_check = None if self._catch_up_check is not None: - self._catch_up_check.stop() + if self._catch_up_check.running: + self._catch_up_check.stop() self._catch_up_check = None d = defer.Deferred() @@ -1241,6 +1247,9 @@ class LBRYumWallet(Wallet): self._caught_up_counter += 1 + def log_error(err): + log.warning(err.getErrorMessage()) + return defer.fail(err) self._catch_up_check = task.LoopingCall(check_caught_up) @@ -1248,6 +1257,7 @@ class LBRYumWallet(Wallet): d.addCallback(self._save_wallet) d.addCallback(lambda _: self.wallet.start_threads(self.network)) d.addCallback(lambda _: self._catch_up_check.start(.1)) + d.addErrback(log_error) d.addCallback(lambda _: blockchain_caught_d) return d diff --git a/lbrynet/core/client/ClientProtocol.py b/lbrynet/core/client/ClientProtocol.py index 1989f8c96..09e7a32ca 100644 --- a/lbrynet/core/client/ClientProtocol.py +++ b/lbrynet/core/client/ClientProtocol.py @@ -4,7 +4,7 @@ from decimal import Decimal from twisted.internet import error, defer from twisted.internet.protocol import Protocol, ClientFactory from twisted.python import failure -from lbrynet.conf import MAX_RESPONSE_INFO_SIZE as MAX_RESPONSE_SIZE +from lbrynet.conf import settings from lbrynet.core.Error import ConnectionClosedBeforeResponseError, NoResponseError from lbrynet.core.Error import DownloadCanceledError, MisbehavingPeerError from lbrynet.core.Error import RequestCanceledError @@ -48,7 +48,7 @@ class ClientProtocol(Protocol): self._blob_download_request.write(data) else: self._response_buff += data - if len(self._response_buff) > MAX_RESPONSE_SIZE: + if len(self._response_buff) > settings.MAX_RESPONSE_INFO_SIZE: log.warning("Response is too large. 
Size %s", len(self._response_buff)) self.transport.loseConnection() response, extra_data = self._get_valid_response(self._response_buff) diff --git a/lbrynet/core/client/ClientRequest.py b/lbrynet/core/client/ClientRequest.py index b5630f729..106982ccc 100644 --- a/lbrynet/core/client/ClientRequest.py +++ b/lbrynet/core/client/ClientRequest.py @@ -1,4 +1,4 @@ -from lbrynet.conf import BLOB_SIZE +from lbrynet.conf import settings class ClientRequest(object): @@ -17,7 +17,7 @@ class ClientBlobRequest(ClientPaidRequest): def __init__(self, request_dict, response_identifier, write_func, finished_deferred, cancel_func, blob): if blob.length is None: - max_pay_units = BLOB_SIZE + max_pay_units = settings.BLOB_SIZE else: max_pay_units = blob.length ClientPaidRequest.__init__(self, request_dict, response_identifier, max_pay_units) diff --git a/lbrynet/core/client/ConnectionManager.py b/lbrynet/core/client/ConnectionManager.py index 15e3ac686..d68af47b0 100644 --- a/lbrynet/core/client/ConnectionManager.py +++ b/lbrynet/core/client/ConnectionManager.py @@ -2,7 +2,7 @@ import logging from twisted.internet import defer from zope.interface import implements from lbrynet import interfaces -from lbrynet.conf import MAX_CONNECTIONS_PER_STREAM +from lbrynet.conf import settings from lbrynet.core.client.ClientProtocol import ClientProtocolFactory from lbrynet.core.Error import InsufficientFundsError @@ -183,7 +183,7 @@ class ConnectionManager(object): log.debug("Couldn't find a good peer to connect to") return None - if len(self._peer_connections) < MAX_CONNECTIONS_PER_STREAM: + if len(self._peer_connections) < settings.max_connections_per_stream: ordered_request_creators = self._rank_request_creator_connections() d = get_new_peers(ordered_request_creators) d.addCallback(pick_best_peer) diff --git a/lbrynet/core/log_support.py b/lbrynet/core/log_support.py index becb698c8..5c23d536a 100644 --- a/lbrynet/core/log_support.py +++ b/lbrynet/core/log_support.py @@ -7,7 +7,7 @@ import 
traceback from requests_futures.sessions import FuturesSession import lbrynet -from lbrynet import conf +from lbrynet.conf import settings from lbrynet.core import utils session = FuturesSession() @@ -98,7 +98,7 @@ def configure_file_handler(file_name, **kwargs): def get_loggly_url(token=None, version=None): - token = token or utils.deobfuscate(conf.LOGGLY_TOKEN) + token = token or utils.deobfuscate(settings.LOGGLY_TOKEN) version = version or lbrynet.__version__ return LOGGLY_URL.format(token=token, tag='lbrynet-' + version) @@ -106,19 +106,44 @@ def get_loggly_url(token=None, version=None): @_log_decorator def configure_loggly_handler(url=None, **kwargs): url = url or get_loggly_url() - json_format = { - "loggerName": "%(name)s", - "asciTime": "%(asctime)s", - "fileName": "%(filename)s", - "functionName": "%(funcName)s", - "levelNo": "%(levelno)s", - "lineNo": "%(lineno)d", - "levelName": "%(levelname)s", - "message": "%(message)s", - } - json_format.update(kwargs) - formatter = logging.Formatter(json.dumps(json_format)) + formatter = JsonFormatter(**kwargs) handler = HTTPSHandler(url) handler.setFormatter(formatter) handler.name = 'loggly' return handler + + +class JsonFormatter(logging.Formatter): + """Format log records using json serialization""" + def __init__(self, **kwargs): + self.attributes = kwargs + + def format(self, record): + data = { + 'loggerName': record.name, + 'asciTime': self.formatTime(record), + 'fileName': record.filename, + 'functionName': record.funcName, + 'levelNo': record.levelno, + 'lineNo': record.lineno, + 'levelName': record.levelname, + 'message': record.getMessage(), + } + data.update(self.attributes) + if record.exc_info: + data['exc_info'] = self.formatException(record.exc_info) + return json.dumps(data) + + +def failure(failure, log, msg, *args): + """Log a failure message from a deferred. + + Args: + failure: twisted.python.failure.Failure + log: a python logger instance + msg: the message to log. 
Can use normal logging string interpolation. + the last argument will be set to the error message from the failure. + args: values to substitute into `msg` + """ + args += (failure.getErrorMessage(),) + log.error(msg, *args, exc_info=failure.getTracebackObject()) diff --git a/lbrynet/core/looping_call_manager.py b/lbrynet/core/looping_call_manager.py new file mode 100644 index 000000000..7dbc9e022 --- /dev/null +++ b/lbrynet/core/looping_call_manager.py @@ -0,0 +1,20 @@ +class LoopingCallManager(object): + def __init__(self, calls=None): + self.calls = calls or {} + + def register_looping_call(self, name, call): + assert name not in self.calls, '{} is already registered'.format(name) + self.calls[name] = call + + def start(self, name, *args): + lcall = self.calls[name] + if not lcall.running: + lcall.start(*args) + + def stop(self, name): + self.calls[name].stop() + + def shutdown(self): + for lcall in self.calls.itervalues(): + if lcall.running: + lcall.stop() diff --git a/lbrynet/core/server/BlobRequestHandler.py b/lbrynet/core/server/BlobRequestHandler.py index 15874c215..adf60f5b9 100644 --- a/lbrynet/core/server/BlobRequestHandler.py +++ b/lbrynet/core/server/BlobRequestHandler.py @@ -5,7 +5,9 @@ from twisted.protocols.basic import FileSender from twisted.python.failure import Failure from zope.interface import implements + from lbrynet.core.Offer import Offer +from lbrynet import analytics from lbrynet.interfaces import IQueryHandlerFactory, IQueryHandler, IBlobSender @@ -15,15 +17,16 @@ log = logging.getLogger(__name__) class BlobRequestHandlerFactory(object): implements(IQueryHandlerFactory) - def __init__(self, blob_manager, wallet, payment_rate_manager): + def __init__(self, blob_manager, wallet, payment_rate_manager, track): self.blob_manager = blob_manager self.wallet = wallet self.payment_rate_manager = payment_rate_manager + self.track = track ######### IQueryHandlerFactory ######### def build_query_handler(self): - q_h = 
BlobRequestHandler(self.blob_manager, self.wallet, self.payment_rate_manager) + q_h = BlobRequestHandler(self.blob_manager, self.wallet, self.payment_rate_manager, self.track) return q_h def get_primary_query_identifier(self): @@ -39,11 +42,12 @@ class BlobRequestHandler(object): BLOB_QUERY = 'requested_blob' AVAILABILITY_QUERY = 'requested_blobs' - def __init__(self, blob_manager, wallet, payment_rate_manager): + def __init__(self, blob_manager, wallet, payment_rate_manager, track): self.blob_manager = blob_manager self.payment_rate_manager = payment_rate_manager self.wallet = wallet self.query_identifiers = [self.PAYMENT_RATE_QUERY, self.BLOB_QUERY, self.AVAILABILITY_QUERY] + self.track = track self.peer = None self.blob_data_payment_rate = None self.read_handle = None @@ -190,8 +194,10 @@ class BlobRequestHandler(object): return inner_d def count_bytes(data): - self.blob_bytes_uploaded += len(data) - self.peer.update_stats('blob_bytes_uploaded', len(data)) + uploaded = len(data) + self.blob_bytes_uploaded += uploaded + self.peer.update_stats('blob_bytes_uploaded', uploaded) + self.track.add_observation(analytics.BLOB_BYTES_UPLOADED, uploaded) return data def start_transfer(): diff --git a/lbrynet/core/server/ServerRequestHandler.py b/lbrynet/core/server/ServerRequestHandler.py index e1685c37b..2cb87ece6 100644 --- a/lbrynet/core/server/ServerRequestHandler.py +++ b/lbrynet/core/server/ServerRequestHandler.py @@ -9,9 +9,10 @@ log = logging.getLogger(__name__) class ServerRequestHandler(object): - """This class handles requests from clients. It can upload blobs and return request for information about - more blobs that are associated with streams""" - + """This class handles requests from clients. It can upload blobs and + return request for information about more blobs that are + associated with streams. 
+ """ implements(interfaces.IPushProducer, interfaces.IConsumer, IRequestHandler) def __init__(self, consumer): @@ -90,20 +91,27 @@ class ServerRequestHandler(object): log.debug("Received data") log.debug("%s", str(data)) if self.request_received is False: - self.request_buff = self.request_buff + data - msg = self.try_to_parse_request(self.request_buff) - if msg is not None: - self.request_buff = '' - d = self.handle_request(msg) - if self.blob_sender is not None: - d.addCallback(lambda _: self.blob_sender.send_blob_if_requested(self)) - d.addCallbacks(lambda _: self.finished_response(), self.request_failure_handler) - else: - log.debug("Request buff not a valid json message") - log.debug("Request buff: %s", str(self.request_buff)) + return self._parse_data_and_maybe_send_blob(data) else: log.warning("The client sent data when we were uploading a file. This should not happen") + def _parse_data_and_maybe_send_blob(self, data): + self.request_buff = self.request_buff + data + msg = self.try_to_parse_request(self.request_buff) + if msg: + self.request_buff = '' + self._process_msg(msg) + else: + log.debug("Request buff not a valid json message") + log.debug("Request buff: %s", self.request_buff) + + def _process_msg(self, msg): + d = self.handle_request(msg) + if self.blob_sender: + d.addCallback(lambda _: self.blob_sender.send_blob_if_requested(self)) + d.addCallbacks(lambda _: self.finished_response(), self.request_failure_handler) + + ######### IRequestHandler ######### def register_query_handler(self, query_handler, query_identifiers): diff --git a/lbrynet/core/utils.py b/lbrynet/core/utils.py index 9491c26fa..ad88d3fe4 100644 --- a/lbrynet/core/utils.py +++ b/lbrynet/core/utils.py @@ -8,15 +8,35 @@ import os import socket import yaml +from lbrynet.conf import settings +from lbrynet.conf import AdjustableSettings from lbrynet.core.cryptoutils import get_lbry_hash_obj -blobhash_length = get_lbry_hash_obj().digest_size * 2 # digest_size is in bytes, and blob 
hashes are hex encoded +# digest_size is in bytes, and blob hashes are hex encoded +blobhash_length = get_lbry_hash_obj().digest_size * 2 log = logging.getLogger(__name__) +# defining these time functions here allows for easier overriding in testing +def now(): + return datetime.datetime.now() + + +def utcnow(): + return datetime.datetime.utcnow() + + +def isonow(): + """Return utc now in isoformat with timezone""" + return utcnow().isoformat() + 'Z' + +def today(): + return datetime.datetime.today() + + def generate_id(num=None): h = get_lbry_hash_obj() if num is not None: @@ -26,18 +46,19 @@ def generate_id(num=None): return h.digest() +def is_valid_hashcharacter(char): + return char in "0123456789abcdef" + + def is_valid_blobhash(blobhash): - """ + """Checks whether the blobhash is the correct length and contains only + valid characters (0-9, a-f) + @param blobhash: string, the blobhash to check - @return: Whether the blobhash is the correct length and contains only valid characters (0-9, a-f) + @return: True/False """ - if len(blobhash) != blobhash_length: - return False - for l in blobhash: - if l not in "0123456789abcdef": - return False - return True + return len(blobhash) == blobhash_length and all(is_valid_hashcharacter(l) for l in blobhash) def version_is_greater_than(a, b): @@ -66,28 +87,25 @@ settings_encoders = { '.yml': yaml.safe_dump } +ADJUSTABLE_SETTINGS = AdjustableSettings().get_dict() + def load_settings(path): ext = os.path.splitext(path)[1] - f = open(path, 'r') - data = f.read() - f.close() + with open(path, 'r') as settings_file: + data = settings_file.read() decoder = settings_decoders.get(ext, False) assert decoder is not False, "Unknown settings format .%s" % ext return decoder(data) -def save_settings(path, settings): +def save_settings(path): + to_save = {k: v for k, v in settings.__dict__.iteritems() if k in ADJUSTABLE_SETTINGS} ext = os.path.splitext(path)[1] encoder = settings_encoders.get(ext, False) assert encoder is not False, 
"Unknown settings format .%s" % ext - f = open(path, 'w') - f.write(encoder(settings)) - f.close() - - -def today(): - return datetime.datetime.today() + with open(path, 'w') as settings_file: + settings_file.write(encoder(to_save)) def check_connection(server="www.lbry.io", port=80): diff --git a/lbrynet/cryptstream/CryptBlob.py b/lbrynet/cryptstream/CryptBlob.py index aba149d9d..da94f631b 100644 --- a/lbrynet/cryptstream/CryptBlob.py +++ b/lbrynet/cryptstream/CryptBlob.py @@ -1,7 +1,7 @@ import binascii import logging from Crypto.Cipher import AES -from lbrynet.conf import BLOB_SIZE +from lbrynet.conf import settings from lbrynet.core.BlobInfo import BlobInfo @@ -67,7 +67,7 @@ class CryptStreamBlobMaker(object): self.length = 0 def write(self, data): - max_bytes_to_write = BLOB_SIZE - self.length - 1 + max_bytes_to_write = settings.BLOB_SIZE - self.length - 1 done = False if max_bytes_to_write <= len(data): num_bytes_to_write = max_bytes_to_write diff --git a/lbrynet/lbryfilemanager/EncryptedFileCreator.py b/lbrynet/lbryfilemanager/EncryptedFileCreator.py index 0acdf32cb..c9feaccf3 100644 --- a/lbrynet/lbryfilemanager/EncryptedFileCreator.py +++ b/lbrynet/lbryfilemanager/EncryptedFileCreator.py @@ -7,7 +7,7 @@ import logging import os from lbrynet.core.StreamDescriptor import PlainStreamDescriptorWriter from lbrynet.cryptstream.CryptStreamCreator import CryptStreamCreator -from lbrynet import conf +from lbrynet.conf import settings from lbrynet.lbryfile.StreamDescriptor import get_sd_info from lbrynet.core.cryptoutils import get_lbry_hash_obj from twisted.protocols.basic import FileSender @@ -130,7 +130,7 @@ def create_lbry_file(session, lbry_file_manager, file_name, file_handle, key=Non def make_stream_desc_file(stream_hash): log.debug("creating the stream descriptor file") - descriptor_file_path = os.path.join(session.db_dir, file_name + conf.CRYPTSD_FILE_EXTENSION) + descriptor_file_path = os.path.join(session.db_dir, file_name + 
settings.CRYPTSD_FILE_EXTENSION) descriptor_writer = PlainStreamDescriptorWriter(descriptor_file_path) d = get_sd_info(lbry_file_manager.stream_info_manager, stream_hash, True) diff --git a/lbrynet/lbrylive/LiveStreamCreator.py b/lbrynet/lbrylive/LiveStreamCreator.py index 0f1f0bb88..c8fd6e937 100644 --- a/lbrynet/lbrylive/LiveStreamCreator.py +++ b/lbrynet/lbrylive/LiveStreamCreator.py @@ -6,7 +6,7 @@ from lbrynet.core.cryptoutils import get_lbry_hash_obj, get_pub_key, sign_with_p from Crypto import Random import binascii import logging -from lbrynet.conf import CRYPTSD_FILE_EXTENSION +from lbrynet.conf import settings from twisted.internet import interfaces, defer from twisted.protocols.basic import FileSender from zope.interface import implements @@ -23,7 +23,7 @@ class LiveStreamCreator(CryptStreamCreator): self.stream_info_manager = stream_info_manager self.delete_after_num = delete_after_num self.secret_pass_phrase = secret_pass_phrase - self.file_extension = CRYPTSD_FILE_EXTENSION + self.file_extension = settings.CRYPTSD_FILE_EXTENSION self.finished_blob_hashes = {} def _save_stream(self): diff --git a/lbrynet/lbrylive/client/LiveStreamMetadataHandler.py b/lbrynet/lbrylive/client/LiveStreamMetadataHandler.py index 33a3ad381..b98ed72c1 100644 --- a/lbrynet/lbrylive/client/LiveStreamMetadataHandler.py +++ b/lbrynet/lbrylive/client/LiveStreamMetadataHandler.py @@ -3,7 +3,7 @@ import logging from zope.interface import implements from twisted.internet import defer from twisted.python.failure import Failure -from lbrynet.conf import MAX_BLOB_INFOS_TO_REQUEST +from lbrynet.conf import settings from lbrynet.core.client.ClientRequest import ClientRequest, ClientPaidRequest from lbrynet.lbrylive.LiveBlob import LiveBlobInfo from lbrynet.core.cryptoutils import get_lbry_hash_obj, verify_signature @@ -136,7 +136,7 @@ class LiveStreamMetadataHandler(object): if count is not None: further_blobs_request['count'] = count else: - further_blobs_request['count'] = 
MAX_BLOB_INFOS_TO_REQUEST + further_blobs_request['count'] = settings.MAX_BLOB_INFOS_TO_REQUEST log.debug("Requesting %s blob infos from %s", str(further_blobs_request['count']), str(peer)) r_dict = {'further_blobs': further_blobs_request} response_identifier = 'further_blobs' diff --git a/lbrynet/lbrynet_console/Console.py b/lbrynet/lbrynet_console/Console.py index 3f700222e..ab0841a8b 100644 --- a/lbrynet/lbrynet_console/Console.py +++ b/lbrynet/lbrynet_console/Console.py @@ -1,3 +1,5 @@ +# TODO: THERE IS A LOT OF CODE IN THIS MODULE THAT SHOULD BE REMOVED +# AS IT IS REPEATED IN THE LBRYDaemon MODULE import logging import os.path import argparse @@ -10,13 +12,14 @@ if sys.platform == "darwin": from appdirs import user_data_dir from yapsy.PluginManager import PluginManager from twisted.internet import defer, threads, stdio, task, error -from jsonrpc.proxy import JSONRPCProxy +from lbrynet.lbrynet_daemon.auth.client import LBRYAPIClient +from lbrynet import analytics from lbrynet.core.Session import Session from lbrynet.lbrynet_console.ConsoleControl import ConsoleControl from lbrynet.lbrynet_console.Settings import Settings from lbrynet.lbryfilemanager.EncryptedFileManager import EncryptedFileManager -from lbrynet.conf import MIN_BLOB_DATA_PAYMENT_RATE, API_CONNECTION_STRING # , MIN_BLOB_INFO_PAYMENT_RATE +from lbrynet.conf import settings from lbrynet.core.utils import generate_id from lbrynet.core.StreamDescriptor import StreamDescriptorIdentifier from lbrynet.core.PaymentRateManager import PaymentRateManager @@ -209,7 +212,7 @@ class Console(): def _get_session(self): def get_default_data_rate(): d = self.settings.get_default_data_payment_rate() - d.addCallback(lambda rate: {"default_data_payment_rate": rate if rate is not None else MIN_BLOB_DATA_PAYMENT_RATE}) + d.addCallback(lambda rate: {"default_data_payment_rate": rate if rate is not None else settings.data_rate}) return d def get_wallet(): @@ -366,11 +369,13 @@ class Console(): ] def 
get_blob_request_handler_factory(rate): - self.blob_request_payment_rate_manager = PaymentRateManager(self.session.base_payment_rate_manager, - rate) - handlers.append(BlobRequestHandlerFactory(self.session.blob_manager, - self.session.wallet, - self.blob_request_payment_rate_manager)) + self.blob_request_payment_rate_manager = PaymentRateManager( + self.session.base_payment_rate_manager, rate + ) + handlers.append(BlobRequestHandlerFactory( + self.session.blob_manager, self.session.wallet, + self.blob_request_payment_rate_manager, analytics.Track() + )) d1 = self.settings.get_server_data_payment_rate() d1.addCallback(get_blob_request_handler_factory) @@ -537,7 +542,7 @@ def launch_lbry_console(): os.mkdir(data_dir) created_data_dir = True - daemon = JSONRPCProxy.from_url(API_CONNECTION_STRING) + daemon = LBRYAPIClient.config() try: daemon.is_running() log.info("Attempt to start lbrynet-console while lbrynet-daemon is running") diff --git a/lbrynet/lbrynet_console/plugins/BlindRepeater/__init__.py b/lbrynet/lbrynet_console/plugins/BlindRepeater/__init__.py index 01686de4c..44966fea5 100644 --- a/lbrynet/lbrynet_console/plugins/BlindRepeater/__init__.py +++ b/lbrynet/lbrynet_console/plugins/BlindRepeater/__init__.py @@ -1,6 +1,6 @@ from lbrynet.lbrynet_console import Plugin from twisted.internet import defer -from lbrynet.conf import MIN_VALUABLE_BLOB_HASH_PAYMENT_RATE, MIN_VALUABLE_BLOB_INFO_PAYMENT_RATE +from lbrynet.conf import settings from BlindRepeater import BlindRepeater from BlindInfoManager import BlindInfoManager from BlindRepeaterSettings import BlindRepeaterSettings @@ -59,9 +59,9 @@ class BlindRepeaterPlugin(Plugin.Plugin): def get_payment_rate_manager(rates): data_rate = rates[0][1] if rates[0][0] is True else None info_rate = rates[1][1] if rates[1][0] is True else None - info_rate = info_rate if info_rate is not None else MIN_VALUABLE_BLOB_INFO_PAYMENT_RATE + info_rate = info_rate if info_rate is not None else settings.min_valuable_info_rate 
hash_rate = rates[2][1] if rates[2][0] is True else None - hash_rate = hash_rate if hash_rate is not None else MIN_VALUABLE_BLOB_HASH_PAYMENT_RATE + hash_rate = hash_rate if hash_rate is not None else settings.min_valuable_hash_rate self.payment_rate_manager = BlindRepeaterPaymentRateManager(default_payment_rate_manager, info_rate, hash_rate, blob_data_rate=data_rate) diff --git a/lbrynet/lbrynet_daemon/Daemon.py b/lbrynet/lbrynet_daemon/Daemon.py index 44fa1902b..bb0f0a9cf 100644 --- a/lbrynet/lbrynet_daemon/Daemon.py +++ b/lbrynet/lbrynet_daemon/Daemon.py @@ -5,7 +5,6 @@ import os import platform import random import re -import string import subprocess import sys import base58 @@ -20,43 +19,43 @@ from twisted.web import server from twisted.internet import defer, threads, error, reactor, task from twisted.internet.task import LoopingCall from txjsonrpc import jsonrpclib -from txjsonrpc.web import jsonrpc -from txjsonrpc.web.jsonrpc import Handler from jsonschema import ValidationError from lbrynet import __version__ as lbrynet_version +# TODO: importing this when internet is disabled raises a socket.gaierror from lbryum.version import LBRYUM_VERSION as lbryum_version + +from lbrynet import __version__ as lbrynet_version +from lbrynet.conf import settings as lbrynet_settings from lbrynet import analytics +from lbrynet import reflector +from lbrynet.metadata.Metadata import Metadata, verify_name_characters +from lbrynet.metadata.Fee import FeeValidator +from lbrynet.core import log_support +from lbrynet.core import utils +from lbrynet.core.utils import generate_id +from lbrynet.core.StreamDescriptor import StreamDescriptorIdentifier, download_sd_blob, BlobStreamDescriptorReader +from lbrynet.core.Session import Session +from lbrynet.core.looping_call_manager import LoopingCallManager from lbrynet.core.server.BlobRequestHandler import BlobRequestHandlerFactory from lbrynet.core.server.ServerProtocol import ServerProtocolFactory from lbrynet.core.Error import 
UnknownNameError, InsufficientFundsError, InvalidNameError +from lbrynet.core.PTCWallet import PTCWallet +from lbrynet.core.Wallet import LBRYcrdWallet, LBRYumWallet +from lbrynet.lbrynet_console.Settings import Settings +from lbrynet.lbryfilemanager.EncryptedFileManager import EncryptedFileManager from lbrynet.lbryfile.StreamDescriptor import EncryptedFileStreamType from lbrynet.lbryfile.client.EncryptedFileDownloader import EncryptedFileSaverFactory, EncryptedFileOpenerFactory from lbrynet.lbryfile.client.EncryptedFileOptions import add_lbry_file_to_sd_identifier +from lbrynet.lbryfile.EncryptedFileMetadataManager import DBEncryptedFileMetadataManager +from lbrynet.lbryfile.EncryptedFileMetadataManager import TempEncryptedFileMetadataManager from lbrynet.lbrynet_daemon.UIManager import UIManager from lbrynet.lbrynet_daemon.Downloader import GetStream from lbrynet.lbrynet_daemon.Publisher import Publisher from lbrynet.lbrynet_daemon.ExchangeRateManager import ExchangeRateManager from lbrynet.lbrynet_daemon.Lighthouse import LighthouseClient -from lbrynet.metadata.Metadata import Metadata, verify_name_characters -from lbrynet.core import log_support -from lbrynet.core import utils -from lbrynet.core.utils import generate_id -from lbrynet.lbrynet_console.Settings import Settings -from lbrynet.conf import MIN_BLOB_DATA_PAYMENT_RATE, DEFAULT_MAX_SEARCH_RESULTS, \ - KNOWN_DHT_NODES, DEFAULT_MAX_KEY_FEE, DEFAULT_WALLET, \ - DEFAULT_SEARCH_TIMEOUT, DEFAULT_CACHE_TIME, DEFAULT_UI_BRANCH, \ - LOG_POST_URL, LOG_FILE_NAME, REFLECTOR_SERVERS, SEARCH_SERVERS -from lbrynet.conf import DEFAULT_SD_DOWNLOAD_TIMEOUT -from lbrynet.conf import DEFAULT_TIMEOUT, is_generous_host -from lbrynet import conf -from lbrynet.core.StreamDescriptor import StreamDescriptorIdentifier, download_sd_blob, BlobStreamDescriptorReader -from lbrynet.core.Session import Session -from lbrynet.core.PTCWallet import PTCWallet -from lbrynet.core.Wallet import LBRYcrdWallet, LBRYumWallet -from 
lbrynet.lbryfilemanager.EncryptedFileManager import EncryptedFileManager -from lbrynet.lbryfile.EncryptedFileMetadataManager import DBEncryptedFileMetadataManager, TempEncryptedFileMetadataManager -from lbrynet import reflector +from lbrynet.lbrynet_daemon.auth.server import AuthJSONRPCServer + # TODO: this code snippet is everywhere. Make it go away @@ -68,7 +67,7 @@ else: if not os.path.isdir(log_dir): os.mkdir(log_dir) -lbrynet_log = os.path.join(log_dir, LOG_FILE_NAME) +lbrynet_log = os.path.join(log_dir, lbrynet_settings.LOG_FILE_NAME) log = logging.getLogger(__name__) @@ -80,7 +79,7 @@ else: INITIALIZING_CODE = 'initializing' LOADING_DB_CODE = 'loading_db' -LOADING_WALLET_CODE = 'loading_wallet' +LOADING_wallet_CODE = 'loading_wallet' LOADING_FILE_MANAGER_CODE = 'loading_file_manager' LOADING_SERVER_CODE = 'loading_server' STARTED_CODE = 'started' @@ -88,7 +87,7 @@ WAITING_FOR_FIRST_RUN_CREDITS = 'waiting_for_credits' STARTUP_STAGES = [ (INITIALIZING_CODE, 'Initializing...'), (LOADING_DB_CODE, 'Loading databases...'), - (LOADING_WALLET_CODE, 'Catching up with the blockchain... %s'), + (LOADING_wallet_CODE, 'Catching up with the blockchain... %s'), (LOADING_FILE_MANAGER_CODE, 'Setting up file manager'), (LOADING_SERVER_CODE, 'Starting lbrynet'), (STARTED_CODE, 'Started lbrynet'), @@ -109,25 +108,42 @@ STREAM_STAGES = [ CONNECT_CODE_VERSION_CHECK = 'version_check' CONNECT_CODE_NETWORK = 'network_connection' -CONNECT_CODE_WALLET = 'wallet_catchup_lag' +CONNECT_CODE_wallet = 'wallet_catchup_lag' CONNECTION_PROBLEM_CODES = [ (CONNECT_CODE_VERSION_CHECK, "There was a problem checking for updates on github"), (CONNECT_CODE_NETWORK, "Your internet connection appears to have been interrupted"), - (CONNECT_CODE_WALLET, "Synchronization with the blockchain is lagging... if this continues try restarting LBRY") + (CONNECT_CODE_wallet, "Synchronization with the blockchain is lagging... 
if this continues try restarting LBRY") ] -ALLOWED_DURING_STARTUP = ['is_running', 'is_first_run', - 'get_time_behind_blockchain', 'stop', - 'daemon_status', 'get_start_notice', - 'version', 'get_search_servers'] - BAD_REQUEST = 400 NOT_FOUND = 404 OK_CODE = 200 +PENDING_LBRY_ID = "not set" + + +class Checker: + """The looping calls the daemon runs""" + INTERNET_CONNECTION = 'internet_connection_checker' + VERSION = 'version_checker' + CONNECTION_PROBLEM = 'connection_problem_checker' + PENDING_CLAIM = 'pending_claim_checker' + + +class FileID: + """The different ways a file can be identified""" + NAME = 'name' + SD_HASH = 'sd_hash' + FILE_NAME = 'file_name' + + # TODO add login credentials in a conf file # TODO alert if your copy of a lbry file is out of date with the name record +REMOTE_SERVER = "www.lbry.io" + +class NoValidSearch(Exception): + pass class Parameters(object): @@ -135,220 +151,171 @@ class Parameters(object): self.__dict__.update(kwargs) -class Daemon(jsonrpc.JSONRPC): +class CheckInternetConnection(object): + def __init__(self, daemon): + self.daemon = daemon + + def __call__(self): + self.daemon.connected_to_internet = utils.check_connection() + + +class CheckRemoteVersions(object): + def __init__(self, daemon): + self.daemon = daemon + + def __call__(self): + d = self._get_lbrynet_version() + d.addCallback(lambda _: self._get_lbryum_version()) + + def _get_lbryum_version(self): + try: + version = get_lbryum_version_from_github() + log.info( + "remote lbryum %s > local lbryum %s = %s", + version, lbryum_version, + utils.version_is_greater_than(version, lbryum_version) + ) + self.daemon.git_lbryum_version = version + return defer.succeed(None) + except Exception: + log.info("Failed to get lbryum version from git") + self.daemon.git_lbryum_version = None + return defer.fail(None) + + def _get_lbrynet_version(self): + try: + version = get_lbrynet_version_from_github() + log.info( + "remote lbrynet %s > local lbrynet %s = %s", + version, 
lbrynet_version, + utils.version_is_greater_than(version, lbrynet_version) + ) + self.daemon.git_lbrynet_version = version + return defer.succeed(None) + except Exception: + log.info("Failed to get lbrynet version from git") + self.daemon.git_lbrynet_version = None + return defer.fail(None) + + +class AlwaysSend(object): + def __init__(self, value_generator, *args, **kwargs): + self.value_generator = value_generator + self.args = args + self.kwargs = kwargs + + def __call__(self): + d = defer.maybeDeferred(self.value_generator, *self.args, **self.kwargs) + d.addCallback(lambda v: (True, v)) + return d + + +def calculate_available_blob_size(blob_manager): + d = blob_manager.get_all_verified_blobs() + d.addCallback( + lambda blobs: defer.DeferredList([blob_manager.get_blob_length(b) for b in blobs])) + d.addCallback(lambda blob_lengths: sum(val for success, val in blob_lengths if success)) + return d + + +class Daemon(AuthJSONRPCServer): """ LBRYnet daemon, a jsonrpc interface to lbry functions """ - isLeaf = True - - def __init__(self, root, wallet_type=None): - jsonrpc.JSONRPC.__init__(self) + def __init__(self, root): + AuthJSONRPCServer.__init__(self, lbrynet_settings.use_auth_http) reactor.addSystemEventTrigger('before', 'shutdown', self._shutdown) + self.allowed_during_startup = ['is_running', 'is_first_run', + 'get_time_behind_blockchain', 'stop', + 'daemon_status', 'get_start_notice', + 'version', 'get_search_servers'] + last_version = {'last_version': {'lbrynet': lbrynet_version, 'lbryum': lbryum_version}} + lbrynet_settings.update(last_version) + self.db_dir = lbrynet_settings.data_dir + self.download_directory = lbrynet_settings.download_directory + self.created_data_dir = False + if not os.path.exists(self.db_dir): + os.mkdir(self.db_dir) + self.created_data_dir = True + if lbrynet_settings.BLOBFILES_DIR == "blobfiles": + self.blobfile_dir = os.path.join(self.db_dir, "blobfiles") + else: + log.info("Using non-default blobfiles directory: %s", 
lbrynet_settings.BLOBFILES_DIR) + self.blobfile_dir = lbrynet_settings.BLOBFILES_DIR + + self.run_on_startup = lbrynet_settings.run_on_startup + self.data_rate = lbrynet_settings.data_rate + self.max_key_fee = lbrynet_settings.max_key_fee + self.max_upload = lbrynet_settings.max_upload + self.max_download = lbrynet_settings.max_download + self.upload_log = lbrynet_settings.upload_log + self.search_timeout = lbrynet_settings.search_timeout + self.download_timeout = lbrynet_settings.download_timeout + self.max_search_results = lbrynet_settings.max_search_results + self.run_reflector_server = lbrynet_settings.run_reflector_server + self.wallet_type = lbrynet_settings.wallet + self.delete_blobs_on_remove = lbrynet_settings.delete_blobs_on_remove + self.peer_port = lbrynet_settings.peer_port + self.reflector_port = lbrynet_settings.reflector_port + self.dht_node_port = lbrynet_settings.dht_node_port + self.use_upnp = lbrynet_settings.use_upnp + self.start_lbrycrdd = lbrynet_settings.start_lbrycrdd + self.cache_time = lbrynet_settings.cache_time + self.startup_scripts = lbrynet_settings.startup_scripts + self.startup_status = STARTUP_STAGES[0] self.startup_message = None self.announced_startup = False self.connected_to_internet = True self.connection_problem = None - self.query_handlers = {} self.git_lbrynet_version = None self.git_lbryum_version = None self.ui_version = None self.ip = None - # TODO: this is confusing to set here, and then to be reset below. 
- self.wallet_type = wallet_type self.first_run = None self.log_file = lbrynet_log self.current_db_revision = 1 - self.run_server = True self.session = None - self.exchange_rate_manager = ExchangeRateManager() - self.lighthouse_client = LighthouseClient() - self.waiting_on = {} - self.streams = {} - self.pending_claims = {} - self.known_dht_nodes = KNOWN_DHT_NODES self.first_run_after_update = False self.uploaded_temp_files = [] self._session_id = base58.b58encode(generate_id()) - if os.name == "nt": - from lbrynet.winhelpers.knownpaths import get_path, FOLDERID, UserHandle - default_download_directory = get_path(FOLDERID.Downloads, UserHandle.current) - self.db_dir = os.path.join(get_path(FOLDERID.RoamingAppData, UserHandle.current), "lbrynet") - try: - os.makedirs(self.db_dir) - except OSError: - if not os.path.isdir(self.db_dir): - raise - elif sys.platform == "darwin": - default_download_directory = os.path.join(os.path.expanduser("~"), 'Downloads') - self.db_dir = user_data_dir("LBRY") - else: - default_download_directory = os.path.join(os.path.expanduser("~"), 'Downloads') - self.db_dir = os.path.join(os.path.expanduser("~"), ".lbrynet") - try: - if not os.path.isdir(default_download_directory): - os.mkdir(default_download_directory) - except: - log.info("Couldn't make download directory, using home") - default_download_directory = os.path.expanduser("~") + self.analytics_manager = None + self.lbryid = PENDING_LBRY_ID - old_conf_path = os.path.join(self.db_dir, 'daemon_settings.json') self.daemon_conf = os.path.join(self.db_dir, 'daemon_settings.yml') - if os.path.isfile(old_conf_path): - log.info("Migrating .json config file to .yml") - tmp_settings = utils.load_settings(old_conf_path) - utils.save_settings(self.daemon_conf, tmp_settings) - try: - os.remove(old_conf_path) - log.info("Cleaned up old config file") - except: - log.warning("Failed to remove old config file") - - self.default_settings = { - 'run_on_startup': False, - 'data_rate': 
MIN_BLOB_DATA_PAYMENT_RATE, - 'max_key_fee': DEFAULT_MAX_KEY_FEE, - 'download_directory': default_download_directory, - 'max_upload': 0.0, - 'max_download': 0.0, - 'upload_log': True, - 'search_timeout': DEFAULT_SEARCH_TIMEOUT, - 'download_timeout': DEFAULT_TIMEOUT, - 'max_search_results': DEFAULT_MAX_SEARCH_RESULTS, - 'wallet_type': DEFAULT_WALLET, - 'delete_blobs_on_remove': True, - 'peer_port': 3333, - 'dht_node_port': 4444, - 'reflector_port': 5566, - 'use_upnp': True, - 'start_lbrycrdd': True, - 'requested_first_run_credits': False, - 'run_reflector_server': False, - 'cache_time': DEFAULT_CACHE_TIME, - 'startup_scripts': [], - 'last_version': {'lbrynet': lbrynet_version, 'lbryum': lbryum_version} - } if os.path.isfile(self.daemon_conf): - loaded_settings = utils.load_settings(self.daemon_conf) - missing_settings = {} - removed_settings = {} - for k in self.default_settings.keys(): - if k not in loaded_settings.keys(): - missing_settings[k] = self.default_settings[k] - for k in loaded_settings.keys(): - if not k in self.default_settings.keys(): - log.info("Removing unused setting: " + k + " with value: " + str(loaded_settings[k])) - removed_settings[k] = loaded_settings[k] - del loaded_settings[k] - for k in missing_settings.keys(): - log.info("Adding missing setting: " + k + " with default value: " + str(missing_settings[k])) - loaded_settings[k] = missing_settings[k] - if loaded_settings['wallet_type'] != self.wallet_type and self.wallet_type: - loaded_settings['wallet_type'] = self.wallet_type + conf_settings = utils.load_settings(self.daemon_conf) + if 'last_version' in conf_settings: + if utils.version_is_greater_than(lbrynet_version, conf_settings['last_version']['lbrynet']): + self.first_run_after_update = True + log.info("First run after update") + log.info("lbrynet %s --> %s", conf_settings['last_version']['lbrynet'], lbrynet_version) + log.info("lbryum %s --> %s", conf_settings['last_version']['lbryum'], lbryum_version) - if missing_settings or 
removed_settings: - log.info("Updated and loaded lbrynet-daemon configuration") - else: - log.info("Loaded lbrynet-daemon configuration") - self.session_settings = loaded_settings - else: - missing_settings = self.default_settings - log.info("Writing default settings : " + json.dumps(self.default_settings) + " --> " + str(self.daemon_conf)) - self.session_settings = self.default_settings + # utils.save_settings(self.daemon_conf) - if 'last_version' in missing_settings.keys(): - self.session_settings['last_version'] = None - - if self.session_settings['last_version'] != self.default_settings['last_version']: - self.session_settings['last_version'] = self.default_settings['last_version'] - self.first_run_after_update = True - log.info("First run after update") - log.info("lbrynet %s --> %s" % (self.session_settings['last_version']['lbrynet'], self.default_settings['last_version']['lbrynet'])) - log.info("lbryum %s --> %s" % (self.session_settings['last_version']['lbryum'], self.default_settings['last_version']['lbryum'])) - if "0.4.5" == self.default_settings['last_version']['lbrynet']: - log.info("Lowering name cache time") - self.session_settings['cache_time'] = DEFAULT_CACHE_TIME - - utils.save_settings(self.daemon_conf, self.session_settings) - - self.run_on_startup = self.session_settings['run_on_startup'] - self.data_rate = self.session_settings['data_rate'] - self.max_key_fee = self.session_settings['max_key_fee'] - self.download_directory = self.session_settings['download_directory'] - self.max_upload = self.session_settings['max_upload'] - self.max_download = self.session_settings['max_download'] - self.upload_log = self.session_settings['upload_log'] - self.search_timeout = self.session_settings['search_timeout'] - self.download_timeout = self.session_settings['download_timeout'] - self.max_search_results = self.session_settings['max_search_results'] - self.run_reflector_server = self.session_settings['run_reflector_server'] - #### - # - # Ignore the saved 
wallet type. Some users will have their wallet type - # saved as lbrycrd and we want wallets to be lbryum unless explicitly - # set on the command line to be lbrycrd. - # - # if self.session_settings['wallet_type'] in WALLET_TYPES and not wallet_type: - # self.wallet_type = self.session_settings['wallet_type'] - # log.info("Using wallet type %s from config" % self.wallet_type) - # else: - # self.wallet_type = wallet_type - # self.session_settings['wallet_type'] = wallet_type - # log.info("Using wallet type %s specified from command line" % self.wallet_type) - # - # Instead, if wallet is not set on the command line, default to the default wallet - # - if wallet_type: - log.info("Using wallet type %s specified from command line", wallet_type) - self.wallet_type = wallet_type - else: - log.info("Using the default wallet type %s", DEFAULT_WALLET) - self.wallet_type = DEFAULT_WALLET - if self.wallet_type not in conf.WALLET_TYPES: - raise ValueError('Wallet Type {} is not valid'.format(wallet_type)) - # - #### - self.delete_blobs_on_remove = self.session_settings['delete_blobs_on_remove'] - self.peer_port = self.session_settings['peer_port'] - self.reflector_port = self.session_settings['reflector_port'] - self.dht_node_port = self.session_settings['dht_node_port'] - self.use_upnp = self.session_settings['use_upnp'] - self.start_lbrycrdd = self.session_settings['start_lbrycrdd'] - self.requested_first_run_credits = self.session_settings['requested_first_run_credits'] - self.cache_time = self.session_settings['cache_time'] - self.startup_scripts = self.session_settings['startup_scripts'] - - if os.path.isfile(os.path.join(self.db_dir, "stream_info_cache.json")): - f = open(os.path.join(self.db_dir, "stream_info_cache.json"), "r") - self.name_cache = json.loads(f.read()) - f.close() - log.info("Loaded claim info cache") - else: - self.name_cache = {} - - self.set_wallet_attributes() - - self.created_data_dir = False - if not os.path.exists(self.db_dir): - 
os.mkdir(self.db_dir) - self.created_data_dir = True - - self.blobfile_dir = os.path.join(self.db_dir, "blobfiles") self.wallet_user = None self.wallet_password = None - - self.internet_connection_checker = LoopingCall(self._check_network_connection) - self.version_checker = LoopingCall(self._check_remote_versions) - self.connection_problem_checker = LoopingCall(self._check_connection_problems) - self.pending_claim_checker = LoopingCall(self._check_pending_claims) - self.send_heartbeat = LoopingCall(self._send_heartbeat) - # self.lbrynet_connection_checker = LoopingCall(self._check_lbrynet_connection) - + self.query_handlers = {} + self.waiting_on = {} + self.streams = {} + self.pending_claims = {} + self.name_cache = {} + self.set_wallet_attributes() + self.exchange_rate_manager = ExchangeRateManager() + self.lighthouse_client = LighthouseClient() + calls = { + Checker.INTERNET_CONNECTION: LoopingCall(CheckInternetConnection(self)), + Checker.VERSION: LoopingCall(CheckRemoteVersions(self)), + Checker.CONNECTION_PROBLEM: LoopingCall(self._check_connection_problems), + Checker.PENDING_CLAIM: LoopingCall(self._check_pending_claims), + } + self.looping_call_manager = LoopingCallManager(calls) self.sd_identifier = StreamDescriptorIdentifier() self.stream_info_manager = TempEncryptedFileMetadataManager() self.settings = Settings(self.db_dir) @@ -357,16 +324,17 @@ class Daemon(jsonrpc.JSONRPC): self.lbry_file_metadata_manager = None self.lbry_file_manager = None - if self.wallet_type == "lbrycrd": - if os.path.isfile(self.lbrycrd_conf): - log.info("Using lbrycrd.conf found at " + self.lbrycrd_conf) - else: - log.info("No lbrycrd.conf found at " + self.lbrycrd_conf + ". 
Generating now...") - password = "".join(random.SystemRandom().choice(string.ascii_letters + string.digits + "_") for i in range(20)) - with open(self.lbrycrd_conf, 'w') as f: - f.write("rpcuser=rpcuser\n") - f.write("rpcpassword=" + password) - log.info("Done writing lbrycrd.conf") + + @AuthJSONRPCServer.subhandler + def _exclude_lbrycrd_only_commands_from_lbryum_session(self, request): + request.content.seek(0, 0) + content = request.content.read() + parsed = jsonrpclib.loads(content) + function_path = parsed.get("method") + if self.wallet_type == "lbryum" and function_path in ['set_miner', 'get_miner_status']: + log.warning("Mining commands are not available in lbryum") + raise Exception("Command not available in lbryum") + return True def set_wallet_attributes(self): self.wallet_dir = None @@ -393,100 +361,7 @@ class Daemon(jsonrpc.JSONRPC): f.write(str(self.lbrycrdd_path)) f.close() - def _responseFailed(self, err, call): - log.debug(err.getTraceback()) - - def render(self, request): - origin = request.getHeader("Origin") - referer = request.getHeader("Referer") - - if origin not in [None, 'http://localhost:5279']: - log.warning("Attempted api call from %s", origin) - return server.failure - - if referer is not None and not referer.startswith('http://localhost:5279/'): - log.warning("Attempted api call from %s", referer) - return server.failure - - request.content.seek(0, 0) - # Unmarshal the JSON-RPC data. - content = request.content.read() - parsed = jsonrpclib.loads(content) - functionPath = parsed.get("method") - args = parsed.get('params') - - #TODO convert args to correct types if possible - - id = parsed.get('id') - version = parsed.get('jsonrpc') - if version: - version = int(float(version)) - elif id and not version: - version = jsonrpclib.VERSION_1 - else: - version = jsonrpclib.VERSION_PRE1 - # XXX this all needs to be re-worked to support logic for multiple - # versions... 
- - if not self.announced_startup: - if functionPath not in ALLOWED_DURING_STARTUP: - return server.failure - - if self.wallet_type == "lbryum" and functionPath in ['set_miner', 'get_miner_status']: - return server.failure - - try: - function = self._getFunction(functionPath) - except jsonrpclib.Fault, f: - self._cbRender(f, request, id, version) - else: - request.setHeader("Access-Control-Allow-Origin", "localhost") - request.setHeader("content-type", "text/json") - if args == [{}]: - d = defer.maybeDeferred(function) - else: - d = defer.maybeDeferred(function, *args) - - # cancel the response if the connection is broken - notify_finish = request.notifyFinish() - notify_finish.addErrback(self._responseFailed, d) - d.addErrback(self._ebRender, id) - d.addCallback(self._cbRender, request, id, version) - d.addErrback(notify_finish.errback) - return server.NOT_DONE_YET - - def _cbRender(self, result, request, id, version): - def default_decimal(obj): - if isinstance(obj, Decimal): - return float(obj) - - if isinstance(result, Handler): - result = result.result - - if isinstance(result, dict): - result = result['result'] - - if version == jsonrpclib.VERSION_PRE1: - if not isinstance(result, jsonrpclib.Fault): - result = (result,) - # Convert the result (python) to JSON-RPC - try: - s = jsonrpclib.dumps(result, version=version, default=default_decimal) - except: - f = jsonrpclib.Fault(self.FAILURE, "can't serialize output") - s = jsonrpclib.dumps(f, version=version) - - request.setHeader("content-length", str(len(s))) - request.write(s) - request.finish() - - def _ebRender(self, failure, id): - if isinstance(failure.value, jsonrpclib.Fault): - return failure.value - log.error(failure) - return jsonrpclib.Fault(self.FAILURE, "error") - - def setup(self, branch=DEFAULT_UI_BRANCH, user_specified=False, branch_specified=False, host_ui=True): + def setup(self): def _log_starting_vals(): log.info("Starting balance: " + str(self.session.wallet.wallet_balance)) return 
defer.succeed(None) @@ -519,25 +394,24 @@ class Daemon(jsonrpc.JSONRPC): log.info("Starting lbrynet-daemon") - self.internet_connection_checker.start(3600) - self.version_checker.start(3600 * 12) - self.connection_problem_checker.start(1) + self.looping_call_manager.start(Checker.INTERNET_CONNECTION, 3600) + self.looping_call_manager.start(Checker.VERSION, 3600 * 12) + self.looping_call_manager.start(Checker.CONNECTION_PROBLEM, 1) self.exchange_rate_manager.start() - if host_ui: - self.lbry_ui_manager.update_checker.start(1800, now=False) - d = defer.Deferred() - if host_ui: - d.addCallback(lambda _: self.lbry_ui_manager.setup(branch=branch, - user_specified=user_specified, - branch_specified=branch_specified)) + + if lbrynet_settings.host_ui: + self.lbry_ui_manager.update_checker.start(1800, now=False) + d.addCallback(lambda _: self.lbry_ui_manager.setup()) d.addCallback(lambda _: self._initial_setup()) d.addCallback(lambda _: threads.deferToThread(self._setup_data_directory)) d.addCallback(lambda _: self._check_db_migration()) d.addCallback(lambda _: self._get_settings()) + d.addCallback(lambda _: self._load_caches()) d.addCallback(lambda _: self._set_events()) d.addCallback(lambda _: self._get_session()) + d.addCallback(lambda _: self._get_analytics()) d.addCallback(lambda _: add_lbry_file_to_sd_identifier(self.sd_identifier)) d.addCallback(lambda _: self._setup_stream_identifier()) d.addCallback(lambda _: self._setup_lbry_file_manager()) @@ -545,26 +419,11 @@ class Daemon(jsonrpc.JSONRPC): d.addCallback(lambda _: self._setup_server()) d.addCallback(lambda _: _log_starting_vals()) d.addCallback(lambda _: _announce_startup()) - d.addCallback(lambda _: self._load_analytics_api()) - # TODO: handle errors here d.callback(None) - - return defer.succeed(None) - - def _load_analytics_api(self): - self.analytics_api = analytics.Api.load() - self.send_heartbeat.start(60) - - def _send_heartbeat(self): - heartbeat = self._events.heartbeat() - 
self.analytics_api.track(heartbeat) - - def _send_download_started(self, name, stream_info=None): - event = self._events.download_started(name, stream_info) - self.analytics_api.track(event) + return d def _get_platform(self): - r = { + r = { "processor": platform.processor(), "python_version": platform.python_version(), "platform": platform.platform(), @@ -591,6 +450,12 @@ class Daemon(jsonrpc.JSONRPC): d = _log_platform() return d + def _load_caches(self): + if os.path.isfile(os.path.join(self.db_dir, "stream_info_cache.json")): + with open(os.path.join(self.db_dir, "stream_info_cache.json"), "r") as stream_info_cache: + self.name_cache = json.loads(stream_info_cache.read()) + log.info("Loaded claim info cache") + def _set_events(self): context = analytics.make_context(self._get_platform(), self.wallet_type) self._events = analytics.Events(context, base58.b58encode(self.lbryid), self._session_id) @@ -608,43 +473,6 @@ class Daemon(jsonrpc.JSONRPC): d = download_sd_blob(self.session, wonderfullife_sh, self.session.base_payment_rate_manager) d.addCallbacks(lambda _: _log_success, lambda _: _log_failure) - def _check_remote_versions(self): - def _get_lbryum_version(): - try: - r = urlopen("https://raw.githubusercontent.com/lbryio/lbryum/master/lib/version.py").read().split('\n') - version = next(line.split("=")[1].split("#")[0].replace(" ", "") - for line in r if "LBRYUM_VERSION" in line) - version = version.replace("'", "") - log.info( - "remote lbryum %s > local lbryum %s = %s", - version, lbryum_version, - utils.version_is_greater_than(version, lbryum_version) - ) - self.git_lbryum_version = version - return defer.succeed(None) - except Exception: - log.info("Failed to get lbryum version from git") - self.git_lbryum_version = None - return defer.fail(None) - - def _get_lbrynet_version(): - try: - version = get_lbrynet_version_from_github() - log.info( - "remote lbrynet %s > local lbrynet %s = %s", - version, lbrynet_version, - 
utils.version_is_greater_than(version, lbrynet_version) - ) - self.git_lbrynet_version = version - return defer.succeed(None) - except Exception: - log.info("Failed to get lbrynet version from git") - self.git_lbrynet_version = None - return defer.fail(None) - - d = _get_lbrynet_version() - d.addCallback(lambda _: _get_lbryum_version()) - def _check_connection_problems(self): if not self.git_lbrynet_version or not self.git_lbryum_version: self.connection_problem = CONNECTION_PROBLEM_CODES[0] @@ -671,7 +499,7 @@ class Daemon(jsonrpc.JSONRPC): def _get_and_start_file(name): d = defer.succeed(self.pending_claims.pop(name)) - d.addCallback(lambda _: self._get_lbry_file("name", name, return_json=False)) + d.addCallback(lambda _: self._get_lbry_file(FileID.NAME, name, return_json=False)) d.addCallback(lambda l: _start_file(l) if l.stopped else "LBRY file was already running") def re_add_to_pending_claims(name): @@ -765,8 +593,12 @@ class Daemon(jsonrpc.JSONRPC): def _setup_query_handlers(self): handlers = [ - BlobRequestHandlerFactory(self.session.blob_manager, self.session.wallet, - self.session.payment_rate_manager), + BlobRequestHandlerFactory( + self.session.blob_manager, + self.session.wallet, + self.session.payment_rate_manager, + self.analytics_manager.track + ), self.session.wallet.get_wallet_info_query_handler_factory(), ] @@ -799,23 +631,24 @@ class Daemon(jsonrpc.JSONRPC): for lm, lp in [('lbrynet', lbrynet_log)]: if os.path.isfile(lp): if exclude_previous: - f = open(lp, "r") - f.seek(PREVIOUS_NET_LOG) - log_contents = f.read() - f.close() + with open( lp, "r") as f: + f.seek(PREVIOUS_NET_LOG) + log_contents = f.read() else: - f = open(lp, "r") - log_contents = f.read() - f.close() + with open(lp, "r") as f: + log_contents = f.read() + if self.lbryid is not PENDING_LBRY_ID: + id_hash = base58.b58encode(self.lbryid)[:20] + else: + id_hash = self.lbryid params = { - 'date': datetime.utcnow().strftime('%Y%m%d-%H%M%S'), - 'hash': 
base58.b58encode(self.lbryid)[:20], - 'sys': platform.system(), - 'type': "%s-%s" % (lm, log_type) if log_type else lm, - 'log': log_contents - } - requests.post(LOG_POST_URL, params) - + 'date': datetime.utcnow().strftime('%Y%m%d-%H%M%S'), + 'hash': id_hash, + 'sys': platform.system(), + 'type': "%s-%s" % (lm, log_type) if log_type else lm, + 'log': log_contents + } + requests.post(lbrynet_settings.LOG_POST_URL, params) return defer.succeed(None) else: return defer.succeed(None) @@ -830,22 +663,20 @@ class Daemon(jsonrpc.JSONRPC): def _shutdown(self): log.info("Closing lbrynet session") log.info("Status at time of shutdown: " + self.startup_status[0]) - if self.internet_connection_checker.running: - self.internet_connection_checker.stop() - if self.version_checker.running: - self.version_checker.stop() - if self.connection_problem_checker.running: - self.connection_problem_checker.stop() + self.looping_call_manager.shutdown() + if self.analytics_manager: + self.analytics_manager.shutdown() if self.lbry_ui_manager.update_checker.running: self.lbry_ui_manager.update_checker.stop() - if self.pending_claim_checker.running: - self.pending_claim_checker.stop() - if self.send_heartbeat.running: - self.send_heartbeat.stop() self._clean_up_temp_files() - d = self._upload_log(log_type="close", exclude_previous=False if self.first_run else True) + try: + d = self._upload_log( + log_type="close", exclude_previous=False if self.first_run else True) + except Exception: + log.warn('Failed to upload log', exc_info=True) + d = defer.succeed(None) d.addCallback(lambda _: self._stop_server()) d.addCallback(lambda _: self._stop_reflector()) d.addErrback(lambda err: True) @@ -857,87 +688,43 @@ class Daemon(jsonrpc.JSONRPC): return d def _update_settings(self, settings): - for k in settings.keys(): - if k == 'run_on_startup': - if type(settings['run_on_startup']) is bool: - self.session_settings['run_on_startup'] = settings['run_on_startup'] - else: - return defer.fail() - elif k == 
'data_rate': - if type(settings['data_rate']) is float: - self.session_settings['data_rate'] = settings['data_rate'] - elif type(settings['data_rate']) is int: - self.session_settings['data_rate'] = float(settings['data_rate']) - else: - return defer.fail() - elif k == 'max_key_fee': - if type(settings['max_key_fee']) is float: - self.session_settings['max_key_fee'] = settings['max_key_fee'] - elif type(settings['max_key_fee']) is int: - self.session_settings['max_key_fee'] = float(settings['max_key_fee']) - else: - return defer.fail() - elif k == 'download_directory': - if type(settings['download_directory']) is unicode: - if os.path.isdir(settings['download_directory']): - self.session_settings['download_directory'] = settings['download_directory'] - else: - pass - else: - return defer.fail() - elif k == 'max_upload': - if type(settings['max_upload']) is float: - self.session_settings['max_upload'] = settings['max_upload'] - elif type(settings['max_upload']) is int: - self.session_settings['max_upload'] = float(settings['max_upload']) - else: - return defer.fail() - elif k == 'max_download': - if type(settings['max_download']) is float: - self.session_settings['max_download'] = settings['max_download'] - if type(settings['max_download']) is int: - self.session_settings['max_download'] = float(settings['max_download']) - else: - return defer.fail() - elif k == 'upload_log': - if type(settings['upload_log']) is bool: - self.session_settings['upload_log'] = settings['upload_log'] - else: - return defer.fail() - elif k == 'download_timeout': - if type(settings['download_timeout']) is int: - self.session_settings['download_timeout'] = settings['download_timeout'] - elif type(settings['download_timeout']) is float: - self.session_settings['download_timeout'] = int(settings['download_timeout']) - else: - return defer.fail() - elif k == 'search_timeout': - if type(settings['search_timeout']) is float: - self.session_settings['search_timeout'] = settings['search_timeout'] 
- elif type(settings['search_timeout']) is int: - self.session_settings['search_timeout'] = float(settings['search_timeout']) - else: - return defer.fail() - elif k == 'cache_time': - if type(settings['cache_time']) is int: - self.session_settings['cache_time'] = settings['cache_time'] - elif type(settings['cache_time']) is float: - self.session_settings['cache_time'] = int(settings['cache_time']) - else: - return defer.fail() + setting_types = { + 'run_on_startup': bool, + 'data_rate': float, + 'max_key_fee': float, + 'download_directory': str, + 'max_upload': float, + 'max_download': float, + 'upload_log': bool, + 'download_timeout': int, + 'search_timeout': float, + 'cache_time': int + } - self.run_on_startup = self.session_settings['run_on_startup'] - self.data_rate = self.session_settings['data_rate'] - self.max_key_fee = self.session_settings['max_key_fee'] - self.download_directory = self.session_settings['download_directory'] - self.max_upload = self.session_settings['max_upload'] - self.max_download = self.session_settings['max_download'] - self.upload_log = self.session_settings['upload_log'] - self.download_timeout = self.session_settings['download_timeout'] - self.search_timeout = self.session_settings['search_timeout'] - self.cache_time = self.session_settings['cache_time'] + for key, setting_type in setting_types.iteritems(): + if key in settings: + if isinstance(settings[key], setting_type): + lbrynet_settings.update({key: settings[key]}) + elif key == "max_key_fee" and isinstance(FeeValidator(settings[key]).amount, setting_type): + lbrynet_settings.update({key: settings[key]}) + else: + try: + converted = setting_type(settings[key]) + lbrynet_settings.update({key: converted}) + except Exception as err: + log.warning(err.message) + log.warning("error converting setting '%s' to type %s", key, setting_type) - utils.save_settings(self.daemon_conf, self.session_settings) + self.run_on_startup = lbrynet_settings.run_on_startup + self.data_rate = 
lbrynet_settings.data_rate + self.max_key_fee = lbrynet_settings.max_key_fee + self.download_directory = lbrynet_settings.download_directory + self.max_upload = lbrynet_settings.max_upload + self.max_download = lbrynet_settings.max_download + self.upload_log = lbrynet_settings.upload_log + self.download_timeout = lbrynet_settings.download_timeout + self.search_timeout = lbrynet_settings.search_timeout + self.cache_time = lbrynet_settings.cache_time return defer.succeed(True) @@ -945,10 +732,10 @@ class Daemon(jsonrpc.JSONRPC): self.startup_status = STARTUP_STAGES[1] log.info("Loading databases...") if self.created_data_dir: - db_revision = open(os.path.join(self.db_dir, "db_revision"), mode='w') - db_revision.write(str(self.current_db_revision)) - db_revision.close() - log.debug("Created the db revision file: %s", str(os.path.join(self.db_dir, "db_revision"))) + db_revision_path = os.path.join(self.db_dir, "db_revision") + with open(db_revision_path, mode='w') as db_revision: + db_revision.write(str(self.current_db_revision)) + log.debug("Created the db revision file: %s", db_revision_path) if not os.path.exists(self.blobfile_dir): os.mkdir(self.blobfile_dir) log.debug("Created the blobfile directory: %s", str(self.blobfile_dir)) @@ -958,6 +745,8 @@ class Daemon(jsonrpc.JSONRPC): db_revision_file = os.path.join(self.db_dir, "db_revision") if os.path.exists(db_revision_file): old_revision = int(open(db_revision_file).read().strip()) + if old_revision > self.current_db_revision: + return defer.fail(Exception('This version of lbrynet is not compatible with the database')) if old_revision < self.current_db_revision: from lbrynet.db_migrator import dbmigrator log.info("Upgrading your databases...") @@ -985,7 +774,7 @@ class Daemon(jsonrpc.JSONRPC): return d def _set_lbryid(self, lbryid): - if lbryid is None: + if lbryid is PENDING_LBRY_ID: return self._make_lbryid() else: log.info("LBRY ID: " + base58.b58encode(lbryid)) @@ -1003,7 +792,6 @@ class 
Daemon(jsonrpc.JSONRPC): session_id=self._session_id ) - def _setup_lbry_file_manager(self): self.startup_status = STARTUP_STAGES[3] self.lbry_file_metadata_manager = DBEncryptedFileMetadataManager(self.db_dir) @@ -1020,11 +808,25 @@ class Daemon(jsonrpc.JSONRPC): return d + def _get_analytics(self): + analytics_api = analytics.Api.load() + context = analytics.make_context(self._get_platform(), self.wallet_type) + events_generator = analytics.Events( + context, base58.b58encode(self.lbryid), self._session_id) + self.analytics_manager = analytics.Manager( + analytics_api, events_generator, analytics.Track()) + self.analytics_manager.start() + self.analytics_manager.register_repeating_metric( + analytics.BLOB_BYTES_AVAILABLE, + AlwaysSend(calculate_available_blob_size, self.session.blob_manager), + frequency=300 + ) + def _get_session(self): def get_default_data_rate(): d = self.settings.get_default_data_payment_rate() d.addCallback(lambda rate: {"default_data_payment_rate": rate if rate is not None else - MIN_BLOB_DATA_PAYMENT_RATE}) + lbrynet_settings.data_rate}) return d def get_wallet(): @@ -1038,8 +840,8 @@ class Daemon(jsonrpc.JSONRPC): elif self.wallet_type == "lbryum": log.info("Using lbryum wallet") config = {'auto-connect': True} - if conf.LBRYUM_WALLET_DIR: - config['lbryum_path'] = conf.LBRYUM_WALLET_DIR + if lbrynet_settings.lbryum_wallet_dir: + config['lbryum_path'] = lbrynet_settings.lbryum_wallet_dir d = defer.succeed(LBRYumWallet(self.db_dir, config)) elif self.wallet_type == "ptc": log.info("Using PTC wallet") @@ -1062,9 +864,8 @@ class Daemon(jsonrpc.JSONRPC): def create_session(results): self.session = Session(results['default_data_payment_rate'], db_dir=self.db_dir, lbryid=self.lbryid, blob_dir=self.blobfile_dir, dht_node_port=self.dht_node_port, - known_dht_nodes=self.known_dht_nodes, peer_port=self.peer_port, - use_upnp=self.use_upnp, wallet=results['wallet'], - is_generous=is_generous_host) + known_dht_nodes=lbrynet_settings.known_dht_nodes, 
peer_port=self.peer_port, + use_upnp=self.use_upnp, wallet=results['wallet']) self.startup_status = STARTUP_STAGES[2] dl = defer.DeferredList([d1, d2], fireOnOneErrback=True) @@ -1085,7 +886,7 @@ class Daemon(jsonrpc.JSONRPC): self.sd_identifier.add_stream_downloader_factory(EncryptedFileStreamType, file_opener_factory) return defer.succeed(None) - def _download_sd_blob(self, sd_hash, timeout=DEFAULT_SD_DOWNLOAD_TIMEOUT): + def _download_sd_blob(self, sd_hash, timeout=lbrynet_settings.sd_download_timeout): def cb(result): if not r.called: r.callback(result) @@ -1103,13 +904,13 @@ class Daemon(jsonrpc.JSONRPC): return r - def _download_name(self, name, timeout=DEFAULT_TIMEOUT, download_directory=None, + def _download_name(self, name, timeout=lbrynet_settings.download_timeout, download_directory=None, file_name=None, stream_info=None, wait_for_write=True): """ Add a lbry file to the file manager, start the download, and return the new lbry file. If it already exists in the file manager, return the existing lbry file """ - self._send_download_started(name) + self.analytics_manager.send_download_started(name, stream_info) helper = _DownloadNameHelper( self, name, timeout, download_directory, file_name, wait_for_write) @@ -1239,108 +1040,13 @@ class Daemon(jsonrpc.JSONRPC): return defer.succeed(None) def _get_lbry_file(self, search_by, val, return_json=True): - def _log_get_lbry_file(f): - if f and val: - log.info("Found LBRY file for " + search_by + ": " + val) - elif val: - log.info("Did not find LBRY file for " + search_by + ": " + val) - return f - - def _get_json_for_return(f): - def _get_file_status(file_status): - message = STREAM_STAGES[2][1] % (file_status.name, file_status.num_completed, file_status.num_known, file_status.running_status) - return defer.succeed(message) - - def _generate_reply(size): - if f.key: - key = binascii.b2a_hex(f.key) - else: - key = None - - if os.path.isfile(os.path.join(self.download_directory, f.file_name)): - written_file = 
file(os.path.join(self.download_directory, f.file_name)) - written_file.seek(0, os.SEEK_END) - written_bytes = written_file.tell() - written_file.close() - else: - written_bytes = False - - if search_by == "name": - if val in self.streams.keys(): - status = self.streams[val].code - elif f in self.lbry_file_manager.lbry_files: - # if f.stopped: - # status = STREAM_STAGES[3] - # else: - status = STREAM_STAGES[2] - else: - status = [False, False] - else: - status = [False, False] - - if status[0] == DOWNLOAD_RUNNING_CODE: - d = f.status() - d.addCallback(_get_file_status) - d.addCallback(lambda message: {'completed': f.completed, 'file_name': f.file_name, - 'download_directory': f.download_directory, - 'download_path': os.path.join(f.download_directory, f.file_name), - 'mime_type': mimetypes.guess_type(os.path.join(f.download_directory, f.file_name))[0], - 'key': key, - 'points_paid': f.points_paid, 'stopped': f.stopped, - 'stream_hash': f.stream_hash, - 'stream_name': f.stream_name, - 'suggested_file_name': f.suggested_file_name, - 'upload_allowed': f.upload_allowed, 'sd_hash': f.sd_hash, - 'lbry_uri': f.uri, 'txid': f.txid, 'claim_id': f.claim_id, - 'total_bytes': size, - 'written_bytes': written_bytes, 'code': status[0], - 'message': message}) - else: - d = defer.succeed({'completed': f.completed, 'file_name': f.file_name, 'key': key, - 'download_directory': f.download_directory, - 'download_path': os.path.join(f.download_directory, f.file_name), - 'mime_type': mimetypes.guess_type(os.path.join(f.download_directory, f.file_name))[0], - 'points_paid': f.points_paid, 'stopped': f.stopped, 'stream_hash': f.stream_hash, - 'stream_name': f.stream_name, 'suggested_file_name': f.suggested_file_name, - 'upload_allowed': f.upload_allowed, 'sd_hash': f.sd_hash, 'total_bytes': size, - 'written_bytes': written_bytes, 'lbry_uri': f.uri, 'txid': f.txid, 'claim_id': f.claim_id, - 'code': status[0], 'message': status[1]}) - - return d - - def _add_metadata(message): - def 
_add_to_dict(metadata): - message['metadata'] = metadata - return defer.succeed(message) - - if f.txid: - d = self._resolve_name(f.uri) - d.addCallbacks(_add_to_dict, lambda _: _add_to_dict("Pending confirmation")) - else: - d = defer.succeed(message) - return d - - if f: - d = f.get_total_bytes() - d.addCallback(_generate_reply) - d.addCallback(_add_metadata) - return d - else: - return False - - if search_by == "name": - d = self._get_lbry_file_by_uri(val) - elif search_by == "sd_hash": - d = self._get_lbry_file_by_sd_hash(val) - elif search_by == "file_name": - d = self._get_lbry_file_by_file_name(val) - # d.addCallback(_log_get_lbry_file) - if return_json: - d.addCallback(_get_json_for_return) - return d + return _GetFileHelper(self, search_by, val, return_json).retrieve_file() def _get_lbry_files(self): - d = defer.DeferredList([self._get_lbry_file('sd_hash', l.sd_hash) for l in self.lbry_file_manager.lbry_files]) + d = defer.DeferredList([ + self._get_lbry_file(FileID.SD_HASH, l.sd_hash) + for l in self.lbry_file_manager.lbry_files + ]) return d def _reflect(self, lbry_file): @@ -1348,13 +1054,13 @@ class Daemon(jsonrpc.JSONRPC): return defer.fail(Exception("no lbry file given to reflect")) stream_hash = lbry_file.stream_hash - + if stream_hash is None: return defer.fail(Exception("no stream hash")) log.info("Reflecting stream: %s" % stream_hash) - reflector_server = random.choice(REFLECTOR_SERVERS) + reflector_server = random.choice(lbrynet_settings.reflector_servers) reflector_address, reflector_port = reflector_server[0], reflector_server[1] log.info("Start reflector client") factory = reflector.ClientFactory( @@ -1373,7 +1079,7 @@ class Daemon(jsonrpc.JSONRPC): log.info("Reflecting %i blobs" % len(blob_hashes)) - reflector_server = random.choice(REFLECTOR_SERVERS) + reflector_server = random.choice(lbrynet_settings.reflector_servers) reflector_address, reflector_port = reflector_server[0], reflector_server[1] log.info("Start reflector client") factory = 
reflector.BlobClientFactory( @@ -1396,9 +1102,9 @@ log.info("Removing one time startup scripts") remaining_scripts = [s for s in self.startup_scripts if 'run_once' not in s.keys()] startup_scripts = self.startup_scripts - self.startup_scripts = self.session_settings['startup_scripts'] = remaining_scripts - - utils.save_settings(self.daemon_conf, self.session_settings) + self.startup_scripts = lbrynet_settings.startup_scripts = remaining_scripts + conf = os.path.join(lbrynet_settings.data_dir, "daemon_settings.yml") + utils.save_settings(conf) for script in startup_scripts: if script['script_name'] == 'migrateto025': @@ -1416,9 +1122,6 @@ def _search(self, search): return self.lighthouse_client.search(search) - def _render_response(self, result, code): - return defer.succeed({'result': result, 'code': code}) - def jsonrpc_is_running(self): """ Check if lbrynet daemon is running @@ -1455,7 +1158,7 @@ r['problem_code'] = self.connection_problem[0] r['message'] = self.connection_problem[1] r['is_lagging'] = True - elif self.startup_status[0] == LOADING_WALLET_CODE: + elif self.startup_status[0] == LOADING_WALLET_CODE: if self.wallet_type == 'lbryum': if self.session.wallet.blocks_behind_alert != 0: r['message'] = r['message'] % (str(self.session.wallet.blocks_behind_alert) + " blocks behind") @@ -1466,7 +1169,6 @@ else: r['message'] = "Catching up with the blockchain" r['progress'] = 0 - log.info("daemon status: " + str(r)) return self._render_response(r, OK_CODE) def jsonrpc_is_first_run(self): @@ -1578,8 +1280,9 @@ """ log.info("Get daemon settings") - return self._render_response(self.session_settings, OK_CODE) + return self._render_response(lbrynet_settings.configurable_settings, OK_CODE) + @AuthJSONRPCServer.auth_required def jsonrpc_set_settings(self, p): """ Set lbrynet daemon settings @@ -1598,12 +1301,12 @@ 
class Daemon(jsonrpc.JSONRPC): """ def _log_settings_change(): - log.info("Set daemon settings to " + json.dumps(self.session_settings)) + log.info("Set daemon settings to " + json.dumps(lbrynet_settings.configurable_settings)) d = self._update_settings(p) d.addErrback(lambda err: log.info(err.getTraceback())) d.addCallback(lambda _: _log_settings_change()) - d.addCallback(lambda _: self._render_response(self.session_settings, OK_CODE)) + d.addCallback(lambda _: self._render_response(lbrynet_settings.configurable_settings, OK_CODE)) return d @@ -1621,12 +1324,12 @@ class Daemon(jsonrpc.JSONRPC): """ if not p: - return self._render_response(self._listFunctions(), OK_CODE) + return self._render_response(self.callable_methods.keys(), OK_CODE) elif 'callable_during_start' in p.keys(): - return self._render_response(ALLOWED_DURING_STARTUP, OK_CODE) + return self._render_response(self.allowed_during_startup, OK_CODE) elif 'function' in p.keys(): func_path = p['function'] - function = self._getFunction(func_path) + function = self.callable_methods.get(func_path) return self._render_response(function.__doc__, OK_CODE) else: return self._render_response(self.jsonrpc_help.__doc__, OK_CODE) @@ -1690,8 +1393,7 @@ class Daemon(jsonrpc.JSONRPC): return d def jsonrpc_get_lbry_file(self, p): - """ - Get lbry file + """Get lbry file Args: 'name': get file by lbry uri, @@ -1709,15 +1411,18 @@ class Daemon(jsonrpc.JSONRPC): 'upload_allowed': bool 'sd_hash': string """ - - if p.keys()[0] in ['name', 'sd_hash', 'file_name']: - search_type = p.keys()[0] - d = self._get_lbry_file(search_type, p[search_type]) - else: - d = defer.fail() + d = self._get_deferred_for_lbry_file(p) d.addCallback(lambda r: self._render_response(r, OK_CODE)) return d + def _get_deferred_for_lbry_file(self, p): + try: + searchtype, value = get_lbry_file_search_value(p) + except NoValidSearch: + return defer.fail() + else: + return self._get_lbry_file(searchtype, value) + def jsonrpc_resolve_name(self, p): """ 
Resolve stream info from a LBRY uri @@ -1730,15 +1435,15 @@ class Daemon(jsonrpc.JSONRPC): force = p.get('force', False) - if 'name' in p: - name = p['name'] - else: + name = p.get(FileID.NAME) + if not name: return self._render_response(None, BAD_REQUEST) d = self._resolve_name(name, force_refresh=force) d.addCallbacks(lambda info: self._render_response(info, OK_CODE), lambda _: server.failure) return d + @AuthJSONRPCServer.auth_required def jsonrpc_get_my_claim(self, p): """ Return existing claim for a given name @@ -1749,7 +1454,7 @@ class Daemon(jsonrpc.JSONRPC): claim info, False if no such claim exists """ - name = p['name'] + name = p[FileID.NAME] d = self.session.wallet.get_my_claim(name) d.addCallback(lambda r: self._render_response(r, OK_CODE)) return d @@ -1771,7 +1476,7 @@ class Daemon(jsonrpc.JSONRPC): r['amount'] = float(r['amount']) / 10**8 return r - name = p['name'] + name = p[FileID.NAME] txid = p.get('txid', None) d = self.session.wallet.get_claim_info(name, txid) d.addCallback(_convert_amount_to_float) @@ -1784,11 +1489,11 @@ class Daemon(jsonrpc.JSONRPC): # can spec what parameters it expects and how to set default values timeout = p.get('timeout', self.download_timeout) download_directory = p.get('download_directory', self.download_directory) - file_name = p.get('file_name') + file_name = p.get(FileID.FILE_NAME) stream_info = p.get('stream_info') sd_hash = get_sd_hash(stream_info) wait_for_write = p.get('wait_for_write', True) - name = p.get('name') + name = p.get(FileID.NAME) return Parameters( timeout=timeout, download_directory=download_directory, @@ -1799,6 +1504,7 @@ class Daemon(jsonrpc.JSONRPC): name=name ) + @AuthJSONRPCServer.auth_required def jsonrpc_get(self, p): """Download stream from a LBRY uri. 
@@ -1829,6 +1535,7 @@ class Daemon(jsonrpc.JSONRPC): d.addCallback(lambda message: self._render_response(message, OK_CODE)) return d + @AuthJSONRPCServer.auth_required def jsonrpc_stop_lbry_file(self, p): """ Stop lbry file @@ -1842,18 +1549,25 @@ class Daemon(jsonrpc.JSONRPC): """ def _stop_file(f): - d = self.lbry_file_manager.toggle_lbry_file_running(f) - d.addCallback(lambda _: "Stopped LBRY file") - return d + if f.stopped: + return "LBRY file wasn't running" + else: + d = self.lbry_file_manager.toggle_lbry_file_running(f) + d.addCallback(lambda _: "Stopped LBRY file") + return d - if p.keys()[0] in ['name', 'sd_hash', 'file_name']: - search_type = p.keys()[0] - d = self._get_lbry_file(search_type, p[search_type], return_json=False) - d.addCallback(lambda l: _stop_file(l) if not l.stopped else "LBRY file wasn't running") + try: + searchtype, value = get_lbry_file_search_value(p) + except NoValidSearch: + d = defer.fail() + else: + d = self._get_lbry_file(searchtype, value, return_json=False) + d.addCallback(_stop_file) d.addCallback(lambda r: self._render_response(r, OK_CODE)) return d + @AuthJSONRPCServer.auth_required def jsonrpc_start_lbry_file(self, p): """ Stop lbry file @@ -1867,13 +1581,19 @@ class Daemon(jsonrpc.JSONRPC): """ def _start_file(f): - d = self.lbry_file_manager.toggle_lbry_file_running(f) - return defer.succeed("Started LBRY file") + if f.stopped: + d = self.lbry_file_manager.toggle_lbry_file_running(f) + return defer.succeed("Started LBRY file") + else: + return "LBRY file was already running" - if p.keys()[0] in ['name', 'sd_hash', 'file_name']: - search_type = p.keys()[0] - d = self._get_lbry_file(search_type, p[search_type], return_json=False) - d.addCallback(lambda l: _start_file(l) if l.stopped else "LBRY file was already running") + try: + searchtype, value = get_lbry_file_search_value(p) + except NoValidSearch: + d = defer.fail() + else: + d = self._get_lbry_file(searchtype, value, return_json=False) + d.addCallback(_start_file) 
d.addCallback(lambda r: self._render_response(r, OK_CODE)) return d @@ -1888,7 +1608,7 @@ class Daemon(jsonrpc.JSONRPC): estimated cost """ - name = p['name'] + name = p[FileID.NAME] d = self._get_est_cost(name) d.addCallback(lambda r: self._render_response(r, OK_CODE)) @@ -1930,6 +1650,7 @@ class Daemon(jsonrpc.JSONRPC): return d + @AuthJSONRPCServer.auth_required def jsonrpc_delete_lbry_file(self, p): """ Delete a lbry file @@ -1940,25 +1661,28 @@ class Daemon(jsonrpc.JSONRPC): confirmation message """ - if 'delete_target_file' in p.keys(): - delete_file = p['delete_target_file'] - else: - delete_file = True + delete_file = p.get('delete_target_file', True) def _delete_file(f): + if not f: + return False file_name = f.file_name d = self._delete_lbry_file(f, delete_file=delete_file) d.addCallback(lambda _: "Deleted LBRY file" + file_name) return d - if 'name' in p.keys() or 'sd_hash' in p.keys() or 'file_name' in p.keys(): - search_type = [k for k in p.keys() if k != 'delete_target_file'][0] - d = self._get_lbry_file(search_type, p[search_type], return_json=False) - d.addCallback(lambda l: _delete_file(l) if l else False) + try: + searchtype, value = get_lbry_file_search_value(p) + except NoValidSearch: + d = defer.fail() + else: + d = self._get_lbry_file(searchtype, value, return_json=False) + d.addCallback(_delete_file) d.addCallback(lambda r: self._render_response(r, OK_CODE)) return d + @AuthJSONRPCServer.auth_required def jsonrpc_publish(self, p): """ Make a new name claim and publish associated data to lbrynet @@ -1979,12 +1703,12 @@ class Daemon(jsonrpc.JSONRPC): return m def _reflect_if_possible(sd_hash, txid): - d = self._get_lbry_file('sd_hash', sd_hash, return_json=False) + d = self._get_lbry_file(FileID.SD_HASH, sd_hash, return_json=False) d.addCallback(self._reflect) d.addCallback(lambda _: txid) return d - name = p['name'] + name = p[FileID.NAME] log.info("Publish: ") log.info(p) @@ -2012,8 +1736,7 @@ class Daemon(jsonrpc.JSONRPC): if not 
os.path.isfile(file_path): return defer.fail(Exception("Specified file for publish doesnt exist: %s" % file_path)) - if not self.pending_claim_checker.running: - self.pending_claim_checker.start(30) + self.looping_call_manager.start(Checker.PENDING_CLAIM, 30) d = self._resolve_name(name, force_refresh=True) d.addErrback(lambda _: None) @@ -2040,6 +1763,7 @@ class Daemon(jsonrpc.JSONRPC): return d + @AuthJSONRPCServer.auth_required def jsonrpc_abandon_claim(self, p): """ Abandon a name and reclaim credits from the claim @@ -2066,6 +1790,7 @@ class Daemon(jsonrpc.JSONRPC): return d + @AuthJSONRPCServer.auth_required def jsonrpc_abandon_name(self, p): """ DEPRECIATED, use abandon_claim @@ -2078,7 +1803,7 @@ class Daemon(jsonrpc.JSONRPC): return self.jsonrpc_abandon_claim(p) - + @AuthJSONRPCServer.auth_required def jsonrpc_support_claim(self, p): """ Support a name claim @@ -2091,13 +1816,14 @@ class Daemon(jsonrpc.JSONRPC): txid """ - name = p['name'] + name = p[FileID.NAME] claim_id = p['claim_id'] amount = p['amount'] d = self.session.wallet.support_claim(name, claim_id, amount) d.addCallback(lambda r: self._render_response(r, OK_CODE)) return d + @AuthJSONRPCServer.auth_required def jsonrpc_get_name_claims(self): """ Get my name claims @@ -2131,11 +1857,12 @@ class Daemon(jsonrpc.JSONRPC): list of name claims """ - name = p['name'] + name = p[FileID.NAME] d = self.session.wallet.get_claims_for_name(name) d.addCallback(lambda r: self._render_response(r, OK_CODE)) return d + @AuthJSONRPCServer.auth_required def jsonrpc_get_transaction_history(self): """ Get transaction history @@ -2166,6 +1893,7 @@ class Daemon(jsonrpc.JSONRPC): d.addCallback(lambda r: self._render_response(r, OK_CODE)) return d + @AuthJSONRPCServer.auth_required def jsonrpc_address_is_mine(self, p): """ Checks if an address is associated with the current wallet. 
@@ -2183,7 +1911,7 @@ class Daemon(jsonrpc.JSONRPC): return d - + @AuthJSONRPCServer.auth_required def jsonrpc_get_public_key_from_wallet(self, p): """ Get public key from wallet address @@ -2221,6 +1949,7 @@ class Daemon(jsonrpc.JSONRPC): return d + @AuthJSONRPCServer.auth_required def jsonrpc_get_new_address(self): """ Generate a new wallet address @@ -2240,6 +1969,7 @@ class Daemon(jsonrpc.JSONRPC): d.addCallback(lambda address: self._render_response(address, OK_CODE)) return d + @AuthJSONRPCServer.auth_required def jsonrpc_send_amount_to_address(self, p): """ Send credits to an address @@ -2319,6 +2049,7 @@ class Daemon(jsonrpc.JSONRPC): d.addCallback(lambda r: self._render_response(r, OK_CODE)) return d + @AuthJSONRPCServer.auth_required def jsonrpc_download_descriptor(self, p): """ Download and return a sd blob @@ -2328,11 +2059,12 @@ class Daemon(jsonrpc.JSONRPC): Returns sd blob, dict """ - sd_hash = p['sd_hash'] - timeout = p.get('timeout', DEFAULT_SD_DOWNLOAD_TIMEOUT) - + sd_hash = p[FileID.SD_HASH] + timeout = p.get('timeout', lbrynet_settings.sd_download_timeout) d = self._download_sd_blob(sd_hash, timeout) - d.addCallbacks(lambda r: self._render_response(r, OK_CODE), lambda _: self._render_response(False, OK_CODE)) + d.addCallbacks( + lambda r: self._render_response(r, OK_CODE), + lambda _: self._render_response(False, OK_CODE)) return d def jsonrpc_get_nametrie(self): @@ -2350,6 +2082,7 @@ class Daemon(jsonrpc.JSONRPC): d.addCallback(lambda r: self._render_response(r, OK_CODE)) return d + @AuthJSONRPCServer.auth_required def jsonrpc_set_miner(self, p): """ Start of stop the miner, function only available when lbrycrd is set as the wallet @@ -2438,6 +2171,7 @@ class Daemon(jsonrpc.JSONRPC): d.addCallback(lambda _: self._render_response(True, OK_CODE)) return d + @AuthJSONRPCServer.auth_required def jsonrpc_configure_ui(self, p): """ Configure the UI being hosted @@ -2462,6 +2196,7 @@ class Daemon(jsonrpc.JSONRPC): return d + 
@AuthJSONRPCServer.auth_required def jsonrpc_reveal(self, p): """ Reveal a file or directory in file browser @@ -2522,8 +2257,8 @@ class Daemon(jsonrpc.JSONRPC): True or traceback """ - sd_hash = p['sd_hash'] - d = self._get_lbry_file('sd_hash', sd_hash, return_json=False) + sd_hash = p[FileID.SD_HASH] + d = self._get_lbry_file(FileID.SD_HASH, sd_hash, return_json=False) d.addCallback(self._reflect) d.addCallbacks(lambda _: self._render_response(True, OK_CODE), lambda err: self._render_response(err.getTraceback(), OK_CODE)) return d @@ -2560,14 +2295,13 @@ class Daemon(jsonrpc.JSONRPC): def jsonrpc_get_search_servers(self): """ Get list of lighthouse servers - Args: None Returns: List of address:port """ - d = self._render_response(SEARCH_SERVERS, OK_CODE) + d = self._render_response(lbrynet_settings.search_servers, OK_CODE) return d def jsonrpc_get_mean_availability(self): @@ -2604,7 +2338,7 @@ class Daemon(jsonrpc.JSONRPC): else: return 0.0 - name = p['name'] + name = p[FileID.NAME] d = self._resolve_name(name, force_refresh=True) d.addCallback(get_sd_hash) @@ -2617,6 +2351,20 @@ class Daemon(jsonrpc.JSONRPC): return d + @AuthJSONRPCServer.auth_required + def jsonrpc_test_api_authentication(self): + if self._use_authentication: + return self._render_response(True, OK_CODE) + return self._render_response("Not using authentication", OK_CODE) + + +def get_lbryum_version_from_github(): + r = urlopen("https://raw.githubusercontent.com/lbryio/lbryum/master/lib/version.py").read().split('\n') + version = next(line.split("=")[1].split("#")[0].replace(" ", "") + for line in r if "LBRYUM_VERSION" in line) + version = version.replace("'", "") + return version + def get_lbrynet_version_from_github(): """Return the latest released version from github.""" @@ -2673,7 +2421,7 @@ def get_darwin_lbrycrdd_path(): class _DownloadNameHelper(object): - def __init__(self, daemon, name, timeout=DEFAULT_TIMEOUT, download_directory=None, + def __init__(self, daemon, name, 
timeout=lbrynet_settings.download_timeout, download_directory=None, file_name=None, wait_for_write=True): self.daemon = daemon self.name = name @@ -2834,3 +2582,130 @@ class _ResolveNameHelper(object): def is_cached_name_expired(self): time_in_cache = self.now() - self.name_data['timestamp'] return time_in_cache >= self.daemon.cache_time + + +class _GetFileHelper(object): + def __init__(self, daemon, search_by, val, return_json=True): + self.daemon = daemon + self.search_by = search_by + self.val = val + self.return_json = return_json + + def retrieve_file(self): + d = self.search_for_file() + if self.return_json: + d.addCallback(self._get_json) + return d + + def search_for_file(self): + if self.search_by == FileID.NAME: + return self.daemon._get_lbry_file_by_uri(self.val) + elif self.search_by == FileID.SD_HASH: + return self.daemon._get_lbry_file_by_sd_hash(self.val) + elif self.search_by == FileID.FILE_NAME: + return self.daemon._get_lbry_file_by_file_name(self.val) + raise Exception('{} is not a valid search operation'.format(self.search_by)) + + def _get_json(self, lbry_file): + if lbry_file: + d = lbry_file.get_total_bytes() + d.addCallback(self._generate_reply, lbry_file) + d.addCallback(self._add_metadata, lbry_file) + return d + else: + return False + + def _generate_reply(self, size, lbry_file): + written_bytes = self._get_written_bytes(lbry_file) + code, message = self._get_status(lbry_file) + + if code == DOWNLOAD_RUNNING_CODE: + d = lbry_file.status() + d.addCallback(self._get_msg_for_file_status) + d.addCallback( + lambda msg: self._get_properties_dict(lbry_file, code, msg, written_bytes, size)) + else: + d = defer.succeed( + self._get_properties_dict(lbry_file, code, message, written_bytes, size)) + return d + + def _get_msg_for_file_status(self, file_status): + message = STREAM_STAGES[2][1] % ( + file_status.name, file_status.num_completed, file_status.num_known, + file_status.running_status) + return defer.succeed(message) + + def _get_key(self, 
lbry_file): + return binascii.b2a_hex(lbry_file.key) if lbry_file.key else None + + def _full_path(self, lbry_file): + return os.path.join(lbry_file.download_directory, lbry_file.file_name) + + def _get_status(self, lbry_file): + if self.search_by == FileID.NAME: + if self.val in self.daemon.streams.keys(): + status = self.daemon.streams[self.val].code + elif lbry_file in self.daemon.lbry_file_manager.lbry_files: + status = STREAM_STAGES[2] + else: + status = [False, False] + else: + status = [False, False] + return status + + def _get_written_bytes(self, lbry_file): + full_path = self._full_path(lbry_file) + if os.path.isfile(full_path): + with open(full_path) as written_file: + written_file.seek(0, os.SEEK_END) + written_bytes = written_file.tell() + else: + written_bytes = False + return written_bytes + + def _get_properties_dict(self, lbry_file, code, message, written_bytes, size): + key = self._get_key(lbry_file) + full_path = self._full_path(lbry_file) + mime_type = mimetypes.guess_type(full_path)[0] + return { + 'completed': lbry_file.completed, + 'file_name': lbry_file.file_name, + 'download_directory': lbry_file.download_directory, + 'points_paid': lbry_file.points_paid, + 'stopped': lbry_file.stopped, + 'stream_hash': lbry_file.stream_hash, + 'stream_name': lbry_file.stream_name, + 'suggested_file_name': lbry_file.suggested_file_name, + 'upload_allowed': lbry_file.upload_allowed, + 'sd_hash': lbry_file.sd_hash, + 'lbry_uri': lbry_file.uri, + 'txid': lbry_file.txid, + 'claim_id': lbry_file.claim_id, + 'download_path': full_path, + 'mime_type': mime_type, + 'key': key, + 'total_bytes': size, + 'written_bytes': written_bytes, + 'code': code, + 'message': message + } + + def _add_metadata(self, message, lbry_file): + def _add_to_dict(metadata): + message['metadata'] = metadata + return defer.succeed(message) + + if lbry_file.txid: + d = self.daemon._resolve_name(lbry_file.uri) + d.addCallbacks(_add_to_dict, lambda _: _add_to_dict("Pending confirmation")) + 
else: + d = defer.succeed(message) + return d + + +def get_lbry_file_search_value(p): + for searchtype in (FileID.SD_HASH, FileID.NAME, FileID.FILE_NAME): + value = p.get(searchtype) + if value: + return searchtype, value + raise NoValidSearch() diff --git a/lbrynet/lbrynet_daemon/DaemonCLI.py b/lbrynet/lbrynet_daemon/DaemonCLI.py index ea4f2234d..2f9fb49ba 100644 --- a/lbrynet/lbrynet_daemon/DaemonCLI.py +++ b/lbrynet/lbrynet_daemon/DaemonCLI.py @@ -2,8 +2,8 @@ import sys import json import argparse -from lbrynet.conf import API_CONNECTION_STRING -from jsonrpc.proxy import JSONRPCProxy +from lbrynet.conf import settings +from lbrynet.lbrynet_daemon.auth.client import LBRYAPIClient help_msg = "Usage: lbrynet-cli method json-args\n" \ + "Examples: " \ @@ -36,13 +36,20 @@ def get_params_from_kwargs(params): def main(): - api = JSONRPCProxy.from_url(API_CONNECTION_STRING) + api = LBRYAPIClient.config() try: - s = api.is_running() - except: - print "lbrynet-daemon isn't running" - sys.exit(1) + status = api.daemon_status() + assert status.get('code', False) == "started" + except Exception: + try: + settings.update({'use_auth_http': not settings.use_auth_http}) + api = LBRYAPIClient.config() + status = api.daemon_status() + assert status.get('code', False) == "started" + except Exception: + print "lbrynet-daemon isn't running" + sys.exit(1) parser = argparse.ArgumentParser() parser.add_argument('method', nargs=1) @@ -72,11 +79,16 @@ def main(): if meth in api.help(): try: if params: - r = api.call(meth, params) + result = LBRYAPIClient.config(service=meth, params=params) else: - r = api.call(meth) - print json.dumps(r, sort_keys=True) + result = LBRYAPIClient.config(service=meth, params=params) + print json.dumps(result, sort_keys=True) except: + # TODO: The api should return proper error codes + # and messages so that they can be passed along to the user + # instead of this generic message. 
+ # https://app.asana.com/0/158602294500137/200173944358192 + print "Something went wrong, here's the usage for %s:" % meth print api.help({'function': meth}) else: diff --git a/lbrynet/lbrynet_daemon/DaemonControl.py b/lbrynet/lbrynet_daemon/DaemonControl.py index 3c244b6eb..474110ab5 100644 --- a/lbrynet/lbrynet_daemon/DaemonControl.py +++ b/lbrynet/lbrynet_daemon/DaemonControl.py @@ -1,32 +1,29 @@ import argparse -import logging import logging.handlers import os import webbrowser import sys -from appdirs import user_data_dir -from twisted.web import server -from twisted.internet import reactor, defer +from twisted.web import server, guard +from twisted.internet import defer, reactor +from twisted.cred import portal + from jsonrpc.proxy import JSONRPCProxy +from lbrynet.lbrynet_daemon.auth.auth import PasswordChecker, HttpPasswordRealm +from lbrynet.lbrynet_daemon.auth.util import initialize_api_key_file from lbrynet.core import log_support from lbrynet.core import utils from lbrynet.lbrynet_daemon.DaemonServer import DaemonServer from lbrynet.lbrynet_daemon.DaemonRequest import DaemonRequest -from lbrynet.conf import API_CONNECTION_STRING, API_INTERFACE, API_PORT, \ - UI_ADDRESS, DEFAULT_UI_BRANCH, LOG_FILE_NAME +from lbrynet.conf import settings -# TODO: stop it! 
-if sys.platform != "darwin": - log_dir = os.path.join(os.path.expanduser("~"), ".lbrynet") -else: - log_dir = user_data_dir("LBRY") +log_dir = settings.data_dir if not os.path.isdir(log_dir): os.mkdir(log_dir) -lbrynet_log = os.path.join(log_dir, LOG_FILE_NAME) +lbrynet_log = os.path.join(log_dir, settings.LOG_FILE_NAME) log = logging.getLogger(__name__) @@ -48,7 +45,7 @@ def stop(): log.info("Attempt to shut down lbrynet-daemon from command line when daemon isn't running") d = defer.Deferred(None) - d.addCallback(lambda _: JSONRPCProxy.from_url(API_CONNECTION_STRING).stop()) + d.addCallback(lambda _: JSONRPCProxy.from_url(settings.API_CONNECTION_STRING).stop()) d.addCallbacks(lambda _: _disp_shutdown(), lambda _: _disp_not_running()) d.callback(None) @@ -58,18 +55,37 @@ def start(): parser.add_argument("--wallet", help="lbrycrd or lbryum, default lbryum", type=str, - default='') + default='lbryum') + parser.add_argument("--ui", help="path to custom UI folder", default=None) + parser.add_argument("--branch", - help="Branch of lbry-web-ui repo to use, defaults on master") - parser.add_argument('--no-launch', dest='launchui', action="store_false") - parser.add_argument('--log-to-console', dest='logtoconsole', action="store_true") - parser.add_argument('--quiet', dest='quiet', action="store_true") - parser.add_argument('--verbose', action='store_true', + help="Branch of lbry-web-ui repo to use, defaults on master", + default=settings.ui_branch) + + parser.add_argument("--http-auth", + dest="useauth", + action="store_true") + + parser.add_argument('--no-launch', + dest='launchui', + action="store_false") + + parser.add_argument('--log-to-console', + dest='logtoconsole', + action="store_true") + + parser.add_argument('--quiet', + dest='quiet', + action="store_true") + + parser.add_argument('--verbose', + action='store_true', help='enable more debug output for the console') - parser.set_defaults(branch=False, launchui=True, logtoconsole=False, quiet=False) + + 
parser.set_defaults(branch=False, launchui=True, logtoconsole=False, quiet=False, useauth=settings.use_auth_http) args = parser.parse_args() log_support.configure_file_handler(lbrynet_log) @@ -80,13 +96,27 @@ def start(): if not args.verbose: log_support.disable_noisy_loggers() + to_pass = {} + settings_path = os.path.join(settings.data_dir, "daemon_settings.yml") + if os.path.isfile(settings_path): + to_pass.update(utils.load_settings(settings_path)) + log.info("Loaded settings file") + if args.ui: + to_pass.update({'local_ui_path': args.ui}) + if args.branch: + to_pass.update({'ui_branch': args.branch}) + to_pass.update({'use_auth_http': args.useauth}) + to_pass.update({'wallet': args.wallet}) + print to_pass + settings.update(to_pass) + try: - JSONRPCProxy.from_url(API_CONNECTION_STRING).is_running() + JSONRPCProxy.from_url(settings.API_CONNECTION_STRING).is_running() log.info("lbrynet-daemon is already running") if not args.logtoconsole: print "lbrynet-daemon is already running" if args.launchui: - webbrowser.open(UI_ADDRESS) + webbrowser.open(settings.UI_ADDRESS) return except: pass @@ -96,23 +126,34 @@ def start(): if not args.logtoconsole and not args.quiet: print "Starting lbrynet-daemon from command line" print "To view activity, view the log file here: " + lbrynet_log - print "Web UI is available at http://%s:%i" % (API_INTERFACE, API_PORT) - print "JSONRPC API is available at " + API_CONNECTION_STRING + print "Web UI is available at http://%s:%i" % (settings.API_INTERFACE, settings.api_port) + print "JSONRPC API is available at " + settings.API_CONNECTION_STRING print "To quit press ctrl-c or call 'stop' via the API" if test_internet_connection(): lbry = DaemonServer() - d = lbry.start(branch=args.branch if args.branch else DEFAULT_UI_BRANCH, - user_specified=args.ui, - wallet=args.wallet, - branch_specified=True if args.branch else False) + d = lbry.start() if args.launchui: - d.addCallback(lambda _: webbrowser.open(UI_ADDRESS)) + d.addCallback(lambda 
_: webbrowser.open(settings.UI_ADDRESS)) + d.addErrback(log_and_kill) - lbrynet_server = server.Site(lbry.root) + if settings.use_auth_http: + log.info("Using authenticated API") + pw_path = os.path.join(settings.data_dir, ".api_keys") + initialize_api_key_file(pw_path) + checker = PasswordChecker.load_file(pw_path) + realm = HttpPasswordRealm(lbry.root) + portal_to_realm = portal.Portal(realm, [checker, ]) + factory = guard.BasicCredentialFactory('Login to lbrynet api') + _lbrynet_server = guard.HTTPAuthSessionWrapper(portal_to_realm, [factory, ]) + else: + log.info("Using non-authenticated API") + _lbrynet_server = server.Site(lbry.root) + + lbrynet_server = server.Site(_lbrynet_server) lbrynet_server.requestFactory = DaemonRequest - reactor.listenTCP(API_PORT, lbrynet_server, interface=API_INTERFACE) + reactor.listenTCP(settings.api_port, lbrynet_server, interface=settings.API_INTERFACE) reactor.run() if not args.logtoconsole and not args.quiet: @@ -123,5 +164,11 @@ def start(): print "Not connected to internet, unable to start" return + +def log_and_kill(failure): + log_support.failure(failure, log, 'Failed to startup: %s') + reactor.stop() + + if __name__ == "__main__": start() diff --git a/lbrynet/lbrynet_daemon/DaemonServer.py b/lbrynet/lbrynet_daemon/DaemonServer.py index 39268f5bd..0980ece2c 100644 --- a/lbrynet/lbrynet_daemon/DaemonServer.py +++ b/lbrynet/lbrynet_daemon/DaemonServer.py @@ -6,7 +6,7 @@ from appdirs import user_data_dir from twisted.internet import defer from lbrynet.lbrynet_daemon.Daemon import Daemon from lbrynet.lbrynet_daemon.Resources import LBRYindex, HostedEncryptedFile, EncryptedFileUpload -from lbrynet.conf import API_ADDRESS, DEFAULT_UI_BRANCH, LOG_FILE_NAME +from lbrynet.conf import settings # TODO: omg, this code is essentially duplicated in Daemon @@ -17,20 +17,20 @@ else: if not os.path.isdir(data_dir): os.mkdir(data_dir) -lbrynet_log = os.path.join(data_dir, LOG_FILE_NAME) +lbrynet_log = os.path.join(data_dir, 
settings.LOG_FILE_NAME) log = logging.getLogger(__name__) class DaemonServer(object): - def _setup_server(self, wallet): + def _setup_server(self): self.root = LBRYindex(os.path.join(os.path.join(data_dir, "lbry-ui"), "active")) - self._api = Daemon(self.root, wallet_type=wallet) + self._api = Daemon(self.root) self.root.putChild("view", HostedEncryptedFile(self._api)) self.root.putChild("upload", EncryptedFileUpload(self._api)) - self.root.putChild(API_ADDRESS, self._api) + self.root.putChild(settings.API_ADDRESS, self._api) return defer.succeed(True) - def start(self, branch=DEFAULT_UI_BRANCH, user_specified=False, branch_specified=False, wallet=None): - d = self._setup_server(wallet) - d.addCallback(lambda _: self._api.setup(branch, user_specified, branch_specified)) + def start(self): + d = self._setup_server() + d.addCallback(lambda _: self._api.setup()) return d diff --git a/lbrynet/lbrynet_daemon/Downloader.py b/lbrynet/lbrynet_daemon/Downloader.py index f4056bc9e..5d7c364d7 100644 --- a/lbrynet/lbrynet_daemon/Downloader.py +++ b/lbrynet/lbrynet_daemon/Downloader.py @@ -12,7 +12,7 @@ from lbrynet.core.Error import InsufficientFundsError, KeyFeeAboveMaxAllowed from lbrynet.core.StreamDescriptor import download_sd_blob from lbrynet.metadata.Fee import FeeValidator from lbrynet.lbryfilemanager.EncryptedFileDownloader import ManagedEncryptedFileDownloaderFactory -from lbrynet.conf import DEFAULT_TIMEOUT, LOG_FILE_NAME +from lbrynet.conf import settings INITIALIZING_CODE = 'initializing' DOWNLOAD_METADATA_CODE = 'downloading_metadata' @@ -35,13 +35,13 @@ else: if not os.path.isdir(log_dir): os.mkdir(log_dir) -lbrynet_log = os.path.join(log_dir, LOG_FILE_NAME) +lbrynet_log = os.path.join(log_dir, settings.LOG_FILE_NAME) log = logging.getLogger(__name__) class GetStream(object): def __init__(self, sd_identifier, session, wallet, lbry_file_manager, exchange_rate_manager, - max_key_fee, data_rate=0.5, timeout=DEFAULT_TIMEOUT, download_directory=None, file_name=None): 
+ max_key_fee, data_rate=0.5, timeout=settings.download_timeout, download_directory=None, file_name=None): self.wallet = wallet self.resolved_name = None self.description = None diff --git a/lbrynet/lbrynet_daemon/ExchangeRateManager.py b/lbrynet/lbrynet_daemon/ExchangeRateManager.py index fe9b293e1..8748d5cd0 100644 --- a/lbrynet/lbrynet_daemon/ExchangeRateManager.py +++ b/lbrynet/lbrynet_daemon/ExchangeRateManager.py @@ -226,4 +226,4 @@ class DummyExchangeRateManager(object): 'amount': self.convert_currency(fee_in.currency_symbol, "LBC", fee_in.amount), 'address': fee_in.address } - }) \ No newline at end of file + }) diff --git a/lbrynet/lbrynet_daemon/Lighthouse.py b/lbrynet/lbrynet_daemon/Lighthouse.py index 0c9dadb4c..fb574e55c 100644 --- a/lbrynet/lbrynet_daemon/Lighthouse.py +++ b/lbrynet/lbrynet_daemon/Lighthouse.py @@ -1,14 +1,14 @@ import logging import random from txjsonrpc.web.jsonrpc import Proxy -from lbrynet.conf import SEARCH_SERVERS +from lbrynet.conf import settings log = logging.getLogger(__name__) class LighthouseClient(object): def __init__(self, servers=None): - self.servers = servers or SEARCH_SERVERS + self.servers = servers or settings.search_servers def _get_random_server(self): return Proxy(random.choice(self.servers)) diff --git a/lbrynet/lbrynet_daemon/Publisher.py b/lbrynet/lbrynet_daemon/Publisher.py index 1836befbf..8cd956d42 100644 --- a/lbrynet/lbrynet_daemon/Publisher.py +++ b/lbrynet/lbrynet_daemon/Publisher.py @@ -12,7 +12,7 @@ from lbrynet.lbryfile.StreamDescriptor import publish_sd_blob from lbrynet.metadata.Metadata import Metadata from lbrynet.lbryfilemanager.EncryptedFileDownloader import ManagedEncryptedFileDownloader from lbrynet import reflector -from lbrynet.conf import LOG_FILE_NAME, REFLECTOR_SERVERS +from lbrynet.conf import settings from twisted.internet import threads, defer, reactor if sys.platform != "darwin": @@ -23,7 +23,7 @@ else: if not os.path.isdir(log_dir): os.mkdir(log_dir) -lbrynet_log = 
os.path.join(log_dir, LOG_FILE_NAME) +lbrynet_log = os.path.join(log_dir, settings.LOG_FILE_NAME) log = logging.getLogger(__name__) @@ -41,7 +41,7 @@ class Publisher(object): self.lbry_file = None self.txid = None self.stream_hash = None - reflector_server = random.choice(REFLECTOR_SERVERS) + reflector_server = random.choice(settings.reflector_servers) self.reflector_server, self.reflector_port = reflector_server[0], reflector_server[1] self.metadata = {} @@ -74,7 +74,7 @@ class Publisher(object): return d def start_reflector(self): - reflector_server = random.choice(REFLECTOR_SERVERS) + reflector_server = random.choice(settings.reflector_servers) reflector_address, reflector_port = reflector_server[0], reflector_server[1] log.info("Reflecting new publication") factory = reflector.ClientFactory( diff --git a/lbrynet/lbrynet_daemon/Resources.py b/lbrynet/lbrynet_daemon/Resources.py index ef994efd5..b400e519b 100644 --- a/lbrynet/lbrynet_daemon/Resources.py +++ b/lbrynet/lbrynet_daemon/Resources.py @@ -10,7 +10,7 @@ from appdirs import user_data_dir from twisted.web import server, static, resource from twisted.internet import defer, error -from lbrynet.conf import UI_ADDRESS +from lbrynet.conf import settings from lbrynet.lbrynet_daemon.FileStreamer import EncryptedFileStreamer # TODO: omg, this code is essentially duplicated in Daemon @@ -80,10 +80,10 @@ class HostedEncryptedFile(resource.Resource): d = self._api._download_name(request.args['name'][0]) d.addCallback(lambda stream: self._make_stream_producer(request, stream)) elif request.args['name'][0] in self._api.waiting_on.keys(): - request.redirect(UI_ADDRESS + "/?watch=" + request.args['name'][0]) + request.redirect(settings.UI_ADDRESS + "/?watch=" + request.args['name'][0]) request.finish() else: - request.redirect(UI_ADDRESS) + request.redirect(settings.UI_ADDRESS) request.finish() return server.NOT_DONE_YET diff --git a/lbrynet/lbrynet_daemon/UIManager.py b/lbrynet/lbrynet_daemon/UIManager.py index 
09ab4755f..c0e524eab 100644 --- a/lbrynet/lbrynet_daemon/UIManager.py +++ b/lbrynet/lbrynet_daemon/UIManager.py @@ -8,7 +8,7 @@ from urllib2 import urlopen from StringIO import StringIO from twisted.internet import defer from twisted.internet.task import LoopingCall -from lbrynet.conf import DEFAULT_UI_BRANCH, LOG_FILE_NAME +from lbrynet.conf import settings from lbrynet.lbrynet_daemon.Resources import NoCacheStaticFile from lbrynet import __version__ as lbrynet_version from lbryum.version import LBRYUM_VERSION as lbryum_version @@ -23,7 +23,7 @@ else: if not os.path.isdir(log_dir): os.mkdir(log_dir) -lbrynet_log = os.path.join(log_dir, LOG_FILE_NAME) +lbrynet_log = os.path.join(log_dir, settings.LOG_FILE_NAME) log = logging.getLogger(__name__) @@ -74,29 +74,29 @@ class UIManager(object): self.loaded_branch = None self.loaded_requirements = None - def setup(self, branch=DEFAULT_UI_BRANCH, user_specified=None, branch_specified=False, check_requirements=None): - if check_requirements is not None: - self.check_requirements = check_requirements - if self.branch is not None: - self.branch = branch - if user_specified: - if os.path.isdir(user_specified): - log.info("Checking user specified UI directory: " + str(user_specified)) + def setup(self, branch=None, check_requirements=None, user_specified=None): + local_ui_path = settings.local_ui_path or user_specified + self.branch = settings.ui_branch or branch + self.check_requirements = settings.check_ui_requirements or check_requirements + + if local_ui_path: + if os.path.isdir(local_ui_path): + log.info("Checking user specified UI directory: " + str(local_ui_path)) self.branch = "user-specified" self.loaded_git_version = "user-specified" - d = self.migrate_ui(source=user_specified) + d = self.migrate_ui(source=local_ui_path) d.addCallback(lambda _: self._load_ui()) return d else: log.info("User specified UI directory doesn't exist, using " + self.branch) - elif self.loaded_branch == "user-specified" and not 
branch_specified: + elif self.loaded_branch == "user-specified": log.info("Loading user provided UI") d = self._load_ui() return d else: - log.info("Checking for updates for UI branch: " + branch) - self._git_url = "https://s3.amazonaws.com/lbry-ui/{}/data.json".format(branch) - self._dist_url = "https://s3.amazonaws.com/lbry-ui/{}/dist.zip".format(branch) + log.info("Checking for updates for UI branch: " + self.branch) + self._git_url = "https://s3.amazonaws.com/lbry-ui/{}/data.json".format(self.branch) + self._dist_url = "https://s3.amazonaws.com/lbry-ui/{}/dist.zip".format(self.branch) d = self._up_to_date() d.addCallback(lambda r: self._download_ui() if not r else self._load_ui()) @@ -104,9 +104,12 @@ class UIManager(object): def _up_to_date(self): def _get_git_info(): - response = urlopen(self._git_url) - data = json.loads(response.read()) - return defer.succeed(data['sha']) + try: + response = urlopen(self._git_url) + data = json.loads(response.read()) + return defer.succeed(data['sha']) + except Exception: + return defer.fail() def _set_git(version): self.git_version = version.replace('\n', '') diff --git a/lbrynet/lbrynet_daemon/auth/__init__.py b/lbrynet/lbrynet_daemon/auth/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lbrynet/lbrynet_daemon/auth/auth.py b/lbrynet/lbrynet_daemon/auth/auth.py new file mode 100644 index 000000000..fc61929ea --- /dev/null +++ b/lbrynet/lbrynet_daemon/auth/auth.py @@ -0,0 +1,48 @@ +import logging +from zope.interface import implementer +from twisted.cred import portal, checkers, credentials, error as cred_error +from twisted.internet import defer +from twisted.web import resource +from lbrynet.lbrynet_daemon.auth.util import load_api_keys + +log = logging.getLogger(__name__) + + +@implementer(portal.IRealm) +class HttpPasswordRealm(object): + def __init__(self, resource): + self.resource = resource + + def requestAvatar(self, avatarId, mind, *interfaces): + log.debug("Processing request for %s", 
avatarId) + if resource.IResource in interfaces: + return (resource.IResource, self.resource, lambda: None) + raise NotImplementedError() + + +@implementer(checkers.ICredentialsChecker) +class PasswordChecker(object): + credentialInterfaces = (credentials.IUsernamePassword,) + + def __init__(self, passwords): + self.passwords = passwords + + @classmethod + def load_file(cls, key_path): + keys = load_api_keys(key_path) + return cls.load(keys) + + @classmethod + def load(cls, password_dict): + passwords = {key: password_dict[key].secret for key in password_dict} + return cls(passwords) + + def requestAvatarId(self, creds): + if creds.username in self.passwords: + pw = self.passwords.get(creds.username) + pw_match = creds.checkPassword(pw) + if pw_match: + return defer.succeed(creds.username) + log.warning('Incorrect username or password') + return defer.fail(cred_error.UnauthorizedLogin('Incorrect username or password')) + diff --git a/lbrynet/lbrynet_daemon/auth/client.py b/lbrynet/lbrynet_daemon/auth/client.py new file mode 100644 index 000000000..e0032a8b6 --- /dev/null +++ b/lbrynet/lbrynet_daemon/auth/client.py @@ -0,0 +1,172 @@ +import urlparse +import logging +import requests +import os +import base64 +import json + +from lbrynet.lbrynet_daemon.auth.util import load_api_keys, APIKey, API_KEY_NAME, get_auth_message +from lbrynet.conf import settings +from jsonrpc.proxy import JSONRPCProxy + +log = logging.getLogger(__name__) +USER_AGENT = "AuthServiceProxy/0.1" +TWISTED_SESSION = "TWISTED_SESSION" +LBRY_SECRET = "LBRY_SECRET" +HTTP_TIMEOUT = 30 + + +class JSONRPCException(Exception): + def __init__(self, rpc_error): + Exception.__init__(self) + self.error = rpc_error + + +class AuthAPIClient(object): + def __init__(self, key, timeout, connection, count, service, cookies, auth, url, login_url): + self.__service_name = service + self.__api_key = key + self.__service_url = login_url + self.__id_count = count + self.__url = url + self.__auth_header = auth + 
self.__conn = connection + self.__cookies = cookies + + def __getattr__(self, name): + if name.startswith('__') and name.endswith('__'): + # Python internal stuff + raise AttributeError + if self.__service_name is not None: + name = "%s.%s" % (self.__service_name, name) + return AuthAPIClient(key=self.__api_key, + timeout=HTTP_TIMEOUT, + connection=self.__conn, + count=self.__id_count, + service=name, + cookies=self.__cookies, + auth=self.__auth_header, + url=self.__url, + login_url=self.__service_url) + + def __call__(self, *args): + self.__id_count += 1 + pre_auth_postdata = {'version': '1.1', + 'method': self.__service_name, + 'params': args, + 'id': self.__id_count} + to_auth = get_auth_message(pre_auth_postdata) + token = self.__api_key.get_hmac(to_auth) + pre_auth_postdata.update({'hmac': token}) + postdata = json.dumps(pre_auth_postdata) + service_url = self.__service_url + auth_header = self.__auth_header + cookies = self.__cookies + host = self.__url.hostname + + req = requests.Request(method='POST', + url=service_url, + data=postdata, + headers={'Host': host, + 'User-Agent': USER_AGENT, + 'Authorization': auth_header, + 'Content-type': 'application/json'}, + cookies=cookies) + r = req.prepare() + http_response = self.__conn.send(r) + cookies = http_response.cookies + headers = http_response.headers + next_secret = headers.get(LBRY_SECRET, False) + if next_secret: + self.__api_key.secret = next_secret + self.__cookies = cookies + + if http_response is None: + raise JSONRPCException({ + 'code': -342, 'message': 'missing HTTP response from server'}) + + http_response.raise_for_status() + + response = http_response.json() + + if response['error'] is not None: + raise JSONRPCException(response['error']) + elif 'result' not in response: + raise JSONRPCException({ + 'code': -343, 'message': 'missing JSON-RPC result'}) + else: + return response['result'] + + @classmethod + def config(cls, key_name=None, key=None, pw_path=None, timeout=HTTP_TIMEOUT, 
connection=None, count=0, + service=None, cookies=None, auth=None, url=None, login_url=None): + + api_key_name = API_KEY_NAME if not key_name else key_name + pw_path = os.path.join(settings.data_dir, ".api_keys") if not pw_path else pw_path + if not key: + keys = load_api_keys(pw_path) + api_key = keys.get(api_key_name, False) + else: + api_key = APIKey(name=api_key_name, secret=key) + if login_url is None: + service_url = "http://%s:%s@%s:%i/%s" % (api_key_name, + api_key.secret, + settings.API_INTERFACE, + settings.api_port, + settings.API_ADDRESS) + else: + service_url = login_url + id_count = count + + if auth is None and connection is None and cookies is None and url is None: + # This is a new client instance, initialize the auth header and start a session + url = urlparse.urlparse(service_url) + (user, passwd) = (url.username, url.password) + try: + user = user.encode('utf8') + except AttributeError: + pass + try: + passwd = passwd.encode('utf8') + except AttributeError: + pass + authpair = user + b':' + passwd + auth_header = b'Basic ' + base64.b64encode(authpair) + conn = requests.Session() + conn.auth = (user, passwd) + req = requests.Request(method='POST', + url=service_url, + auth=conn.auth, + headers={'Host': url.hostname, + 'User-Agent': USER_AGENT, + 'Authorization': auth_header, + 'Content-type': 'application/json'},) + r = req.prepare() + http_response = conn.send(r) + cookies = http_response.cookies + uid = cookies.get(TWISTED_SESSION) + api_key = APIKey.new(seed=uid) + else: + # This is a client that already has a session, use it + auth_header = auth + conn = connection + assert cookies.get(LBRY_SECRET, False), "Missing cookie" + secret = cookies.get(LBRY_SECRET) + api_key = APIKey(secret, api_key_name) + return cls(api_key, timeout, conn, id_count, service, cookies, auth_header, url, service_url) + + +class LBRYAPIClient(object): + @staticmethod + def config(service=None, params=None): + if settings.use_auth_http: + if service is None: + return 
AuthAPIClient.config() + log.error("Try auth") + if params is not None: + return AuthAPIClient.config(service=service)(params) + return AuthAPIClient.config(service=service)() + url = settings.API_CONNECTION_STRING + if service is None: + return JSONRPCProxy.from_url(url) + return JSONRPCProxy.from_url(url).call(service, params) diff --git a/lbrynet/lbrynet_daemon/auth/server.py b/lbrynet/lbrynet_daemon/auth/server.py new file mode 100644 index 000000000..4a3637fe6 --- /dev/null +++ b/lbrynet/lbrynet_daemon/auth/server.py @@ -0,0 +1,274 @@ +import logging + +from decimal import Decimal +from zope.interface import implements +from twisted.web import server, resource +from twisted.internet import defer +from txjsonrpc import jsonrpclib + +from lbrynet.core.Error import InvalidAuthenticationToken, InvalidHeaderError, SubhandlerError +from lbrynet.conf import settings +from lbrynet.lbrynet_daemon.auth.util import APIKey, get_auth_message +from lbrynet.lbrynet_daemon.auth.client import LBRY_SECRET + +log = logging.getLogger(__name__) + + +def default_decimal(obj): + if isinstance(obj, Decimal): + return float(obj) + + +class AuthorizedBase(object): + def __init__(self): + self.authorized_functions = [] + self.subhandlers = [] + self.callable_methods = {} + + for methodname in dir(self): + if methodname.startswith("jsonrpc_"): + method = getattr(self, methodname) + self.callable_methods.update({methodname.split("jsonrpc_")[1]: method}) + if hasattr(method, '_auth_required'): + self.authorized_functions.append(methodname.split("jsonrpc_")[1]) + elif not methodname.startswith("__"): + method = getattr(self, methodname) + if hasattr(method, '_subhandler'): + self.subhandlers.append(method) + + @staticmethod + def auth_required(f): + f._auth_required = True + return f + + @staticmethod + def subhandler(f): + f._subhandler = True + return f + + +class AuthJSONRPCServer(AuthorizedBase): + """ + Authorized JSONRPC server used as the base class for the LBRY API + + API methods 
are named with a leading "jsonrpc_" + + Decorators: + @AuthJSONRPCServer.auth_required: this requires the client include a valid hmac authentication token in their + request + + @AuthJSONRPCServer.subhandler: include the tagged method in the processing of requests, to allow inheriting + classes to modify request handling. Tagged methods will be passed the request + object, and return True when finished to indicate success + + Attributes: + allowed_during_startup (list): list of api methods that are callable before the server has finished + startup + + sessions (dict): dictionary of active session_id: lbrynet.lbrynet_daemon.auth.util.APIKey values + + authorized_functions (list): list of api methods that require authentication + + subhandlers (list): list of subhandlers + + callable_methods (dict): dictionary of api_callable_name: method values + """ + implements(resource.IResource) + + isLeaf = True + OK = 200 + UNAUTHORIZED = 401 + NOT_FOUND = 8001 + FAILURE = 8002 + + def __init__(self, use_authentication=settings.use_auth_http): + AuthorizedBase.__init__(self) + self._use_authentication = use_authentication + self.allowed_during_startup = [] + self.sessions = {} + + def setup(self): + return NotImplementedError() + + def render(self, request): + assert self._check_headers(request), InvalidHeaderError + + session = request.getSession() + session_id = session.uid + + if self._use_authentication: + # if this is a new session, send a new secret and set the expiration, otherwise, session.touch() + if self._initialize_session(session_id): + def expire_session(): + self._unregister_user_session(session_id) + session.startCheckingExpiration() + session.notifyOnExpire(expire_session) + message = "OK" + request.setResponseCode(self.OK) + self._set_headers(request, message, True) + self._render_message(request, message) + return server.NOT_DONE_YET + session.touch() + + request.content.seek(0, 0) + content = request.content.read() + try: + parsed = 
jsonrpclib.loads(content) + except ValueError: + return server.failure + + function_name = parsed.get('method') + args = parsed.get('params') + id = parsed.get('id') + token = parsed.pop('hmac', None) + version = self._get_jsonrpc_version(parsed.get('jsonrpc'), id) + + try: + self._run_subhandlers(request) + except SubhandlerError: + return server.failure + + reply_with_next_secret = False + if self._use_authentication: + if function_name in self.authorized_functions: + try: + self._verify_token(session_id, parsed, token) + except InvalidAuthenticationToken: + log.warning("API validation failed") + request.setResponseCode(self.UNAUTHORIZED) + request.finish() + return server.NOT_DONE_YET + self._update_session_secret(session_id) + reply_with_next_secret = True + + try: + function = self._get_jsonrpc_method(function_name) + except Exception: + log.warning("Unknown method: %s", function_name) + return server.failure + + d = defer.maybeDeferred(function) if args == [{}] else defer.maybeDeferred(function, *args) + # cancel the response if the connection is broken + notify_finish = request.notifyFinish() + notify_finish.addErrback(self._response_failed, d) + d.addErrback(self._errback_render, id) + d.addCallback(self._callback_render, request, id, version, reply_with_next_secret) + d.addErrback(notify_finish.errback) + + return server.NOT_DONE_YET + + def _register_user_session(self, session_id): + """ + Add or update a HMAC secret for a session + + @param session_id: + @return: secret + """ + log.info("Register api session") + token = APIKey.new(seed=session_id) + self.sessions.update({session_id: token}) + + def _unregister_user_session(self, session_id): + log.info("Unregister API session") + del self.sessions[session_id] + + def _response_failed(self, err, call): + log.debug(err.getTraceback()) + + def _set_headers(self, request, data, update_secret=False): + request.setHeader("Access-Control-Allow-Origin", settings.API_INTERFACE) + request.setHeader("Content-Type", 
"text/json") + request.setHeader("Content-Length", str(len(data))) + if update_secret: + session_id = request.getSession().uid + request.setHeader(LBRY_SECRET, self.sessions.get(session_id).secret) + + def _render_message(self, request, message): + request.write(message) + request.finish() + + def _check_headers(self, request): + origin = request.getHeader("Origin") + referer = request.getHeader("Referer") + if origin not in [None, settings.ORIGIN]: + log.warning("Attempted api call from %s", origin) + return False + if referer is not None and not referer.startswith(settings.REFERER): + log.warning("Attempted api call from %s", referer) + return False + return True + + def _check_function_path(self, function_path): + if function_path not in self.callable_methods: + log.warning("Unknown method: %s", function_path) + return False + if not self.announced_startup: + if function_path not in self.allowed_during_startup: + log.warning("Cannot call %s during startup", function_path) + return False + return True + + def _get_jsonrpc_method(self, function_path): + assert self._check_function_path(function_path) + return self.callable_methods.get(function_path) + + def _initialize_session(self, session_id): + if not self.sessions.get(session_id, False): + self._register_user_session(session_id) + return True + return False + + def _verify_token(self, session_id, message, token): + to_auth = get_auth_message(message) + api_key = self.sessions.get(session_id) + assert api_key.compare_hmac(to_auth, token), InvalidAuthenticationToken + + def _update_session_secret(self, session_id): + # log.info("Generating new token for next request") + self.sessions.update({session_id: APIKey.new(name=session_id)}) + + def _get_jsonrpc_version(self, version=None, id=None): + if version: + version_for_return = int(float(version)) + elif id and not version: + version_for_return = jsonrpclib.VERSION_1 + else: + version_for_return = jsonrpclib.VERSION_PRE1 + return version_for_return + + def 
_run_subhandlers(self, request): + for handler in self.subhandlers: + try: + assert handler(request) + except Exception as err: + log.error(err.message) + raise SubhandlerError + + def _callback_render(self, result, request, id, version, auth_required=False): + result_for_return = result if not isinstance(result, dict) else result['result'] + + if version == jsonrpclib.VERSION_PRE1: + if not isinstance(result, jsonrpclib.Fault): + result_for_return = (result_for_return,) + # Convert the result (python) to JSON-RPC + try: + encoded_message = jsonrpclib.dumps(result_for_return, version=version, default=default_decimal) + self._set_headers(request, encoded_message, auth_required) + self._render_message(request, encoded_message) + except: + fault = jsonrpclib.Fault(self.FAILURE, "can't serialize output") + encoded_message = jsonrpclib.dumps(fault, version=version) + self._set_headers(request, encoded_message) + self._render_message(request, encoded_message) + + def _errback_render(self, failure, id): + log.error("Request failed:") + log.error(failure) + log.error(failure.value) + log.error(id) + if isinstance(failure.value, jsonrpclib.Fault): + return failure.value + return server.failure + + def _render_response(self, result, code): + return defer.succeed({'result': result, 'code': code}) diff --git a/lbrynet/lbrynet_daemon/auth/util.py b/lbrynet/lbrynet_daemon/auth/util.py new file mode 100644 index 000000000..8a1e078a4 --- /dev/null +++ b/lbrynet/lbrynet_daemon/auth/util.py @@ -0,0 +1,93 @@ +import base58 +import hmac +import hashlib +import yaml +import os +import logging + +log = logging.getLogger(__name__) + +API_KEY_NAME = "api" + + +def sha(x): + h = hashlib.sha256(x).digest() + return base58.b58encode(h) + + +def generate_key(x=None): + if x is None: + return sha(os.urandom(256)) + else: + return sha(x) + + +class APIKey(object): + def __init__(self, secret, name, expiration=None): + self.secret = secret + self.name = name + self.expiration = expiration + + 
@classmethod + def new(cls, seed=None, name=None, expiration=None): + secret = generate_key(seed) + key_name = name if name else sha(secret) + return APIKey(secret, key_name, expiration) + + def _raw_key(self): + return base58.b58decode(self.secret) + + def get_hmac(self, message): + decoded_key = self._raw_key() + signature = hmac.new(decoded_key, message, hashlib.sha256) + return base58.b58encode(signature.digest()) + + def compare_hmac(self, message, token): + decoded_token = base58.b58decode(token) + target = base58.b58decode(self.get_hmac(message)) + try: + assert len(decoded_token) == len(target), "Length mismatch" + r = hmac.compare_digest(decoded_token, target) + except: + return False + return r + + +def load_api_keys(path): + if not os.path.isfile(path): + raise Exception("Invalid api key path") + + with open(path, "r") as f: + data = yaml.load(f.read()) + + keys_for_return = {} + for key_name in data: + key = data[key_name] + secret = key['secret'] + expiration = key['expiration'] + keys_for_return.update({key_name: APIKey(secret, key_name, expiration)}) + + return keys_for_return + + +def save_api_keys(keys, path): + with open(path, "w") as f: + key_dict = {keys[key_name].name: {'secret': keys[key_name].secret, + 'expiration': keys[key_name].expiration} + for key_name in keys} + data = yaml.safe_dump(key_dict) + f.write(data) + + +def initialize_api_key_file(key_path): + if not os.path.isfile(key_path): + keys = {} + new_api_key = APIKey.new(name=API_KEY_NAME) + keys.update({new_api_key.name: new_api_key}) + save_api_keys(keys, key_path) + + +def get_auth_message(message_dict): + to_auth = message_dict.get('method').encode('hex') + to_auth += str(message_dict.get('id')).encode('hex') + return to_auth.decode('hex') \ No newline at end of file diff --git a/lbrynet/pointtraderclient/pointtraderclient.py b/lbrynet/pointtraderclient/pointtraderclient.py index 0499a8b9e..1c533843c 100644 --- a/lbrynet/pointtraderclient/pointtraderclient.py +++ 
b/lbrynet/pointtraderclient/pointtraderclient.py @@ -1,4 +1,4 @@ -from lbrynet.conf import POINTTRADER_SERVER +from lbrynet.conf import settings from twisted.web.client import Agent, FileBodyProducer, Headers, ResponseDone from twisted.internet import threads, defer, protocol @@ -46,7 +46,7 @@ def get_body_from_request(path, data): jsondata = FileBodyProducer(StringIO(json.dumps(data))) agent = Agent(reactor) - d = agent.request('POST', POINTTRADER_SERVER + path, Headers({'Content-Type': ['application/json']}), jsondata) + d = agent.request('POST', settings.pointtrader_server + path, Headers({'Content-Type': ['application/json']}), jsondata) d.addCallback(get_body) return d diff --git a/packaging/osx/lbry-osx-app/lbrygui/LBRYApp.py b/packaging/osx/lbry-osx-app/lbrygui/LBRYApp.py index acc5f13f6..ab43236f3 100644 --- a/packaging/osx/lbry-osx-app/lbrygui/LBRYApp.py +++ b/packaging/osx/lbry-osx-app/lbrygui/LBRYApp.py @@ -3,16 +3,11 @@ import webbrowser import sys import os import logging -import socket import platform import shutil from appdirs import user_data_dir - -from PyObjCTools import AppHelper - from twisted.internet import reactor from twisted.web import server - import Foundation bundle = Foundation.NSBundle.mainBundle() lbrycrdd_path = bundle.pathForResource_ofType_('lbrycrdd', None) @@ -29,8 +24,7 @@ if not os.path.isfile(lbrycrdd_path_conf): from lbrynet.lbrynet_daemon.DaemonServer import DaemonServer from lbrynet.lbrynet_daemon.DaemonRequest import DaemonRequest -from lbrynet.conf import API_PORT, API_INTERFACE, ICON_PATH, APP_NAME -from lbrynet.conf import UI_ADDRESS +from lbrynet.conf import settings from lbrynet.core import utils @@ -49,7 +43,7 @@ class LBRYDaemonApp(AppKit.NSApplication): self.connection = False statusbar = AppKit.NSStatusBar.systemStatusBar() self.statusitem = statusbar.statusItemWithLength_(AppKit.NSVariableStatusItemLength) - self.icon = AppKit.NSImage.alloc().initByReferencingFile_(ICON_PATH) + self.icon = 
AppKit.NSImage.alloc().initByReferencingFile_(settings.ICON_PATH) self.icon.setScalesWhenResized_(True) self.icon.setSize_((20, 20)) self.statusitem.setImage_(self.icon) @@ -59,7 +53,7 @@ class LBRYDaemonApp(AppKit.NSApplication): self.quit = AppKit.NSMenuItem.alloc().initWithTitle_action_keyEquivalent_("Quit", "replyToApplicationShouldTerminate:", "") self.menubarMenu.addItem_(self.quit) self.statusitem.setMenu_(self.menubarMenu) - self.statusitem.setToolTip_(APP_NAME) + self.statusitem.setToolTip_(settings.APP_NAME) if test_internet_connection(): @@ -70,16 +64,15 @@ class LBRYDaemonApp(AppKit.NSApplication): LBRYNotify("LBRY needs an internet connection to start, try again when one is available") sys.exit(0) - lbry = DaemonServer() - d = lbry.start() - d.addCallback(lambda _: webbrowser.open(UI_ADDRESS)) + d = lbry.start(use_authentication=False) + d.addCallback(lambda _: webbrowser.open(settings.UI_ADDRESS)) lbrynet_server = server.Site(lbry.root) lbrynet_server.requestFactory = DaemonRequest - reactor.listenTCP(API_PORT, lbrynet_server, interface=API_INTERFACE) + reactor.listenTCP(settings.api_port, lbrynet_server, interface=settings.API_INTERFACE) def openui_(self, sender): - webbrowser.open(UI_ADDRESS) + webbrowser.open(settings.UI_ADDRESS) def replyToApplicationShouldTerminate_(self, shouldTerminate): if platform.mac_ver()[0] >= "10.10": diff --git a/packaging/osx/lbry-osx-app/setup_app.py b/packaging/osx/lbry-osx-app/setup_app.py index 1f4cba717..29169a9da 100644 --- a/packaging/osx/lbry-osx-app/setup_app.py +++ b/packaging/osx/lbry-osx-app/setup_app.py @@ -2,14 +2,14 @@ import os from setuptools import setup -from lbrynet.conf import APP_NAME, ICON_PATH +from lbrynet.conf import settings APP = [os.path.join('lbrygui', 'main.py')] DATA_FILES = [] DATA_FILES.append('app.icns') OPTIONS = { - 'iconfile': ICON_PATH, + 'iconfile': settings.ICON_PATH, 'plist': { 'CFBundleIdentifier': 'io.lbry.LBRY', 'LSUIElement': True, @@ -22,7 +22,7 @@ OPTIONS = { setup( - 
name=APP_NAME, + name=settings.APP_NAME, app=APP, options={'py2app': OPTIONS}, data_files=DATA_FILES, diff --git a/packaging/osx/lbry-osx-app/setup_uri_handler.py b/packaging/osx/lbry-osx-app/setup_uri_handler.py index 21b2050a9..62d185e08 100644 --- a/packaging/osx/lbry-osx-app/setup_uri_handler.py +++ b/packaging/osx/lbry-osx-app/setup_uri_handler.py @@ -1,6 +1,6 @@ from setuptools import setup import os -from lbrynet.conf import PROTOCOL_PREFIX +from lbrynet.conf import settings APP = [os.path.join('lbry_uri_handler', 'LBRYURIHandler.py')] DATA_FILES = [] @@ -12,7 +12,7 @@ OPTIONS = {'argv_emulation': True, 'CFBundleURLTypes': [ { 'CFBundleURLTypes': 'LBRYURIHandler', - 'CFBundleURLSchemes': [PROTOCOL_PREFIX] + 'CFBundleURLSchemes': [settings.PROTOCOL_PREFIX] } ] } @@ -23,4 +23,4 @@ setup( data_files=DATA_FILES, options={'py2app': OPTIONS}, setup_requires=['py2app'], -) \ No newline at end of file +) diff --git a/packaging/ubuntu/lbry b/packaging/ubuntu/lbry index 86d55b7eb..f99ea6b82 100755 --- a/packaging/ubuntu/lbry +++ b/packaging/ubuntu/lbry @@ -15,8 +15,6 @@ if [ ! -f "$LBRYCRDCONF" ]; then echo -e "rpcuser=lbryrpc\nrpcpassword=$(env LC_CTYPE=C LC_ALL=C tr -dc A-Za-z0-9 < /dev/urandom | head -c 16 | xargs)" > "$LBRYCRDCONF" fi -WEB_UI_BRANCH='master' - urlencode() { local LANG=C local length="${#1}" @@ -42,7 +40,7 @@ DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )" if [ -z "$(pgrep lbrynet-daemon)" ]; then echo "running lbrynet-daemon..." 
- $DIR/lbrynet-daemon --no-launch --branch="$WEB_UI_BRANCH" & + $DIR/lbrynet-daemon --no-launch & sleep 3 # let the daemon load before connecting fi diff --git a/packaging/ubuntu/lbry.desktop b/packaging/ubuntu/lbry.desktop index aff145ad6..a9afd3599 100644 --- a/packaging/ubuntu/lbry.desktop +++ b/packaging/ubuntu/lbry.desktop @@ -1,5 +1,5 @@ [Desktop Entry] -Version=0.6.9 +Version=0.6.10 Name=LBRY Comment=The world's first user-owned content marketplace Icon=lbry diff --git a/packaging/uri_handler/LBRYURIHandler.py b/packaging/uri_handler/LBRYURIHandler.py index dd1c5b13e..f8d9b081f 100644 --- a/packaging/uri_handler/LBRYURIHandler.py +++ b/packaging/uri_handler/LBRYURIHandler.py @@ -4,15 +4,14 @@ import subprocess import sys from time import sleep -from jsonrpc.proxy import JSONRPCProxy - -from lbrynet.conf import UI_ADDRESS, API_CONNECTION_STRING +from lbrynet.lbrynet_daemon.auth.client import LBRYAPIClient +from lbrynet.conf import settings class LBRYURIHandler(object): def __init__(self): self.started_daemon = False - self.daemon = JSONRPCProxy.from_url(API_CONNECTION_STRING) + self.daemon = LBRYAPIClient.config() def handle_osx(self, lbry_name): self.check_daemon() @@ -70,9 +69,9 @@ class LBRYURIHandler(object): @staticmethod def open_address(lbry_name): if lbry_name == "lbry" or lbry_name == "" or lbry_name is None: - webbrowser.open(UI_ADDRESS) + webbrowser.open(settings.UI_ADDRESS) else: - webbrowser.open(UI_ADDRESS + "/?show=" + lbry_name) + webbrowser.open(settings.UI_ADDRESS + "/?show=" + lbry_name) def main(args): diff --git a/packaging/windows/lbry-win32-app/LBRYWin32App.py b/packaging/windows/lbry-win32-app/LBRYWin32App.py index 885fda283..d3591dcab 100644 --- a/packaging/windows/lbry-win32-app/LBRYWin32App.py +++ b/packaging/windows/lbry-win32-app/LBRYWin32App.py @@ -21,8 +21,7 @@ except ImportError: from lbrynet.core import utils from lbrynet.lbrynet_daemon.DaemonServer import DaemonServer from lbrynet.lbrynet_daemon.DaemonRequest import 
DaemonRequest -from lbrynet.conf import API_PORT, API_INTERFACE, ICON_PATH, APP_NAME -from lbrynet.conf import UI_ADDRESS, API_CONNECTION_STRING, LOG_FILE_NAME +from lbrynet.conf import settings from packaging.uri_handler.LBRYURIHandler import LBRYURIHandler @@ -31,7 +30,7 @@ data_dir = os.path.join(os.path.expanduser("~"), ".lbrynet") if not os.path.isdir(data_dir): os.mkdir(data_dir) -lbrynet_log = os.path.join(data_dir, LOG_FILE_NAME) +lbrynet_log = os.path.join(data_dir, settings.LOG_FILE_NAME) log = logging.getLogger(__name__) if getattr(sys, 'frozen', False) and os.name == "nt": @@ -252,7 +251,7 @@ def main(lbry_name=None): return SysTrayIcon(icon, hover_text, menu_options, on_quit=stop) def openui_(sender): - webbrowser.open(UI_ADDRESS) + webbrowser.open(settings.UI_ADDRESS) def replyToApplicationShouldTerminate_(): try: @@ -264,11 +263,11 @@ def main(lbry_name=None): replyToApplicationShouldTerminate_() if getattr(sys, 'frozen', False) and os.name == "nt": - icon = os.path.join(os.path.dirname(sys.executable), ICON_PATH, 'lbry16.ico') + icon = os.path.join(os.path.dirname(sys.executable), settings.ICON_PATH, 'lbry16.ico') else: - icon = os.path.join(ICON_PATH, 'lbry16.ico') + icon = os.path.join(settings.ICON_PATH, 'lbry16.ico') - hover_text = APP_NAME + hover_text = settings.APP_NAME menu_options = (('Open', icon, openui_),) if not test_internet_connection(): @@ -280,19 +279,19 @@ def main(lbry_name=None): systray_thread.start() lbry = DaemonServer() - d = lbry.start() + d = lbry.start(use_authentication=False) d.addCallback(lambda _: LBRYURIHandler.open_address(lbry_name)) lbrynet_server = server.Site(lbry.root) lbrynet_server.requestFactory = DaemonRequest try: - reactor.listenTCP(API_PORT, lbrynet_server, interface=API_INTERFACE) + reactor.listenTCP(settings.api_port, lbrynet_server, interface=settings.API_INTERFACE) except error.CannotListenError: log.info('Daemon already running, exiting app') sys.exit(1) reactor.run() if __name__ == '__main__': - 
lbry_daemon = JSONRPCProxy.from_url(API_CONNECTION_STRING) + lbry_daemon = JSONRPCProxy.from_url(settings.API_CONNECTION_STRING) try: daemon_running = lbry_daemon.is_running() diff --git a/requirements.txt b/requirements.txt index 276b82289..b5f035da4 100644 --- a/requirements.txt +++ b/requirements.txt @@ -29,4 +29,5 @@ wsgiref==0.1.2 zope.interface==4.1.3 base58==0.2.2 googlefinance==0.7 -pyyaml==3.12 \ No newline at end of file +pyyaml==3.12 +service_identity==16.0.0 \ No newline at end of file diff --git a/tests/dht/runalltests.py b/tests/dht/runalltests.py deleted file mode 100755 index 74f2a1e3f..000000000 --- a/tests/dht/runalltests.py +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env python -# -# This library is free software, distributed under the terms of -# the GNU Lesser General Public License Version 3, or any later version. -# See the COPYING file included in this archive - -""" Wrapper script to run all included test scripts """ - -import os, sys -import unittest - -def runTests(): - testRunner = unittest.TextTestRunner() - testRunner.run(additional_tests()) - -def additional_tests(): - """ Used directly by setuptools to run unittests """ - sys.path.insert(0, os.path.dirname(__file__)) - suite = unittest.TestSuite() - tests = os.listdir(os.path.dirname(__file__)) - tests = [n[:-3] for n in tests if n.startswith('test') and n.endswith('.py')] - for test in tests: - m = __import__(test) - if hasattr(m, 'suite'): - suite.addTest(m.suite()) - sys.path.pop(0) - return suite - - -if __name__ == '__main__': - # Add parent folder to sys path so it's easier to use - sys.path.insert(0,os.path.abspath('..')) - runTests() diff --git a/tests/dht/testContact.py b/tests/dht/testContact.py deleted file mode 100644 index 6c475cf28..000000000 --- a/tests/dht/testContact.py +++ /dev/null @@ -1,47 +0,0 @@ -#!/usr/bin/env python -# -# This library is free software, distributed under the terms of -# the GNU Lesser General Public License Version 3, or any later version. 
-# See the COPYING file included in this archive - -import unittest - -import lbrynet.dht.contact - -class ContactOperatorsTest(unittest.TestCase): - """ Basic tests case for boolean operators on the Contact class """ - def setUp(self): - self.firstContact = lbrynet.dht.contact.Contact('firstContactID', '127.0.0.1', 1000, None, 1) - self.secondContact = lbrynet.dht.contact.Contact('2ndContactID', '192.168.0.1', 1000, None, 32) - self.secondContactCopy = lbrynet.dht.contact.Contact('2ndContactID', '192.168.0.1', 1000, None, 32) - self.firstContactDifferentValues = lbrynet.dht.contact.Contact('firstContactID', '192.168.1.20', 1000, None, 50) - - def testBoolean(self): - """ Test "equals" and "not equals" comparisons """ - self.failIfEqual(self.firstContact, self.secondContact, 'Contacts with different IDs should not be equal.') - self.failUnlessEqual(self.firstContact, self.firstContactDifferentValues, 'Contacts with same IDs should be equal, even if their other values differ.') - self.failUnlessEqual(self.secondContact, self.secondContactCopy, 'Different copies of the same Contact instance should be equal') - - def testStringComparisons(self): - """ Test comparisons of Contact objects with str types """ - self.failUnlessEqual('firstContactID', self.firstContact, 'The node ID string must be equal to the contact object') - self.failIfEqual('some random string', self.firstContact, "The tested string should not be equal to the contact object (not equal to it's ID)") - - def testIllogicalComparisons(self): - """ Test comparisons with non-Contact and non-str types """ - for item in (123, [1,2,3], {'key': 'value'}): - self.failIfEqual(self.firstContact, item, '"eq" operator: Contact object should not be equal to %s type' % type(item).__name__) - self.failUnless(self.firstContact != item, '"ne" operator: Contact object should not be equal to %s type' % type(item).__name__) - - def testCompactIP(self): - self.assertEqual(self.firstContact.compact_ip(), '\x7f\x00\x00\x01') - 
self.assertEqual(self.secondContact.compact_ip(), '\xc0\xa8\x00\x01') - -def suite(): - suite = unittest.TestSuite() - suite.addTest(unittest.makeSuite(ContactOperatorsTest)) - return suite - -if __name__ == '__main__': - # If this module is executed from the commandline, run all its tests - unittest.TextTestRunner().run(suite()) diff --git a/tests/functional/test_misc.py b/tests/functional/test_misc.py index 8b8ec0de8..0729af819 100644 --- a/tests/functional/test_misc.py +++ b/tests/functional/test_misc.py @@ -1,23 +1,32 @@ -import shutil -from multiprocessing import Process, Event, Queue +import io import logging +from multiprocessing import Process, Event, Queue +import os import platform +import shutil import sys import random -import io import unittest from Crypto.PublicKey import RSA from Crypto import Random from Crypto.Hash import MD5 -from lbrynet.conf import MIN_BLOB_DATA_PAYMENT_RATE +from lbrynet.conf import settings from lbrynet.lbrylive.LiveStreamCreator import FileLiveStreamCreator from lbrynet.lbrylive.LiveStreamMetadataManager import DBLiveStreamMetadataManager from lbrynet.lbrylive.LiveStreamMetadataManager import TempLiveStreamMetadataManager -from lbrynet.lbryfile.EncryptedFileMetadataManager import TempEncryptedFileMetadataManager, DBEncryptedFileMetadataManager +from lbrynet.lbryfile.EncryptedFileMetadataManager import TempEncryptedFileMetadataManager, \ + DBEncryptedFileMetadataManager +from lbrynet import analytics +from lbrynet.lbrylive.LiveStreamCreator import FileLiveStreamCreator +from lbrynet.lbrylive.LiveStreamMetadataManager import DBLiveStreamMetadataManager +from lbrynet.lbrylive.LiveStreamMetadataManager import TempLiveStreamMetadataManager +from lbrynet.lbryfile.EncryptedFileMetadataManager import TempEncryptedFileMetadataManager +from lbrynet.lbryfile.EncryptedFileMetadataManager import DBEncryptedFileMetadataManager from lbrynet.lbryfilemanager.EncryptedFileManager import EncryptedFileManager from lbrynet.core.PTCWallet import 
PointTraderKeyQueryHandlerFactory, PointTraderKeyExchanger from lbrynet.core.Session import Session +from lbrynet.core.server.BlobAvailabilityHandler import BlobAvailabilityHandlerFactory from lbrynet.core.client.StandaloneBlobDownloader import StandaloneBlobDownloader from lbrynet.core.StreamDescriptor import BlobStreamDescriptorWriter from lbrynet.core.StreamDescriptor import StreamDescriptorIdentifier @@ -28,17 +37,28 @@ from lbrynet.lbryfile.StreamDescriptor import get_sd_info from twisted.internet import defer, threads, task from twisted.trial.unittest import TestCase from twisted.python.failure import Failure -import os + from lbrynet.dht.node import Node -from tests.mocks import DummyBlobAvailabilityTracker from lbrynet.core.PeerManager import PeerManager from lbrynet.core.RateLimiter import DummyRateLimiter, RateLimiter from lbrynet.core.server.BlobRequestHandler import BlobRequestHandlerFactory from lbrynet.core.server.ServerProtocol import ServerProtocolFactory + from lbrynet.lbrylive.server.LiveBlobInfoQueryHandler import CryptBlobInfoQueryHandlerFactory from lbrynet.lbrylive.client.LiveStreamOptions import add_live_stream_to_sd_identifier from lbrynet.lbrylive.client.LiveStreamDownloader import add_full_live_stream_downloader_to_sd_identifier +from tests import mocks + + +FakeNode = mocks.Node +FakeWallet = mocks.Wallet +FakePeerFinder = mocks.PeerFinder +FakeAnnouncer = mocks.Announcer +GenFile = mocks.GenFile +test_create_stream_sd_file = mocks.create_stream_sd_file +DummyBlobAvailabilityTracker = mocks.BlobAvailabilityTracker + log_format = "%(funcName)s(): %(message)s" logging.basicConfig(level=logging.WARNING, format=log_format) @@ -47,167 +67,14 @@ logging.basicConfig(level=logging.WARNING, format=log_format) def require_system(system): def wrapper(fn): return fn + if platform.system() == system: return wrapper else: return unittest.skip("Skipping. 
Test can only be run on " + system) -class FakeNode(object): - def __init__(self, *args, **kwargs): - pass - - def joinNetwork(self, *args): - pass - - def stop(self): - pass - - -class FakeWallet(object): - def __init__(self): - self.private_key = RSA.generate(1024) - self.encoded_public_key = self.private_key.publickey().exportKey() - - def start(self): - return defer.succeed(True) - - def stop(self): - return defer.succeed(True) - - def get_info_exchanger(self): - return PointTraderKeyExchanger(self) - - def get_wallet_info_query_handler_factory(self): - return PointTraderKeyQueryHandlerFactory(self) - - def reserve_points(self, *args): - return True - - def cancel_point_reservation(self, *args): - pass - - def send_points(self, *args): - return defer.succeed(True) - - def add_expected_payment(self, *args): - pass - - def get_balance(self): - return defer.succeed(1000) - - def set_public_key_for_peer(self, peer, public_key): - pass - - def get_claim_metadata_for_sd_hash(self, sd_hash): - return "fakeuri", "faketxid" - - -class FakePeerFinder(object): - def __init__(self, start_port, peer_manager, num_peers): - self.start_port = start_port - self.peer_manager = peer_manager - self.num_peers = num_peers - self.count = 0 - - def find_peers_for_blob(self, *args): - peer_port = self.start_port + self.count - self.count += 1 - if self.count >= self.num_peers: - self.count = 0 - return defer.succeed([self.peer_manager.get_peer("127.0.0.1", peer_port)]) - - def run_manage_loop(self): - pass - - def stop(self): - pass - - -class FakeAnnouncer(object): - - def __init__(self, *args): - pass - - def add_supplier(self, supplier): - pass - - def immediate_announce(self, *args): - pass - - def run_manage_loop(self): - pass - - def stop(self): - pass - - -class GenFile(io.RawIOBase): - def __init__(self, size, pattern): - io.RawIOBase.__init__(self) - self.size = size - self.pattern = pattern - self.read_so_far = 0 - self.buff = b'' - self.last_offset = 0 - - def 
readable(self): - return True - - def writable(self): - return False - - def read(self, n=-1): - if n > -1: - bytes_to_read = min(n, self.size - self.read_so_far) - else: - bytes_to_read = self.size - self.read_so_far - output, self.buff = self.buff[:bytes_to_read], self.buff[bytes_to_read:] - bytes_to_read -= len(output) - while bytes_to_read > 0: - self.buff = self._generate_chunk() - new_output, self.buff = self.buff[:bytes_to_read], self.buff[bytes_to_read:] - bytes_to_read -= len(new_output) - output += new_output - self.read_so_far += len(output) - return output - - def readall(self): - return self.read() - - def _generate_chunk(self, n=2**10): - output = self.pattern[self.last_offset:self.last_offset + n] - n_left = n - len(output) - whole_patterns = n_left / len(self.pattern) - output += self.pattern * whole_patterns - self.last_offset = n - len(output) - output += self.pattern[:self.last_offset] - return output - - -test_create_stream_sd_file = { - 'stream_name': '746573745f66696c65', - 'blobs': [ - {'length': 2097152, 'blob_num': 0, - 'blob_hash': - 'dc4708f76a5e7af0f1cae0ee96b824e2ed9250c9346c093b441f0a20d3607c17948b6fcfb4bc62020fe5286693d08586', - 'iv': '30303030303030303030303030303031'}, - {'length': 2097152, 'blob_num': 1, - 'blob_hash': - 'f4067522c1b49432a2a679512e3917144317caa1abba0c041e0cd2cf9f635d4cf127ce1824fa04189b63916174951f70', - 'iv': '30303030303030303030303030303032'}, - {'length': 1015056, 'blob_num': 2, - 'blob_hash': - '305486c434260484fcb2968ce0e963b72f81ba56c11b08b1af0789b55b44d78422600f9a38e3cf4f2e9569897e5646a9', - 'iv': '30303030303030303030303030303033'}, - {'length': 0, 'blob_num': 3, 'iv': '30303030303030303030303030303034'}], - 'stream_type': 'lbryfile', - 'key': '30313233343536373031323334353637', - 'suggested_file_name': '746573745f66696c65', - 'stream_hash': '6d27fbe10c86d81aacfb897c7a426d0a2214f5a299455a6d315c0f998c4b3545c2dc60906122d94653c23b1898229e3f'} - - -def start_lbry_uploader(sd_hash_queue, kill_event, dead_event, 
file_size, ul_rate_limit=None, is_generous=False): +def use_epoll_on_linux(): if sys.platform.startswith("linux"): sys.modules = sys.modules.copy() del sys.modules['twisted.internet.reactor'] @@ -215,47 +82,63 @@ def start_lbry_uploader(sd_hash_queue, kill_event, dead_event, file_size, ul_rat twisted.internet.reactor = twisted.internet.epollreactor.EPollReactor() sys.modules['twisted.internet.reactor'] = twisted.internet.reactor - from twisted.internet import reactor - logging.debug("Starting the uploader") +class LbryUploader(object): + def __init__(self, sd_hash_queue, kill_event, dead_event, + file_size, ul_rate_limit=None, is_generous=False): + self.sd_hash_queue = sd_hash_queue + self.kill_event = kill_event + self.dead_event = dead_event + self.file_size = file_size + self.ul_rate_limit = ul_rate_limit + self.is_generous = is_generous + # these attributes get defined in `start` + self.reactor = None + self.sd_identifier = None + self.session = None + self.lbry_file_manager = None + self.server_port = None + self.kill_check = None - Random.atfork() + def start(self): + use_epoll_on_linux() + from twisted.internet import reactor + self.reactor = reactor + logging.debug("Starting the uploader") + Random.atfork() + r = random.Random() + r.seed("start_lbry_uploader") + wallet = FakeWallet() + peer_manager = PeerManager() + peer_finder = FakePeerFinder(5553, peer_manager, 1) + hash_announcer = FakeAnnouncer() + rate_limiter = RateLimiter() + self.sd_identifier = StreamDescriptorIdentifier() + db_dir = "server" + os.mkdir(db_dir) + self.session = Session( + settings.data_rate, db_dir=db_dir, lbryid="abcd", + peer_finder=peer_finder, hash_announcer=hash_announcer, peer_port=5553, + use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, + blob_tracker_class=DummyBlobAvailabilityTracker, + dht_node_class=Node, is_generous=self.is_generous) + stream_info_manager = TempEncryptedFileMetadataManager() + self.lbry_file_manager = EncryptedFileManager( + self.session, 
stream_info_manager, self.sd_identifier) + if self.ul_rate_limit is not None: + self.session.rate_limiter.set_ul_limit(self.ul_rate_limit) + reactor.callLater(1, self.start_all) + if not reactor.running: + reactor.run() - r = random.Random() - r.seed("start_lbry_uploader") - - wallet = FakeWallet() - peer_manager = PeerManager() - peer_finder = FakePeerFinder(5553, peer_manager, 1) - hash_announcer = FakeAnnouncer() - rate_limiter = RateLimiter() - sd_identifier = StreamDescriptorIdentifier() - - - db_dir = "server" - os.mkdir(db_dir) - - session = Session(MIN_BLOB_DATA_PAYMENT_RATE, db_dir=db_dir, lbryid="abcd", - peer_finder=peer_finder, hash_announcer=hash_announcer, peer_port=5553, - use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, blob_tracker_class=DummyBlobAvailabilityTracker, - dht_node_class=Node, is_generous=is_generous) - - stream_info_manager = TempEncryptedFileMetadataManager() - - lbry_file_manager = EncryptedFileManager(session, stream_info_manager, sd_identifier) - - if ul_rate_limit is not None: - session.rate_limiter.set_ul_limit(ul_rate_limit) - - def start_all(): - - d = session.setup() - d.addCallback(lambda _: add_lbry_file_to_sd_identifier(sd_identifier)) - d.addCallback(lambda _: lbry_file_manager.setup()) - d.addCallback(lambda _: start_server()) - d.addCallback(lambda _: create_stream()) - d.addCallback(create_stream_descriptor) - d.addCallback(put_sd_hash_on_queue) + def start_all(self): + d = self.session.setup() + d.addCallback(lambda _: add_lbry_file_to_sd_identifier(self.sd_identifier)) + d.addCallback(lambda _: self.lbry_file_manager.setup()) + d.addCallback(lambda _: self.start_server()) + d.addCallback(lambda _: self.create_stream()) + d.addCallback(self.create_stream_descriptor) + d.addCallback(self.put_sd_hash_on_queue) def print_error(err): logging.critical("Server error: %s", err.getErrorMessage()) @@ -263,71 +146,60 @@ def start_lbry_uploader(sd_hash_queue, kill_event, dead_event, file_size, ul_rat 
d.addErrback(print_error) return d - def start_server(): - - server_port = None - + def start_server(self): + session = self.session query_handler_factories = { - BlobRequestHandlerFactory(session.blob_manager, session.wallet, - session.payment_rate_manager): True, + BlobAvailabilityHandlerFactory(session.blob_manager): True, + BlobRequestHandlerFactory( + session.blob_manager, session.wallet, + session.payment_rate_manager, + analytics.Track()): True, session.wallet.get_wallet_info_query_handler_factory(): True, } - server_factory = ServerProtocolFactory(session.rate_limiter, query_handler_factories, session.peer_manager) - - server_port = reactor.listenTCP(5553, server_factory) + self.server_port = self.reactor.listenTCP(5553, server_factory) logging.debug("Started listening") - - def kill_server(): - ds = [] - ds.append(session.shut_down()) - ds.append(lbry_file_manager.stop()) - if server_port: - ds.append(server_port.stopListening()) - kill_check.stop() - dead_event.set() - dl = defer.DeferredList(ds) - dl.addCallback(lambda _: reactor.stop()) - return dl - - def check_for_kill(): - if kill_event.is_set(): - kill_server() - - kill_check = task.LoopingCall(check_for_kill) - kill_check.start(1.0) + self.kill_check = task.LoopingCall(self.check_for_kill) + self.kill_check.start(1.0) return True - def create_stream(): - test_file = GenFile(file_size, b''.join([chr(i) for i in xrange(0, 64, 6)])) - d = create_lbry_file(session, lbry_file_manager, "test_file", test_file) + def kill_server(self): + session = self.session + ds = [] + ds.append(session.shut_down()) + ds.append(self.lbry_file_manager.stop()) + if self.server_port: + ds.append(self.server_port.stopListening()) + self.kill_check.stop() + self.dead_event.set() + dl = defer.DeferredList(ds) + dl.addCallback(lambda _: self.reactor.stop()) + return dl + + def check_for_kill(self): + if self.kill_event.is_set(): + self.kill_server() + + def create_stream(self): + test_file = GenFile(self.file_size, 
b''.join([chr(i) for i in xrange(0, 64, 6)])) + d = create_lbry_file(self.session, self.lbry_file_manager, "test_file", test_file) return d - def create_stream_descriptor(stream_hash): - descriptor_writer = BlobStreamDescriptorWriter(session.blob_manager) - d = get_sd_info(lbry_file_manager.stream_info_manager, stream_hash, True) + def create_stream_descriptor(self, stream_hash): + descriptor_writer = BlobStreamDescriptorWriter(self.session.blob_manager) + d = get_sd_info(self.lbry_file_manager.stream_info_manager, stream_hash, True) d.addCallback(descriptor_writer.create_descriptor) return d - def put_sd_hash_on_queue(sd_hash): - sd_hash_queue.put(sd_hash) - - reactor.callLater(1, start_all) - if not reactor.running: - reactor.run() + def put_sd_hash_on_queue(self, sd_hash): + self.sd_hash_queue.put(sd_hash) -def start_lbry_reuploader(sd_hash, kill_event, dead_event, ready_event, n, ul_rate_limit=None, is_generous=False): - - if sys.platform.startswith("linux"): - sys.modules = sys.modules.copy() - del sys.modules['twisted.internet.reactor'] - import twisted.internet - twisted.internet.reactor = twisted.internet.epollreactor.EPollReactor() - sys.modules['twisted.internet.reactor'] = twisted.internet.reactor - +def start_lbry_reuploader(sd_hash, kill_event, dead_event, + ready_event, n, ul_rate_limit=None, is_generous=False): + use_epoll_on_linux() from twisted.internet import reactor logging.debug("Starting the uploader") @@ -335,7 +207,7 @@ def start_lbry_reuploader(sd_hash, kill_event, dead_event, ready_event, n, ul_ra Random.atfork() r = random.Random() - r.seed("start_lbry_uploader") + r.seed("start_lbry_reuploader") wallet = FakeWallet() peer_port = 5553 + n @@ -350,11 +222,11 @@ def start_lbry_reuploader(sd_hash, kill_event, dead_event, ready_event, n, ul_ra os.mkdir(db_dir) os.mkdir(blob_dir) - session = Session(MIN_BLOB_DATA_PAYMENT_RATE, db_dir=db_dir, lbryid="abcd" + str(n), - peer_finder=peer_finder, hash_announcer=hash_announcer, - blob_dir=None, 
peer_port=peer_port, - use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, - blob_tracker_class=DummyBlobAvailabilityTracker, is_generous=is_generous) + session = Session(settings.data_rate, db_dir=db_dir, lbryid="abcd" + str(n), + peer_finder=peer_finder, hash_announcer=hash_announcer, + blob_dir=None, peer_port=peer_port, + use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, + blob_tracker_class=DummyBlobAvailabilityTracker, is_generous=settings.is_generous_host) stream_info_manager = TempEncryptedFileMetadataManager() @@ -394,8 +266,11 @@ def start_lbry_reuploader(sd_hash, kill_event, dead_event, ready_event, n, ul_ra server_port = None query_handler_factories = { - BlobRequestHandlerFactory(session.blob_manager, session.wallet, - session.payment_rate_manager): True, + BlobAvailabilityHandlerFactory(session.blob_manager): True, + BlobRequestHandlerFactory( + session.blob_manager, session.wallet, + session.payment_rate_manager, + analytics.Track()): True, session.wallet.get_wallet_info_query_handler_factory(): True, } @@ -434,14 +309,7 @@ def start_lbry_reuploader(sd_hash, kill_event, dead_event, ready_event, n, ul_ra def start_live_server(sd_hash_queue, kill_event, dead_event): - - if sys.platform.startswith("linux"): - sys.modules = sys.modules.copy() - del sys.modules['twisted.internet.reactor'] - import twisted.internet - twisted.internet.reactor = twisted.internet.epollreactor.EPollReactor() - sys.modules['twisted.internet.reactor'] = twisted.internet.reactor - + use_epoll_on_linux() from twisted.internet import reactor logging.debug("In start_server.") @@ -458,14 +326,13 @@ def start_live_server(sd_hash_queue, kill_event, dead_event): rate_limiter = DummyRateLimiter() sd_identifier = StreamDescriptorIdentifier() - db_dir = "server" os.mkdir(db_dir) - session = Session(MIN_BLOB_DATA_PAYMENT_RATE, db_dir=db_dir, lbryid="abcd", + session = Session(settings.data_rate, db_dir=db_dir, lbryid="abcd", peer_finder=peer_finder, 
hash_announcer=hash_announcer, peer_port=5553, use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, - blob_tracker_class=DummyBlobAvailabilityTracker) + blob_tracker_class=DummyBlobAvailabilityTracker, is_generous=settings.is_generous_host) stream_info_manager = DBLiveStreamMetadataManager(session.db_dir, hash_announcer) logging.debug("Created the session") @@ -478,7 +345,8 @@ def start_live_server(sd_hash_queue, kill_event, dead_event): CryptBlobInfoQueryHandlerFactory(stream_info_manager, session.wallet, session.payment_rate_manager): True, BlobRequestHandlerFactory(session.blob_manager, session.wallet, - session.payment_rate_manager): True, + session.payment_rate_manager, + analytics.Track()): True, session.wallet.get_wallet_info_query_handler_factory(): True, } @@ -566,14 +434,7 @@ def start_live_server(sd_hash_queue, kill_event, dead_event): def start_blob_uploader(blob_hash_queue, kill_event, dead_event, slow, is_generous=False): - - if sys.platform.startswith("linux"): - sys.modules = sys.modules.copy() - del sys.modules['twisted.internet.reactor'] - import twisted.internet - twisted.internet.reactor = twisted.internet.epollreactor.EPollReactor() - sys.modules['twisted.internet.reactor'] = twisted.internet.reactor - + use_epoll_on_linux() from twisted.internet import reactor logging.debug("Starting the uploader") @@ -596,14 +457,14 @@ def start_blob_uploader(blob_hash_queue, kill_event, dead_event, slow, is_genero os.mkdir(db_dir) os.mkdir(blob_dir) - session = Session(MIN_BLOB_DATA_PAYMENT_RATE, db_dir=db_dir, lbryid="efgh", - peer_finder=peer_finder, hash_announcer=hash_announcer, - blob_dir=blob_dir, peer_port=peer_port, - use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, - blob_tracker_class=DummyBlobAvailabilityTracker, is_generous=is_generous) + session = Session(settings.data_rate, db_dir=db_dir, lbryid="efgh", + peer_finder=peer_finder, hash_announcer=hash_announcer, + blob_dir=blob_dir, peer_port=peer_port, + use_upnp=False, 
rate_limiter=rate_limiter, wallet=wallet, + blob_tracker_class=DummyBlobAvailabilityTracker, is_generous=settings.is_generous_host) if slow is True: - session.rate_limiter.set_ul_limit(2**11) + session.rate_limiter.set_ul_limit(2 ** 11) def start_all(): d = session.setup() @@ -622,7 +483,10 @@ def start_blob_uploader(blob_hash_queue, kill_event, dead_event, slow, is_genero server_port = None query_handler_factories = { - BlobRequestHandlerFactory(session.blob_manager, session.wallet, session.payment_rate_manager): True, + BlobAvailabilityHandlerFactory(session.blob_manager): True, + BlobRequestHandlerFactory(session.blob_manager, session.wallet, + session.payment_rate_manager, + analytics.Track()): True, session.wallet.get_wallet_info_query_handler_factory(): True, } @@ -654,7 +518,7 @@ def start_blob_uploader(blob_hash_queue, kill_event, dead_event, slow, is_genero def create_single_blob(): blob_creator = session.blob_manager.get_blob_creator() - blob_creator.write("0" * 2**21) + blob_creator.write("0" * 2 ** 21) return blob_creator.close() def put_blob_hash_on_queue(blob_hash): @@ -751,7 +615,8 @@ class TestTransfer(TestCase): sd_hash_queue = Queue() kill_event = Event() dead_event = Event() - uploader = Process(target=start_lbry_uploader, args=(sd_hash_queue, kill_event, dead_event, 5209343)) + lbry_uploader = LbryUploader(sd_hash_queue, kill_event, dead_event, 5209343) + uploader = Process(target=lbry_uploader.start) uploader.start() self.server_processes.append(uploader) @@ -764,27 +629,30 @@ class TestTransfer(TestCase): rate_limiter = DummyRateLimiter() sd_identifier = StreamDescriptorIdentifier() - db_dir = "client" blob_dir = os.path.join(db_dir, "blobfiles") os.mkdir(db_dir) os.mkdir(blob_dir) - self.session = Session(MIN_BLOB_DATA_PAYMENT_RATE, db_dir=db_dir, lbryid="abcd", - peer_finder=peer_finder, hash_announcer=hash_announcer, - blob_dir=blob_dir, peer_port=5553, - use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, 
blob_tracker_class=DummyBlobAvailabilityTracker, - dht_node_class=Node, is_generous=self.is_generous) + self.session = Session( + settings.data_rate, db_dir=db_dir, lbryid="abcd", + peer_finder=peer_finder, hash_announcer=hash_announcer, + blob_dir=blob_dir, peer_port=5553, + use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, + blob_tracker_class=DummyBlobAvailabilityTracker, + dht_node_class=Node, is_generous=self.is_generous) self.stream_info_manager = TempEncryptedFileMetadataManager() - self.lbry_file_manager = EncryptedFileManager(self.session, self.stream_info_manager, sd_identifier) + self.lbry_file_manager = EncryptedFileManager( + self.session, self.stream_info_manager, sd_identifier) def make_downloader(metadata, prm): info_validator = metadata.validator options = metadata.options factories = metadata.factories - chosen_options = [o.default_value for o in options.get_downloader_options(info_validator, prm)] + chosen_options = [ + o.default_value for o in options.get_downloader_options(info_validator, prm)] return factories[0].make_downloader(metadata, chosen_options, prm) def download_file(sd_hash): @@ -856,10 +724,12 @@ class TestTransfer(TestCase): db_dir = "client" os.mkdir(db_dir) - self.session = Session(MIN_BLOB_DATA_PAYMENT_RATE, db_dir=db_dir, lbryid="abcd", - peer_finder=peer_finder, hash_announcer=hash_announcer, blob_dir=None, - peer_port=5553, use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, - blob_tracker_class=DummyBlobAvailabilityTracker, dht_node_class=Node) + self.session = Session( + settings.data_rate, db_dir=db_dir, lbryid="abcd", + peer_finder=peer_finder, hash_announcer=hash_announcer, blob_dir=None, + peer_port=5553, use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, + blob_tracker_class=DummyBlobAvailabilityTracker, dht_node_class=Node + ) self.stream_info_manager = TempLiveStreamMetadataManager(hash_announcer) @@ -869,7 +739,8 @@ class TestTransfer(TestCase): info_validator = metadata.validator options = 
metadata.options factories = metadata.factories - chosen_options = [o.default_value for o in options.get_downloader_options(info_validator, prm)] + chosen_options = [ + o.default_value for o in options.get_downloader_options(info_validator, prm)] return factories[0].make_downloader(metadata, chosen_options, prm) def start_lbry_file(lbry_file): @@ -928,7 +799,6 @@ class TestTransfer(TestCase): return d def test_last_blob_retrieval(self): - kill_event = Event() dead_event_1 = Event() blob_hash_queue_1 = Queue() @@ -951,16 +821,18 @@ class TestTransfer(TestCase): hash_announcer = FakeAnnouncer() rate_limiter = DummyRateLimiter() - db_dir = "client" blob_dir = os.path.join(db_dir, "blobfiles") os.mkdir(db_dir) os.mkdir(blob_dir) - self.session = Session(MIN_BLOB_DATA_PAYMENT_RATE, db_dir=db_dir, lbryid="abcd", - peer_finder=peer_finder, hash_announcer=hash_announcer, - blob_dir=blob_dir, peer_port=5553, - use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, blob_tracker_class=DummyBlobAvailabilityTracker) + self.session = Session( + settings.data_rate, db_dir=db_dir, lbryid="abcd", + peer_finder=peer_finder, hash_announcer=hash_announcer, + blob_dir=blob_dir, peer_port=5553, + use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, + blob_tracker_class=DummyBlobAvailabilityTracker, + is_generous=settings.is_generous_host) d1 = self.wait_for_hash_from_queue(blob_hash_queue_1) d2 = self.wait_for_hash_from_queue(blob_hash_queue_2) @@ -974,8 +846,8 @@ class TestTransfer(TestCase): def download_blob(blob_hash): prm = self.session.payment_rate_manager - downloader = StandaloneBlobDownloader(blob_hash, self.session.blob_manager, peer_finder, - rate_limiter, prm, wallet) + downloader = StandaloneBlobDownloader( + blob_hash, self.session.blob_manager, peer_finder, rate_limiter, prm, wallet) d = downloader.download() return d @@ -1000,23 +872,20 @@ class TestTransfer(TestCase): d1 = self.wait_for_event(dead_event_1, 15) d2 = self.wait_for_event(dead_event_2, 15) dl = 
defer.DeferredList([d1, d2]) - def print_shutting_down(): logging.info("Client is shutting down") - dl.addCallback(lambda _: print_shutting_down()) dl.addCallback(lambda _: arg) return dl - d.addBoth(stop) - return d def test_double_download(self): sd_hash_queue = Queue() kill_event = Event() dead_event = Event() - uploader = Process(target=start_lbry_uploader, args=(sd_hash_queue, kill_event, dead_event, 5209343)) + lbry_uploader = LbryUploader(sd_hash_queue, kill_event, dead_event, 5209343) + uploader = Process(target=lbry_uploader.start) uploader.start() self.server_processes.append(uploader) @@ -1029,7 +898,6 @@ class TestTransfer(TestCase): rate_limiter = DummyRateLimiter() sd_identifier = StreamDescriptorIdentifier() - downloaders = [] db_dir = "client" @@ -1037,10 +905,11 @@ class TestTransfer(TestCase): os.mkdir(db_dir) os.mkdir(blob_dir) - self.session = Session(MIN_BLOB_DATA_PAYMENT_RATE, db_dir=db_dir, lbryid="abcd", - peer_finder=peer_finder, hash_announcer=hash_announcer, - blob_dir=blob_dir, peer_port=5553, use_upnp=False, - rate_limiter=rate_limiter, wallet=wallet, blob_tracker_class=DummyBlobAvailabilityTracker) + self.session = Session(settings.data_rate, db_dir=db_dir, lbryid="abcd", + peer_finder=peer_finder, hash_announcer=hash_announcer, + blob_dir=blob_dir, peer_port=5553, use_upnp=False, + rate_limiter=rate_limiter, wallet=wallet, + blob_tracker_class=DummyBlobAvailabilityTracker, is_generous=settings.is_generous_host) self.stream_info_manager = DBEncryptedFileMetadataManager(self.session.db_dir) self.lbry_file_manager = EncryptedFileManager(self.session, self.stream_info_manager, sd_identifier) @@ -1075,7 +944,8 @@ class TestTransfer(TestCase): logging.debug("deleting the file...") d = self.lbry_file_manager.delete_lbry_file(downloaders[0]) d.addCallback(lambda _: self.lbry_file_manager.get_count_for_stream_hash(downloaders[0].stream_hash)) - d.addCallback(lambda c: self.stream_info_manager.delete_stream(downloaders[1].stream_hash) if c == 0 
else True) + d.addCallback( + lambda c: self.stream_info_manager.delete_stream(downloaders[1].stream_hash) if c == 0 else True) return d def check_lbry_file(): @@ -1132,8 +1002,9 @@ class TestTransfer(TestCase): kill_event = Event() dead_events = [Event() for _ in range(num_uploaders)] ready_events = [Event() for _ in range(1, num_uploaders)] - uploader = Process(target=start_lbry_uploader, args=(sd_hash_queue, kill_event, dead_events[0], - 9373419, 2**22)) + lbry_uploader = LbryUploader( + sd_hash_queue, kill_event, dead_events[0], 5209343, 9373419, 2**22) + uploader = Process(target=lbry_uploader.start) uploader.start() self.server_processes.append(uploader) @@ -1146,25 +1017,27 @@ class TestTransfer(TestCase): rate_limiter = DummyRateLimiter() sd_identifier = StreamDescriptorIdentifier() - db_dir = "client" blob_dir = os.path.join(db_dir, "blobfiles") os.mkdir(db_dir) os.mkdir(blob_dir) - self.session = Session(MIN_BLOB_DATA_PAYMENT_RATE, db_dir=db_dir, lbryid="abcd", - peer_finder=peer_finder, hash_announcer=hash_announcer, - blob_dir=None, peer_port=5553, - use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, blob_tracker_class=DummyBlobAvailabilityTracker) + self.session = Session(settings.data_rate, db_dir=db_dir, lbryid="abcd", + peer_finder=peer_finder, hash_announcer=hash_announcer, + blob_dir=None, peer_port=5553, + use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, + blob_tracker_class=DummyBlobAvailabilityTracker, + is_generous=settings.is_generous_host) self.stream_info_manager = TempEncryptedFileMetadataManager() - self.lbry_file_manager = EncryptedFileManager(self.session, self.stream_info_manager, sd_identifier) + self.lbry_file_manager = EncryptedFileManager( + self.session, self.stream_info_manager, sd_identifier) def start_additional_uploaders(sd_hash): for i in range(1, num_uploaders): uploader = Process(target=start_lbry_reuploader, - args=(sd_hash, kill_event, dead_events[i], ready_events[i-1], i, 2**10)) + args=(sd_hash, 
kill_event, dead_events[i], ready_events[i - 1], i, 2 ** 10)) uploader.start() self.server_processes.append(uploader) return defer.succeed(True) @@ -1228,140 +1101,3 @@ class TestTransfer(TestCase): d.addBoth(stop) return d - - -class TestStreamify(TestCase): - - def setUp(self): - self.session = None - self.stream_info_manager = None - self.lbry_file_manager = None - self.addCleanup(self.take_down_env) - self.is_generous = True - - def take_down_env(self): - - d = defer.succeed(True) - if self.lbry_file_manager is not None: - d.addCallback(lambda _: self.lbry_file_manager.stop()) - if self.session is not None: - d.addCallback(lambda _: self.session.shut_down()) - if self.stream_info_manager is not None: - d.addCallback(lambda _: self.stream_info_manager.stop()) - - def delete_test_env(): - shutil.rmtree('client') - if os.path.exists("test_file"): - os.remove("test_file") - - d.addCallback(lambda _: threads.deferToThread(delete_test_env)) - return d - - def test_create_stream(self): - wallet = FakeWallet() - peer_manager = PeerManager() - peer_finder = FakePeerFinder(5553, peer_manager, 2) - hash_announcer = FakeAnnouncer() - rate_limiter = DummyRateLimiter() - sd_identifier = StreamDescriptorIdentifier() - - - db_dir = "client" - blob_dir = os.path.join(db_dir, "blobfiles") - os.mkdir(db_dir) - os.mkdir(blob_dir) - - self.session = Session(MIN_BLOB_DATA_PAYMENT_RATE, db_dir=db_dir, lbryid="abcd", - peer_finder=peer_finder, hash_announcer=hash_announcer, - blob_dir=blob_dir, peer_port=5553, - use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, - blob_tracker_class=DummyBlobAvailabilityTracker, is_generous=self.is_generous) - - self.stream_info_manager = TempEncryptedFileMetadataManager() - - self.lbry_file_manager = EncryptedFileManager(self.session, self.stream_info_manager, sd_identifier) - - d = self.session.setup() - d.addCallback(lambda _: self.stream_info_manager.setup()) - d.addCallback(lambda _: add_lbry_file_to_sd_identifier(sd_identifier)) - 
d.addCallback(lambda _: self.lbry_file_manager.setup()) - - def verify_equal(sd_info): - self.assertEqual(sd_info, test_create_stream_sd_file) - - def verify_stream_descriptor_file(stream_hash): - d = get_sd_info(self.lbry_file_manager.stream_info_manager, stream_hash, True) - d.addCallback(verify_equal) - return d - - def iv_generator(): - iv = 0 - while 1: - iv += 1 - yield "%016d" % iv - - def create_stream(): - test_file = GenFile(5209343, b''.join([chr(i + 3) for i in xrange(0, 64, 6)])) - d = create_lbry_file(self.session, self.lbry_file_manager, "test_file", test_file, - key="0123456701234567", iv_generator=iv_generator()) - return d - - d.addCallback(lambda _: create_stream()) - d.addCallback(verify_stream_descriptor_file) - return d - - def test_create_and_combine_stream(self): - - wallet = FakeWallet() - peer_manager = PeerManager() - peer_finder = FakePeerFinder(5553, peer_manager, 2) - hash_announcer = FakeAnnouncer() - rate_limiter = DummyRateLimiter() - sd_identifier = StreamDescriptorIdentifier() - - db_dir = "client" - blob_dir = os.path.join(db_dir, "blobfiles") - os.mkdir(db_dir) - os.mkdir(blob_dir) - - self.session = Session(MIN_BLOB_DATA_PAYMENT_RATE, db_dir=db_dir, lbryid="abcd", - peer_finder=peer_finder, hash_announcer=hash_announcer, - blob_dir=blob_dir, peer_port=5553, - use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, blob_tracker_class=DummyBlobAvailabilityTracker) - - self.stream_info_manager = DBEncryptedFileMetadataManager(self.session.db_dir) - - self.lbry_file_manager = EncryptedFileManager(self.session, self.stream_info_manager, sd_identifier) - - def start_lbry_file(lbry_file): - logging.debug("Calling lbry_file.start()") - d = lbry_file.start() - return d - - def combine_stream(stream_hash): - - prm = self.session.payment_rate_manager - d = self.lbry_file_manager.add_lbry_file(stream_hash, prm) - d.addCallback(start_lbry_file) - - def check_md5_sum(): - f = open('test_file') - hashsum = MD5.new() - 
hashsum.update(f.read()) - self.assertEqual(hashsum.hexdigest(), "68959747edc73df45e45db6379dd7b3b") - - d.addCallback(lambda _: check_md5_sum()) - return d - - def create_stream(): - test_file = GenFile(53209343, b''.join([chr(i + 5) for i in xrange(0, 64, 6)])) - return create_lbry_file(self.session, self.lbry_file_manager, "test_file", test_file, - suggested_file_name="test_file") - - d = self.session.setup() - d.addCallback(lambda _: self.stream_info_manager.setup()) - d.addCallback(lambda _: add_lbry_file_to_sd_identifier(sd_identifier)) - d.addCallback(lambda _: self.lbry_file_manager.setup()) - d.addCallback(lambda _: create_stream()) - d.addCallback(combine_stream) - return d diff --git a/tests/functional/test_reflector.py b/tests/functional/test_reflector.py index 7dd0ad6e7..6603bc6d3 100644 --- a/tests/functional/test_reflector.py +++ b/tests/functional/test_reflector.py @@ -4,7 +4,7 @@ import shutil from twisted.internet import defer, threads, error from twisted.trial import unittest -from lbrynet import conf +from lbrynet.conf import settings from lbrynet import lbryfile from lbrynet import reflector from lbrynet.core import BlobManager @@ -83,7 +83,7 @@ class TestReflector(unittest.TestCase): os.mkdir(db_dir) self.session = Session.Session( - conf.MIN_BLOB_DATA_PAYMENT_RATE, + settings.data_rate, db_dir=db_dir, lbryid="abcd", peer_finder=peer_finder, @@ -93,7 +93,7 @@ class TestReflector(unittest.TestCase): use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, - blob_tracker_class=mocks.DummyBlobAvailabilityTracker, + blob_tracker_class=mocks.BlobAvailabilityTracker, dht_node_class=Node ) diff --git a/tests/functional/test_streamify.py b/tests/functional/test_streamify.py new file mode 100644 index 000000000..c6378956d --- /dev/null +++ b/tests/functional/test_streamify.py @@ -0,0 +1,172 @@ +import logging +import os +import shutil + +from Crypto.Hash import MD5 +from twisted.trial.unittest import TestCase +from twisted.internet import defer, 
threads + +from lbrynet.conf import settings +from lbrynet.lbryfile.EncryptedFileMetadataManager import TempEncryptedFileMetadataManager +from lbrynet.lbryfile.EncryptedFileMetadataManager import DBEncryptedFileMetadataManager +from lbrynet.lbryfilemanager.EncryptedFileManager import EncryptedFileManager +from lbrynet.core.Session import Session +from lbrynet.core.StreamDescriptor import StreamDescriptorIdentifier +from lbrynet.lbryfilemanager.EncryptedFileCreator import create_lbry_file +from lbrynet.lbryfile.client.EncryptedFileOptions import add_lbry_file_to_sd_identifier +from lbrynet.lbryfile.StreamDescriptor import get_sd_info +from lbrynet.core.PeerManager import PeerManager +from lbrynet.core.RateLimiter import DummyRateLimiter, RateLimiter + +from tests import mocks + + +FakeNode = mocks.Node +FakeWallet = mocks.Wallet +FakePeerFinder = mocks.PeerFinder +FakeAnnouncer = mocks.Announcer +GenFile = mocks.GenFile +test_create_stream_sd_file = mocks.create_stream_sd_file +DummyBlobAvailabilityTracker = mocks.BlobAvailabilityTracker + + +class TestStreamify(TestCase): + def setUp(self): + self.session = None + self.stream_info_manager = None + self.lbry_file_manager = None + self.addCleanup(self.take_down_env) + self.is_generous = True + + def take_down_env(self): + d = defer.succeed(True) + if self.lbry_file_manager is not None: + d.addCallback(lambda _: self.lbry_file_manager.stop()) + if self.session is not None: + d.addCallback(lambda _: self.session.shut_down()) + if self.stream_info_manager is not None: + d.addCallback(lambda _: self.stream_info_manager.stop()) + + def delete_test_env(): + shutil.rmtree('client') + if os.path.exists("test_file"): + os.remove("test_file") + + d.addCallback(lambda _: threads.deferToThread(delete_test_env)) + return d + + def test_create_stream(self): + wallet = FakeWallet() + peer_manager = PeerManager() + peer_finder = FakePeerFinder(5553, peer_manager, 2) + hash_announcer = FakeAnnouncer() + rate_limiter = 
DummyRateLimiter() + sd_identifier = StreamDescriptorIdentifier() + + + db_dir = "client" + blob_dir = os.path.join(db_dir, "blobfiles") + os.mkdir(db_dir) + os.mkdir(blob_dir) + + self.session = Session( + settings.data_rate, db_dir=db_dir, lbryid="abcd", + peer_finder=peer_finder, hash_announcer=hash_announcer, + blob_dir=blob_dir, peer_port=5553, + use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, + blob_tracker_class=DummyBlobAvailabilityTracker, + is_generous=self.is_generous + ) + + self.stream_info_manager = TempEncryptedFileMetadataManager() + + self.lbry_file_manager = EncryptedFileManager( + self.session, self.stream_info_manager, sd_identifier) + + d = self.session.setup() + d.addCallback(lambda _: self.stream_info_manager.setup()) + d.addCallback(lambda _: add_lbry_file_to_sd_identifier(sd_identifier)) + d.addCallback(lambda _: self.lbry_file_manager.setup()) + + def verify_equal(sd_info): + self.assertEqual(sd_info, test_create_stream_sd_file) + + def verify_stream_descriptor_file(stream_hash): + d = get_sd_info(self.lbry_file_manager.stream_info_manager, stream_hash, True) + d.addCallback(verify_equal) + return d + + def iv_generator(): + iv = 0 + while 1: + iv += 1 + yield "%016d" % iv + + def create_stream(): + test_file = GenFile(5209343, b''.join([chr(i + 3) for i in xrange(0, 64, 6)])) + d = create_lbry_file(self.session, self.lbry_file_manager, "test_file", test_file, + key="0123456701234567", iv_generator=iv_generator()) + return d + + d.addCallback(lambda _: create_stream()) + d.addCallback(verify_stream_descriptor_file) + return d + + def test_create_and_combine_stream(self): + wallet = FakeWallet() + peer_manager = PeerManager() + peer_finder = FakePeerFinder(5553, peer_manager, 2) + hash_announcer = FakeAnnouncer() + rate_limiter = DummyRateLimiter() + sd_identifier = StreamDescriptorIdentifier() + + db_dir = "client" + blob_dir = os.path.join(db_dir, "blobfiles") + os.mkdir(db_dir) + os.mkdir(blob_dir) + + self.session = Session( + 
settings.data_rate, db_dir=db_dir, lbryid="abcd", + peer_finder=peer_finder, hash_announcer=hash_announcer, + blob_dir=blob_dir, peer_port=5553, + use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, + blob_tracker_class=DummyBlobAvailabilityTracker + ) + + self.stream_info_manager = DBEncryptedFileMetadataManager(self.session.db_dir) + + self.lbry_file_manager = EncryptedFileManager( + self.session, self.stream_info_manager, sd_identifier) + + def start_lbry_file(lbry_file): + logging.debug("Calling lbry_file.start()") + d = lbry_file.start() + return d + + def combine_stream(stream_hash): + prm = self.session.payment_rate_manager + d = self.lbry_file_manager.add_lbry_file(stream_hash, prm) + d.addCallback(start_lbry_file) + + def check_md5_sum(): + f = open('test_file') + hashsum = MD5.new() + hashsum.update(f.read()) + self.assertEqual(hashsum.hexdigest(), "68959747edc73df45e45db6379dd7b3b") + + d.addCallback(lambda _: check_md5_sum()) + return d + + def create_stream(): + test_file = GenFile(53209343, b''.join([chr(i + 5) for i in xrange(0, 64, 6)])) + return create_lbry_file( + self.session, self.lbry_file_manager, "test_file", test_file, + suggested_file_name="test_file") + + d = self.session.setup() + d.addCallback(lambda _: self.stream_info_manager.setup()) + d.addCallback(lambda _: add_lbry_file_to_sd_identifier(sd_identifier)) + d.addCallback(lambda _: self.lbry_file_manager.setup()) + d.addCallback(lambda _: create_stream()) + d.addCallback(combine_stream) + return d diff --git a/tests/mocks.py b/tests/mocks.py index 1dbb3fdf3..c63827932 100644 --- a/tests/mocks.py +++ b/tests/mocks.py @@ -2,10 +2,10 @@ import io from Crypto.PublicKey import RSA from decimal import Decimal -from twisted.internet import defer, threads, task, error +from twisted.internet import defer from lbrynet.core import PTCWallet -from lbrynet.core.BlobAvailability import BlobAvailabilityTracker +from lbrynet.core import BlobAvailability class Node(object): @@ -54,6 +54,9 @@ 
class Wallet(object): def set_public_key_for_peer(self, peer, public_key): pass + def get_claim_metadata_for_sd_hash(self, sd_hash): + return "fakeuri", "faketxid" + class PeerFinder(object): def __init__(self, start_port, peer_manager, num_peers): @@ -136,7 +139,7 @@ class GenFile(io.RawIOBase): return output -class DummyBlobAvailabilityTracker(BlobAvailabilityTracker): +class BlobAvailabilityTracker(BlobAvailability.BlobAvailabilityTracker): """ Class to track peer counts for known blobs, and to discover new popular blobs diff --git a/tests/unit/analytics/__init__.py b/tests/unit/analytics/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/unit/analytics/test_events.py b/tests/unit/analytics/test_events.py new file mode 100644 index 000000000..d9baf2e59 --- /dev/null +++ b/tests/unit/analytics/test_events.py @@ -0,0 +1,38 @@ +from lbrynet.analytics import events + +from twisted.trial import unittest + +from tests import util + + +class EventsTest(unittest.TestCase): + def setUp(self): + util.resetTime(self) + self.event_generator = events.Events('any valid json datatype', 'lbry123', 'session456') + + def test_heartbeat(self): + result = self.event_generator.heartbeat() + desired_result = { + 'context': 'any valid json datatype', + 'event': 'Heartbeat', + 'properties': {'lbry_id': 'lbry123', 'session_id': 'session456'}, + 'timestamp': '2016-01-01T00:00:00Z', + 'userId': 'lbry' + } + self.assertEqual(desired_result, result) + + def test_download_started(self): + result = self.event_generator.download_started('great gatsby') + desired_result = { + 'context': 'any valid json datatype', + 'event': 'Download Started', + 'properties': { + 'lbry_id': 'lbry123', + 'session_id': 'session456', + 'name': 'great gatsby', + 'stream_info': None, + }, + 'timestamp': '2016-01-01T00:00:00Z', + 'userId': 'lbry' + } + self.assertEqual(desired_result, result) diff --git a/tests/unit/analytics/test_track.py b/tests/unit/analytics/test_track.py new file mode 
100644 index 000000000..531ec56a5 --- /dev/null +++ b/tests/unit/analytics/test_track.py @@ -0,0 +1,27 @@ +from lbrynet import analytics + +from twisted.trial import unittest + + +class TrackTest(unittest.TestCase): + def test_empty_summarize_is_None(self): + track = analytics.Track() + _, result = track.summarize_and_reset('a') + self.assertEqual(None, result) + + def test_can_get_sum_of_metric(self): + track = analytics.Track() + track.add_observation('b', 1) + track.add_observation('b', 2) + + _, result = track.summarize_and_reset('b') + self.assertEqual(3, result) + + def test_summarize_resets_metric(self): + track = analytics.Track() + track.add_observation('metric', 1) + track.add_observation('metric', 2) + + track.summarize_and_reset('metric') + _, result = track.summarize_and_reset('metric') + self.assertEqual(None, result) diff --git a/tests/unit/core/server/test_BlobRequestHandler.py b/tests/unit/core/server/test_BlobRequestHandler.py index 31d7e48ee..aeb8a4ffd 100644 --- a/tests/unit/core/server/test_BlobRequestHandler.py +++ b/tests/unit/core/server/test_BlobRequestHandler.py @@ -5,17 +5,20 @@ from twisted.internet import defer from twisted.test import proto_helpers from twisted.trial import unittest +from lbrynet import analytics from lbrynet.core import Peer from lbrynet.core.server import BlobRequestHandler from lbrynet.core.PaymentRateManager import NegotiatedPaymentRateManager, BasePaymentRateManager -from tests.mocks import DummyBlobAvailabilityTracker +from tests.mocks import BlobAvailabilityTracker as DummyBlobAvailabilityTracker class TestBlobRequestHandlerQueries(unittest.TestCase): def setUp(self): self.blob_manager = mock.Mock() - self.payment_rate_manager = NegotiatedPaymentRateManager(BasePaymentRateManager(0.001), DummyBlobAvailabilityTracker()) - self.handler = BlobRequestHandler.BlobRequestHandler(self.blob_manager, None, self.payment_rate_manager) + self.payment_rate_manager = NegotiatedPaymentRateManager( + 
BasePaymentRateManager(0.001), DummyBlobAvailabilityTracker()) + self.handler = BlobRequestHandler.BlobRequestHandler( + self.blob_manager, None, self.payment_rate_manager, None) def test_empty_response_when_empty_query(self): self.assertEqual({}, self.successResultOf(self.handler.handle_queries({}))) @@ -107,7 +110,7 @@ class TestBlobRequestHandlerQueries(unittest.TestCase): class TestBlobRequestHandlerSender(unittest.TestCase): def test_nothing_happens_if_not_currently_uploading(self): - handler = BlobRequestHandler.BlobRequestHandler(None, None, None) + handler = BlobRequestHandler.BlobRequestHandler(None, None, None, None) handler.currently_uploading = None deferred = handler.send_blob_if_requested(None) self.assertEqual(True, self.successResultOf(deferred)) @@ -116,7 +119,8 @@ class TestBlobRequestHandlerSender(unittest.TestCase): # TODO: also check that the expected payment values are set consumer = proto_helpers.StringTransport() test_file = StringIO.StringIO('test') - handler = BlobRequestHandler.BlobRequestHandler(None, None, None) + track = analytics.Track() + handler = BlobRequestHandler.BlobRequestHandler(None, None, None, track) handler.peer = mock.create_autospec(Peer.Peer) handler.currently_uploading = mock.Mock() handler.read_handle = test_file diff --git a/tests/unit/core/test_ExchangeRateManager.py b/tests/unit/core/test_ExchangeRateManager.py index 96827c263..1cd24b700 100644 --- a/tests/unit/core/test_ExchangeRateManager.py +++ b/tests/unit/core/test_ExchangeRateManager.py @@ -1,9 +1,10 @@ -import mock from lbrynet.metadata import Fee from lbrynet.lbrynet_daemon import ExchangeRateManager from twisted.trial import unittest +from tests import util + class FeeFormatTest(unittest.TestCase): def test_fee_created_with_correct_inputs(self): @@ -19,10 +20,7 @@ class FeeFormatTest(unittest.TestCase): class FeeTest(unittest.TestCase): def setUp(self): - patcher = mock.patch('time.time') - self.time = patcher.start() - self.time.return_value = 0 - 
self.addCleanup(patcher.stop) + util.resetTime(self) def test_fee_converts_to_lbc(self): fee_dict = { @@ -31,6 +29,10 @@ class FeeTest(unittest.TestCase): 'address': "bRcHraa8bYJZL7vkh5sNmGwPDERFUjGPP9" } } - rates = {'BTCLBC': {'spot': 3.0, 'ts': 2}, 'USDBTC': {'spot': 2.0, 'ts': 3}} + rates = { + 'BTCLBC': {'spot': 3.0, 'ts': util.DEFAULT_ISO_TIME + 1}, + 'USDBTC': {'spot': 2.0, 'ts': util.DEFAULT_ISO_TIME + 2} + } manager = ExchangeRateManager.DummyExchangeRateManager(rates) - self.assertEqual(60.0, manager.to_lbc(fee_dict).amount) + result = manager.to_lbc(fee_dict).amount + self.assertEqual(60.0, result) diff --git a/tests/unit/core/test_Strategy.py b/tests/unit/core/test_Strategy.py index 62e18d7f7..1cf5bb39e 100644 --- a/tests/unit/core/test_Strategy.py +++ b/tests/unit/core/test_Strategy.py @@ -5,7 +5,7 @@ import mock from lbrynet.core.PaymentRateManager import NegotiatedPaymentRateManager, BasePaymentRateManager from lbrynet.core.Strategy import BasicAvailabilityWeightedStrategy from lbrynet.core.Offer import Offer -from tests.mocks import DummyBlobAvailabilityTracker +from tests.mocks import BlobAvailabilityTracker as DummyBlobAvailabilityTracker MAX_NEGOTIATION_TURNS = 10 random.seed(12345) diff --git a/tests/unit/dht/__init__.py b/tests/unit/dht/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/unit/dht/test_contact.py b/tests/unit/dht/test_contact.py new file mode 100644 index 000000000..da361b364 --- /dev/null +++ b/tests/unit/dht/test_contact.py @@ -0,0 +1,49 @@ +import unittest + +from lbrynet.dht import contact + + +class ContactOperatorsTest(unittest.TestCase): + """ Basic tests case for boolean operators on the Contact class """ + def setUp(self): + self.firstContact = contact.Contact('firstContactID', '127.0.0.1', 1000, None, 1) + self.secondContact = contact.Contact('2ndContactID', '192.168.0.1', 1000, None, 32) + self.secondContactCopy = contact.Contact('2ndContactID', '192.168.0.1', 1000, None, 32) + 
self.firstContactDifferentValues = contact.Contact( + 'firstContactID', '192.168.1.20', 1000, None, 50) + + def testBoolean(self): + """ Test "equals" and "not equals" comparisons """ + self.failIfEqual( + self.firstContact, self.secondContact, + 'Contacts with different IDs should not be equal.') + self.failUnlessEqual( + self.firstContact, self.firstContactDifferentValues, + 'Contacts with same IDs should be equal, even if their other values differ.') + self.failUnlessEqual( + self.secondContact, self.secondContactCopy, + 'Different copies of the same Contact instance should be equal') + + def testStringComparisons(self): + """ Test comparisons of Contact objects with str types """ + self.failUnlessEqual( + 'firstContactID', self.firstContact, + 'The node ID string must be equal to the contact object') + self.failIfEqual( + 'some random string', self.firstContact, + "The tested string should not be equal to the contact object (not equal to it's ID)") + + def testIllogicalComparisons(self): + """ Test comparisons with non-Contact and non-str types """ + msg = '"{}" operator: Contact object should not be equal to {} type' + for item in (123, [1,2,3], {'key': 'value'}): + self.failIfEqual( + self.firstContact, item, + msg.format('eq', type(item).__name__)) + self.failUnless( + self.firstContact != item, + msg.format('ne', type(item).__name__)) + + def testCompactIP(self): + self.assertEqual(self.firstContact.compact_ip(), '\x7f\x00\x00\x01') + self.assertEqual(self.secondContact.compact_ip(), '\xc0\xa8\x00\x01') diff --git a/tests/dht/testDatastore.py b/tests/unit/dht/test_datastore.py similarity index 100% rename from tests/dht/testDatastore.py rename to tests/unit/dht/test_datastore.py diff --git a/tests/dht/testEncoding.py b/tests/unit/dht/test_encoding.py similarity index 100% rename from tests/dht/testEncoding.py rename to tests/unit/dht/test_encoding.py diff --git a/tests/dht/testKBucket.py b/tests/unit/dht/test_kbucket.py similarity index 100% rename from 
tests/dht/testKBucket.py rename to tests/unit/dht/test_kbucket.py diff --git a/tests/dht/testMessages.py b/tests/unit/dht/test_messages.py similarity index 100% rename from tests/dht/testMessages.py rename to tests/unit/dht/test_messages.py diff --git a/tests/util.py b/tests/util.py new file mode 100644 index 000000000..34e644511 --- /dev/null +++ b/tests/util.py @@ -0,0 +1,23 @@ +import datetime +import time + +import mock + + +DEFAULT_TIMESTAMP = datetime.datetime(2016, 1, 1) +DEFAULT_ISO_TIME = time.mktime(DEFAULT_TIMESTAMP.timetuple()) + + +def resetTime(test_case, timestamp=DEFAULT_TIMESTAMP): + iso_time = time.mktime(timestamp.timetuple()) + patcher = mock.patch('time.time') + patcher.start().return_value = iso_time + test_case.addCleanup(patcher.stop) + + patcher = mock.patch('lbrynet.core.utils.now') + patcher.start().return_value = timestamp + test_case.addCleanup(patcher.stop) + + patcher = mock.patch('lbrynet.core.utils.utcnow') + patcher.start().return_value = timestamp + test_case.addCleanup(patcher.stop)