forked from LBRYCommunity/lbry-sdk
commit 53579dc613
Merge branch 'master' into hackrush
25 changed files with 392 additions and 203 deletions
@@ -27,7 +27,10 @@ test_script:
  - pip install .
  - pylint lbrynet
  # disable tests for now so that appveyor can build the app
  #- python -m twisted.trial tests # avoids having to set PYTHONPATH=. (see https://twistedmatrix.com/trac/ticket/9035)
  - set PYTHONPATH=.
  - trial tests
  # TODO: integration tests do not work
  #- python -m unittest discover tests/integration
  #- rvm use 2.3.1 && gem install danger --version '~> 4.0' && danger
@@ -1,5 +1,5 @@
[bumpversion]
current_version = 0.14.2rc2
current_version = 0.14.1
commit = True
tag = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)((?P<release>[a-z]+)(?P<candidate>\d+))?
CHANGELOG.md (71 changes)
@@ -9,38 +9,65 @@ at anytime.

## [Unreleased]
### Added
* Missing docstring for `blob_list`
* Added convenient import for setting up a daemon client, `from lbrynet.daemon import get_client`
* Added unit tests for CryptBlob.py
* Add link to instructions on how to change the default peer port
*

### Changed
* Change `max_key_fee` setting to be a dictionary with values for `currency` and `amount`
* Linux default downloads folder changed from `~/Downloads` to `XDG_DOWNLOAD_DIR`
* Linux folders moved from the home directory to `~/.local/share/lbry`
* Windows folders moved from `%APPDATA%/Roaming` to `%APPDATA%/Local/lbry`
* Renamed `lbrynet.lbryfile` to `lbrynet.lbry_file`
* Renamed `lbrynet.lbryfilemanager` to `lbrynet.file_manager`
* Renamed `lbrynet.lbrynet_daemon` to `lbrynet.daemon`
* Initialize lbrynet settings when configuring an api client if they are not set yet
* Updated lbryum imports
* Improve error message when resolving a claim fails using the "get" command
*
*

### Fixed
* Fixed some log messages throwing exceptions
* Fix shutdown of the blob tracker by Session
* Fixed claim_new_support docstrings
* Fix default directories to comply to XDG
* Fixed BlobManager causing functional tests to fail, removed its unneeded manage() loop
* Increased max_key_fee
* Gives message and instructions if port 3333 is used
* peer_port is settable using `settings_set`

### Deprecated
*
*

### Removed
* Removed unused settings from conf.py and `settings_set`
* Removed download_directory argument from API command get
*
*

## [0.14.1] - 2017-07-07

### Fixed
* Fixed timeout behaviour when calling API command get
* Fixed https://github.com/lbryio/lbry/issues/765


## [0.14.0] - 2017-07-05

### Added
* Missing docstring for `blob_list`
* Added convenient import for setting up a daemon client, `from lbrynet.daemon import get_client`
* Added unit tests for CryptBlob.py

### Changed
* Change `max_key_fee` setting to be a dictionary with values for `currency` and `amount`
* Renamed `lbrynet.lbryfile` to `lbrynet.lbry_file`
* Renamed `lbrynet.lbryfilemanager` to `lbrynet.file_manager`
* Renamed `lbrynet.lbrynet_daemon` to `lbrynet.daemon`
* Initialize lbrynet settings when configuring an api client if they are not set yet
* Updated lbryum imports
* Improve error message when resolving a claim fails using the "get" command

### Removed
* Removed unused settings from conf.py and `settings_set`
* Removed download_directory argument from API command get

### Fixed
* Fixed some log messages throwing exceptions
* Fix shutdown of the blob tracker by Session
* Fixed claim_new_support docstrings
* Fixed BlobManager causing functional tests to fail, removed its unneeded manage() loop
* Increased max_key_fee
* Fixed unit tests on appveyor Windows build
* Fixed [#692](https://github.com/lbryio/lbry/issues/692)


## [0.13.1] - 2017-06-15
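
For illustration, a minimal sketch of the new `max_key_fee` shape described under Unreleased/Changed; the dictionary keys come from the changelog entry, while the currency and amount values below are made up:

    # Hypothetical example of the dictionary form of max_key_fee;
    # 'currency' and 'amount' are the keys named in the changelog,
    # the values here are illustrative, not project defaults.
    max_key_fee = {'currency': 'USD', 'amount': 50.0}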
@@ -11,7 +11,7 @@ It provides a daemon that can interact with the network via a json-rpc interface

## Installing

**Note**: This project no longer directly bundles a graphic interface (browser). If you want to use LBRY via a browser, [use the LBRY App](https://github.com/lbryio/lbry-app).
**Note**: This project no longer bundles a graphic interface (browser). If you want to use LBRY via a GUI, [use the LBRY App](https://github.com/lbryio/lbry-app).

Our [releases page](https://github.com/lbryio/lbry/releases) contains pre-built binaries of the latest release, pre-releases, and past releases, for macOS, Debian-based Linux, and Windows.

@@ -25,7 +25,7 @@ Run `lbrynet-daemon` to launch the daemon.

By default, `lbrynet-daemon` will provide a JSON-RPC server at `http://localhost:5279`. It is easy to interact with via cURL or sane programming languages.

Our [quickstart guide](http://lbry.io/quickstart) provides clear sample usages and free credits for learning.
Our [quickstart guide](http://lbry.io/quickstart) provides a simple walkthrough and examples for learning.

The full API is documented [here](https://lbry.io/api).
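
For illustration, a minimal Python 2 sketch of talking to the JSON-RPC endpoint the README mentions; the 'status' method name and the {"method": ..., "params": ...} payload shape are assumptions, not something this diff specifies:

    import json
    import urllib2

    # Assumes a locally running daemon on the default port named in the README.
    payload = json.dumps({'method': 'status', 'params': {}})
    request = urllib2.Request('http://localhost:5279', payload,
                              {'Content-Type': 'application/json'})
    print(json.loads(urllib2.urlopen(request).read()))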
@@ -1,6 +1,6 @@
import logging

__version__ = "0.14.2rc2"
__version__ = "0.14.2rc1"
version = tuple(__version__.split('.'))

logging.getLogger(__name__).addHandler(logging.NullHandler())
@@ -2,11 +2,10 @@ import base58
import json
import logging
import os
import re
import sys
import yaml
import envparse
from appdirs import user_data_dir, user_config_dir
from appdirs import user_data_dir
from lbrynet.core import utils

log = logging.getLogger(__name__)

@@ -44,48 +43,44 @@ settings_encoders = {
    '.yml': yaml.safe_dump
}

if 'darwin' in sys.platform:
def _win_path_to_bytes(path):
    """
    Encode Windows paths to string. appdirs.user_data_dir()
    on windows will return unicode path, unlike other platforms
    which returns string. This will cause problems
    because we use strings for filenames and combining them with
    os.path.join() will result in errors.
    """
    for encoding in ('ASCII', 'MBCS'):
        try:
            return path.encode(encoding)
        except (UnicodeEncodeError, LookupError):
            pass
    return path

if sys.platform.startswith('darwin'):
    platform = DARWIN
    default_download_dir = os.path.expanduser('~/Downloads')
    default_download_directory = os.path.join(os.path.expanduser('~'), 'Downloads')
    default_data_dir = user_data_dir('LBRY')
    default_lbryum_dir = os.path.expanduser('~/.lbryum')
elif 'nt' in sys.platform:
    default_lbryum_dir = os.path.join(os.path.expanduser('~'), '.lbryum')
elif sys.platform.startswith('win'):
    platform = WINDOWS
    from lbrynet.winhelpers.knownpaths import get_path, FOLDERID, UserHandle

    default_download_dir = get_path(FOLDERID.Downloads, UserHandle.current)
    default_download_directory = get_path(FOLDERID.Downloads, UserHandle.current)
    default_data_dir = os.path.join(
        get_path(FOLDERID.RoamingAppData, UserHandle.current), 'lbrynet')
    default_lbryum_dir = os.path.join(
        get_path(FOLDERID.RoamingAppData, UserHandle.current), 'lbryum')

    # This checks whether the folders are still in their old locations
    if os.path.isdir(user_data_dir('lbryum', roaming=True)) and \
            os.path.isdir(user_data_dir('lbrynet', roaming=True)):
        default_data_dir = user_data_dir('lbrynet', roaming=True)
        default_lbryum_dir = user_data_dir('lbryum', roaming=True)
    else:
        default_data_dir = user_data_dir('lbrynet', 'lbry')
        default_lbryum_dir = user_data_dir('lbryum', 'lbry')
    default_download_directory = _win_path_to_bytes(default_download_directory)
    default_data_dir = _win_path_to_bytes(default_data_dir)
    default_lbryum_dir = _win_path_to_bytes(default_lbryum_dir)
else:
    platform = LINUX

    # This checks whether the folders are still in their old locations
    if os.path.isdir(os.path.expanduser('~/.lbrynet')) and \
            os.path.isdir(os.path.expanduser('~/.lbryum')):
        default_data_dir = os.path.expanduser('~/.lbrynet')
        default_lbryum_dir = os.path.expanduser('~/.lbryum')
        default_download_dir = os.path.expanduser('~/Downloads')
    else:
        default_data_dir = user_data_dir('lbry/lbrynet')
        default_lbryum_dir = user_data_dir('lbry/lbryum')
        try:
            with open(os.path.join(user_config_dir(), 'user-dirs.dirs'), 'r') as xdg:
                down_dir = re.search(r'XDG_DOWNLOAD_DIR=(.+)', xdg.read()).group(1)
                down_dir = re.sub('\$HOME', os.getenv('HOME'), down_dir)
                default_download_dir = re.sub('\"', '', down_dir)
        except EnvironmentError:
            default_download_dir = os.getenv('XDG_DOWNLOAD_DIR')

        if not default_download_dir:
            default_download_dir = os.path.expanduser('~/Downloads')

    default_download_directory = os.path.join(os.path.expanduser('~'), 'Downloads')
    default_data_dir = os.path.join(os.path.expanduser('~'), '.lbrynet')
    default_lbryum_dir = os.path.join(os.path.expanduser('~'), '.lbryum')

ICON_PATH = 'icons' if platform is WINDOWS else 'app.icns'
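
As a side note on the Linux branch above: the regex pulls XDG_DOWNLOAD_DIR out of ~/.config/user-dirs.dirs. A standalone sketch of that parsing against a typical line from such a file (the sample line and the fallback home path are illustrative):

    import os
    import re

    # A typical entry from ~/.config/user-dirs.dirs (sample, not from this repo).
    sample = 'XDG_DOWNLOAD_DIR="$HOME/Downloads"'
    down_dir = re.search(r'XDG_DOWNLOAD_DIR=(.+)', sample).group(1)
    down_dir = re.sub(r'\$HOME', os.getenv('HOME', '/home/user'), down_dir)
    print(re.sub('"', '', down_dir))  # e.g. /home/user/Downloads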
@@ -183,7 +178,7 @@ ADJUSTABLE_SETTINGS = {
    'data_rate': (float, .0001), # points/megabyte
    'delete_blobs_on_remove': (bool, True),
    'dht_node_port': (int, 4444),
    'download_directory': (str, default_download_dir),
    'download_directory': (str, default_download_directory),
    'download_timeout': (int, 180),
    'is_generous_host': (bool, True),
    'known_dht_nodes': (list, DEFAULT_DHT_NODES, server_port),
@@ -9,11 +9,23 @@ class DuplicateStreamHashError(Exception):
class DownloadCanceledError(Exception):
    pass

class DownloadTimeoutError(Exception):
    def __init__(self, download):
        Exception.__init__(self, 'Failed to download {} within timeout'.format(download))
        self.download = download

class RequestCanceledError(Exception):
    pass


class NegativeFundsError(Exception):
    pass


class NullFundsError(Exception):
    pass


class InsufficientFundsError(Exception):
    pass
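
A short sketch of how the new DownloadTimeoutError behaves, based on the constructor above; the blob hash string is a placeholder:

    from lbrynet.core.Error import DownloadTimeoutError

    try:
        raise DownloadTimeoutError('some_blob_hash')  # placeholder identifier
    except DownloadTimeoutError as err:
        print(err)           # Failed to download some_blob_hash within timeout
        print(err.download)  # some_blob_hash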
@@ -236,7 +236,7 @@ class StreamDescriptorIdentifier(object):
        return d


def download_sd_blob(session, blob_hash, payment_rate_manager):
def download_sd_blob(session, blob_hash, payment_rate_manager, timeout=None):
    """
    Downloads a single blob from the network

@@ -253,5 +253,6 @@ def download_sd_blob(session, blob_hash, payment_rate_manager):
                                        session.peer_finder,
                                        session.rate_limiter,
                                        payment_rate_manager,
                                        session.wallet)
                                        session.wallet,
                                        timeout)
    return downloader.download()
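
A hedged usage sketch of the extended signature; the session object and the 30-second value are assumptions for illustration, only the timeout keyword comes from the change above:

    from twisted.internet import defer
    from lbrynet.core.StreamDescriptor import download_sd_blob

    @defer.inlineCallbacks
    def fetch_sd_blob(session, sd_hash):
        # timeout is the new optional argument; None keeps the old behaviour
        sd_blob = yield download_sd_blob(session, sd_hash,
                                         session.payment_rate_manager, timeout=30)
        defer.returnValue(sd_blob)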
@@ -28,7 +28,7 @@ from lbrynet.core.sqlite_helpers import rerun_if_locked
from lbrynet.interfaces import IRequestCreator, IQueryHandlerFactory, IQueryHandler, IWallet
from lbrynet.core.client.ClientRequest import ClientRequest
from lbrynet.core.Error import RequestCanceledError, InsufficientFundsError, UnknownNameError
from lbrynet.core.Error import UnknownClaimID, UnknownURI
from lbrynet.core.Error import UnknownClaimID, UnknownURI, NegativeFundsError

log = logging.getLogger(__name__)

@@ -541,6 +541,8 @@ class Wallet(object):
        once the service has been rendered
        """
        rounded_amount = Decimal(str(round(amount, 8)))
        if rounded_amount < 0:
            raise NegativeFundsError(rounded_amount)
        if self.get_balance() >= rounded_amount:
            self.total_reserved_points += rounded_amount
            return ReservedPoints(identifier, rounded_amount)
@@ -1,6 +1,5 @@
import logging
from twisted.internet import defer
from twisted.python import failure
from zope.interface import implements
from lbrynet import interfaces

@@ -29,37 +28,17 @@ class DownloadManager(object):
        d.addCallback(lambda _: self.resume_downloading())
        return d

    @defer.inlineCallbacks
    def resume_downloading(self):
        yield self.connection_manager.start()
        yield self.progress_manager.start()
        defer.returnValue(True)

        def check_start(result, manager):
            if isinstance(result, failure.Failure):
                log.error("Failed to start the %s: %s", manager, result.getErrorMessage())
                return False
            return True

        d1 = self.progress_manager.start()
        d1.addBoth(check_start, "progress manager")
        d2 = self.connection_manager.start()
        d2.addBoth(check_start, "connection manager")
        dl = defer.DeferredList([d1, d2])
        dl.addCallback(lambda xs: False not in xs)
        return dl

    @defer.inlineCallbacks
    def stop_downloading(self):

        def check_stop(result, manager):
            if isinstance(result, failure.Failure):
                log.error("Failed to stop the %s: %s", manager, result.getErrorMessage())
                return False
            return True

        d1 = self.progress_manager.stop()
        d1.addBoth(check_stop, "progress manager")
        d2 = self.connection_manager.stop()
        d2.addBoth(check_stop, "connection manager")
        dl = defer.DeferredList([d1, d2], consumeErrors=True)
        dl.addCallback(lambda results: all([success for success, val in results]))
        return dl
        yield self.progress_manager.stop()
        yield self.connection_manager.stop()
        defer.returnValue(True)

    def add_blobs_to_download(self, blob_infos):
@@ -5,11 +5,11 @@ from lbrynet.core.BlobInfo import BlobInfo
from lbrynet.core.client.BlobRequester import BlobRequester
from lbrynet.core.client.ConnectionManager import ConnectionManager
from lbrynet.core.client.DownloadManager import DownloadManager
from lbrynet.core.Error import InvalidBlobHashError
from lbrynet.core.utils import is_valid_blobhash
from lbrynet.core.Error import InvalidBlobHashError, DownloadTimeoutError
from lbrynet.core.utils import is_valid_blobhash, safe_start_looping_call, safe_stop_looping_call
from twisted.python.failure import Failure
from twisted.internet import defer
from twisted.internet.task import LoopingCall

log = logging.getLogger(__name__)

@@ -32,36 +32,35 @@ class SingleBlobMetadataHandler(object):


class SingleProgressManager(object):
    def __init__(self, finished_callback, download_manager):
    def __init__(self, download_manager, finished_callback, timeout_callback, timeout):
        self.finished_callback = finished_callback
        self.finished = False
        self.timeout_callback = timeout_callback
        self.download_manager = download_manager
        self._next_check_if_finished = None

        self.timeout = timeout
        self.timeout_counter = 0
        self.checker = LoopingCall(self._check_if_finished)

    def start(self):

        from twisted.internet import reactor

        assert self._next_check_if_finished is None
        self._next_check_if_finished = reactor.callLater(0, self._check_if_finished)
        safe_start_looping_call(self.checker, 1)
        return defer.succeed(True)

    def stop(self):
        if self._next_check_if_finished is not None:
            self._next_check_if_finished.cancel()
            self._next_check_if_finished = None
        safe_stop_looping_call(self.checker)
        return defer.succeed(True)

    def _check_if_finished(self):

        from twisted.internet import reactor

        self._next_check_if_finished = None
        if self.finished is False:
            if self.stream_position() == 1:
                self.blob_downloaded(self.download_manager.blobs[0], 0)
            else:
                self._next_check_if_finished = reactor.callLater(1, self._check_if_finished)
        if self.stream_position() == 1:
            blob_downloaded = self.download_manager.blobs[0]
            log.debug("The blob %s has been downloaded. Calling the finished callback",
                      str(blob_downloaded))
            safe_stop_looping_call(self.checker)
            self.finished_callback(blob_downloaded)
        elif self.timeout is not None:
            self.timeout_counter += 1
            if self.timeout_counter >= self.timeout:
                safe_stop_looping_call(self.checker)
                self.timeout_callback()

    def stream_position(self):
        blobs = self.download_manager.blobs

@@ -74,15 +73,6 @@ class SingleProgressManager(object):
        assert len(blobs) == 1
        return [b for b in blobs.itervalues() if not b.is_validated()]

    def blob_downloaded(self, blob, blob_num):

        from twisted.internet import reactor

        log.debug("The blob %s has been downloaded. Calling the finished callback", str(blob))
        if self.finished is False:
            self.finished = True
            reactor.callLater(0, self.finished_callback, blob)


class DummyBlobHandler(object):
    def __init__(self):

@@ -94,13 +84,15 @@ class DummyBlobHandler(object):

class StandaloneBlobDownloader(object):
    def __init__(self, blob_hash, blob_manager, peer_finder,
                 rate_limiter, payment_rate_manager, wallet):
                 rate_limiter, payment_rate_manager, wallet,
                 timeout=None):
        self.blob_hash = blob_hash
        self.blob_manager = blob_manager
        self.peer_finder = peer_finder
        self.rate_limiter = rate_limiter
        self.payment_rate_manager = payment_rate_manager
        self.wallet = wallet
        self.timeout = timeout
        self.download_manager = None
        self.finished_deferred = None

@@ -118,8 +110,10 @@ class StandaloneBlobDownloader(object):
                                                              self.download_manager)
        self.download_manager.blob_info_finder = SingleBlobMetadataHandler(self.blob_hash,
                                                                           self.download_manager)
        self.download_manager.progress_manager = SingleProgressManager(self._blob_downloaded,
                                                                        self.download_manager)
        self.download_manager.progress_manager = SingleProgressManager(self.download_manager,
                                                                       self._blob_downloaded,
                                                                       self._download_timedout,
                                                                       self.timeout)
        self.download_manager.blob_handler = DummyBlobHandler()
        self.download_manager.wallet_info_exchanger = self.wallet.get_info_exchanger()
        self.download_manager.connection_manager = ConnectionManager(

@@ -139,6 +133,11 @@ class StandaloneBlobDownloader(object):
        if not self.finished_deferred.called:
            self.finished_deferred.callback(blob)

    def _download_timedout(self):
        self.stop()
        if not self.finished_deferred.called:
            self.finished_deferred.errback(DownloadTimeoutError(self.blob_hash))

    def insufficient_funds(self, err):
        self.stop()
        if not self.finished_deferred.called:
@@ -49,6 +49,13 @@ def call_later(delay, func, *args, **kwargs):
    from twisted.internet import reactor
    return reactor.callLater(delay, func, *args, **kwargs)


def safe_start_looping_call(looping_call, interval_sec):
    if not looping_call.running:
        looping_call.start(interval_sec)


def safe_stop_looping_call(looping_call):
    if looping_call.running:
        looping_call.stop()


def generate_id(num=None):
    h = get_lbry_hash_obj()
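
A small sketch of how the new helpers make starting and stopping a LoopingCall idempotent; the poll callback is a placeholder:

    from twisted.internet.task import LoopingCall
    from lbrynet.core.utils import safe_start_looping_call, safe_stop_looping_call

    def poll():
        pass  # placeholder periodic check

    checker = LoopingCall(poll)
    safe_start_looping_call(checker, 1)  # starts the loop with a 1-second interval
    safe_start_looping_call(checker, 1)  # already running, so this is a no-op
    safe_stop_looping_call(checker)      # stops it
    safe_stop_looping_call(checker)      # already stopped, so this is a no-op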
@@ -45,6 +45,7 @@ from lbrynet.core.server.BlobRequestHandler import BlobRequestHandlerFactory
from lbrynet.core.server.ServerProtocol import ServerProtocolFactory
from lbrynet.core.Error import InsufficientFundsError, UnknownNameError, NoSuchSDHash
from lbrynet.core.Error import NoSuchStreamHash, UnknownClaimID, UnknownURI
from lbrynet.core.Error import NullFundsError, NegativeFundsError

log = logging.getLogger(__name__)

@@ -602,26 +603,12 @@ class Daemon(AuthJSONRPCServer):
        :param timeout (int): blob timeout
        :return: BlobFile
        """

        def cb(blob):
            if not finished_d.called:
                finished_d.callback(blob)

        def eb():
            if not finished_d.called:
                finished_d.errback(Exception("Blob (%s) download timed out" %
                                             blob_hash[:SHORT_ID_LEN]))

        if not blob_hash:
            raise Exception("Nothing to download")

        rate_manager = rate_manager or self.session.payment_rate_manager
        timeout = timeout or 30
        finished_d = defer.Deferred(None)
        reactor.callLater(timeout, eb)
        d = download_sd_blob(self.session, blob_hash, rate_manager)
        d.addCallback(cb)
        return finished_d
        return download_sd_blob(self.session, blob_hash, rate_manager, timeout)

    @defer.inlineCallbacks
    def _download_name(self, name, claim_dict, claim_id, timeout=None, file_name=None):

@@ -2250,6 +2237,11 @@ class Daemon(AuthJSONRPCServer):
        (bool) true if payment successfully scheduled
        """

        if amount < 0:
            raise NegativeFundsError()
        elif not amount:
            raise NullFundsError()

        reserved_points = self.session.wallet.reserve_points(address, amount)
        if reserved_points is None:
            raise InsufficientFundsError()
@@ -5,7 +5,8 @@ from twisted.internet.task import LoopingCall

from lbryschema.fee import Fee

from lbrynet.core.Error import InsufficientFundsError, KeyFeeAboveMaxAllowed
from lbrynet.core.Error import InsufficientFundsError, KeyFeeAboveMaxAllowed, DownloadTimeoutError
from lbrynet.core.utils import safe_start_looping_call, safe_stop_looping_call
from lbrynet.core.StreamDescriptor import download_sd_blob
from lbrynet.file_manager.EncryptedFileDownloader import ManagedEncryptedFileDownloaderFactory
from lbrynet.file_manager.EncryptedFileDownloader import ManagedEncryptedFileDownloader

@@ -28,16 +29,6 @@ STREAM_STAGES = [
log = logging.getLogger(__name__)


def safe_start(looping_call):
    if not looping_call.running:
        looping_call.start(1)


def safe_stop(looping_call):
    if looping_call.running:
        looping_call.stop()


class GetStream(object):
    def __init__(self, sd_identifier, session, exchange_rate_manager,
                 max_key_fee, data_rate=None, timeout=None,

@@ -64,6 +55,7 @@ class GetStream(object):
        # fired after the metadata and the first data blob have been downloaded
        self.data_downloading_deferred = defer.Deferred(None)

    @property
    def download_path(self):
        return os.path.join(self.download_directory, self.downloader.file_name)

@@ -71,11 +63,10 @@ class GetStream(object):
    def _check_status(self, status):
        stop_condition = (status.num_completed > 0 or
                          status.running_status == ManagedEncryptedFileDownloader.STATUS_STOPPED)

        if stop_condition and not self.data_downloading_deferred.called:
            self.data_downloading_deferred.callback(True)
        if self.data_downloading_deferred.called:
            safe_stop(self.checker)
            safe_stop_looping_call(self.checker)
        else:
            log.info("Downloading stream data (%i seconds)", self.timeout_counter)

@@ -83,17 +74,14 @@ class GetStream(object):
        """
        Check if we've got the first data blob in the stream yet
        """
        self.timeout_counter += 1
        if self.timeout_counter >= self.timeout:
            if not self.data_downloading_deferred.called:
                self.data_downloading_deferred.errback(Exception("Timeout"))
            safe_stop(self.checker)
        elif self.downloader:
                self.data_downloading_deferred.errback(DownloadTimeoutError(self.file_name))
            safe_stop_looping_call(self.checker)
        else:
            d = self.downloader.status()
            d.addCallback(self._check_status)
        else:
            log.info("Downloading stream descriptor blob (%i seconds)", self.timeout_counter)

    def convert_max_fee(self):
        currency, amount = self.max_key_fee['currency'], self.max_key_fee['amount']

@@ -157,15 +145,14 @@ class GetStream(object):
        self.set_status(DOWNLOAD_STOPPED_CODE, name)
        log.info("Finished downloading lbry://%s (%s) --> %s", name, self.sd_hash[:6],
                 self.download_path)
        safe_stop(self.checker)
        safe_stop_looping_call(self.checker)
        status = yield self.downloader.status()
        self._check_status(status)
        defer.returnValue(self.download_path)

    @defer.inlineCallbacks
    def initialize(self, stream_info, name):
    def _initialize(self, stream_info):
        # Set sd_hash and return key_fee from stream_info
        self.set_status(INITIALIZING_CODE, name)
        self.sd_hash = stream_info.source_hash
        key_fee = None
        if stream_info.has_fee:

@@ -180,15 +167,15 @@ class GetStream(object):
        defer.returnValue(downloader)

    @defer.inlineCallbacks
    def download(self, name, key_fee):
        # download sd blob, and start downloader
        self.set_status(DOWNLOAD_METADATA_CODE, name)
        sd_blob = yield download_sd_blob(self.session, self.sd_hash, self.payment_rate_manager)
        self.downloader = yield self._create_downloader(sd_blob)
    def _download_sd_blob(self):
        sd_blob = yield download_sd_blob(self.session, self.sd_hash,
                                         self.payment_rate_manager, self.timeout)
        defer.returnValue(sd_blob)

        self.set_status(DOWNLOAD_RUNNING_CODE, name)
        if key_fee:
            yield self.pay_key_fee(key_fee, name)
    @defer.inlineCallbacks
    def _download(self, sd_blob, name, key_fee):
        self.downloader = yield self._create_downloader(sd_blob)
        yield self.pay_key_fee(key_fee, name)

        log.info("Downloading lbry://%s (%s) --> %s", name, self.sd_hash[:6], self.download_path)
        self.finished_deferred = self.downloader.start()

@@ -205,20 +192,22 @@ class GetStream(object):
        downloader - instance of ManagedEncryptedFileDownloader
        finished_deferred - deferred callbacked when download is finished
        """
        key_fee = yield self.initialize(stream_info, name)
        safe_start(self.checker)
        self.set_status(INITIALIZING_CODE, name)
        key_fee = yield self._initialize(stream_info)

        try:
            yield self.download(name, key_fee)
        except Exception as err:
            safe_stop(self.checker)
            raise
        self.set_status(DOWNLOAD_METADATA_CODE, name)
        sd_blob = yield self._download_sd_blob()

        yield self._download(sd_blob, name, key_fee)
        self.set_status(DOWNLOAD_RUNNING_CODE, name)
        safe_start_looping_call(self.checker, 1)

        try:
            yield self.data_downloading_deferred
        except Exception as err:
            self.downloader.stop()
            safe_stop(self.checker)
            safe_stop_looping_call(self.checker)
            raise

        defer.returnValue((self.downloader, self.finished_deferred))
@@ -1,14 +1,24 @@
import logging
import os
import sys
import mimetypes

from appdirs import user_data_dir
from zope.interface import implements
from twisted.internet import defer, error, interfaces, abstract, task, reactor


# TODO: omg, this code is essentially duplicated in Daemon
if sys.platform != "darwin":
    data_dir = os.path.join(os.path.expanduser("~"), ".lbrynet")
else:
    data_dir = user_data_dir("LBRY")
if not os.path.isdir(data_dir):
    os.mkdir(data_dir)

log = logging.getLogger(__name__)
STATUS_FINISHED = 'finished'


class EncryptedFileStreamer(object):
    """
    Writes LBRY stream to request; will pause to wait for new data if the file

@@ -21,6 +31,7 @@ class EncryptedFileStreamer(object):

    bufferSize = abstract.FileDescriptor.bufferSize

    # How long to wait between sending blocks (needed because some
    # video players freeze up if you try to send data too fast)
    stream_interval = 0.005

@@ -28,6 +39,7 @@ class EncryptedFileStreamer(object):
    # How long to wait before checking if new data has been appended to the file
    new_data_check_interval = 0.25

    def __init__(self, request, path, stream, file_manager):
        def _set_content_length_header(length):
            self._request.setHeader('content-length', length)
@@ -2,15 +2,25 @@ import logging
import os
import shutil
import json
import sys
import tempfile

from appdirs import user_data_dir
from twisted.web import server, static, resource
from twisted.internet import defer, error

from lbrynet import conf
from lbrynet.daemon.FileStreamer import EncryptedFileStreamer

# TODO: omg, this code is essentially duplicated in Daemon

if sys.platform != "darwin":
    data_dir = os.path.join(os.path.expanduser("~"), ".lbrynet")
else:
    data_dir = user_data_dir("LBRY")
if not os.path.isdir(data_dir):
    os.mkdir(data_dir)

log = logging.getLogger(__name__)
@@ -1,14 +1,21 @@
import json
import logging.handlers
import sys
import os

from appdirs import user_data_dir
from twisted.internet.task import LoopingCall
from twisted.internet import reactor
from lbrynet import conf


conf.initialize_settings()
log_dir = conf.settings['data_dir']
if sys.platform != "darwin":
    log_dir = os.path.join(os.path.expanduser("~"), ".lbrynet")
else:
    log_dir = user_data_dir("LBRY")

if not os.path.isdir(log_dir):
    os.mkdir(log_dir)

LOG_FILENAME = os.path.join(log_dir, 'lbrynet-daemon.log')

if os.path.isfile(LOG_FILENAME):
@@ -24,7 +24,7 @@ class DBEncryptedFileMetadataManager(object):
        return self._open_db()

    def stop(self):
        self.db_conn = None
        self.db_conn.close()
        return defer.succeed(True)

    def get_all_streams(self):
@@ -13,8 +13,8 @@ gmpy==1.17
jsonrpc==1.2
jsonrpclib==0.1.7
jsonschema==2.5.1
git+https://github.com/lbryio/lbryschema.git@v0.0.7#egg=lbryschema
git+https://github.com/lbryio/lbryum.git@v3.1.1#egg=lbryum
git+https://github.com/lbryio/lbryschema.git@v0.0.8#egg=lbryschema
git+https://github.com/lbryio/lbryum.git@v3.1.3#egg=lbryum
miniupnpc==1.9
pbkdf2==1.3
pycrypto==2.6.1
setup.py (4 changes)

@@ -21,8 +21,8 @@ requires = [
    'envparse',
    'jsonrpc',
    'jsonschema',
    'lbryum==3.1.1',
    'lbryschema==0.0.7',
    'lbryum==3.1.4rc1',
    'lbryschema==0.0.8',
    'miniupnpc',
    'pycrypto',
    'pyyaml',
@@ -65,6 +65,19 @@ def use_epoll_on_linux():
        sys.modules['twisted.internet.reactor'] = twisted.internet.reactor


def init_conf_windows(settings={}):
    """
    There is no fork on windows, so imports
    are freshly initialized in new processes.
    So conf needs to be intialized for new processes
    """
    if os.name == 'nt':
        original_settings = conf.settings
        conf.settings = conf.Config(conf.FIXED_SETTINGS, conf.ADJUSTABLE_SETTINGS)
        conf.settings.installation_id = conf.settings.get_installation_id()
        conf.settings.update(settings)


class LbryUploader(object):
    def __init__(self, sd_hash_queue, kill_event, dead_event,
                 file_size, ul_rate_limit=None, is_generous=False):

@@ -84,6 +97,8 @@ class LbryUploader(object):

    def start(self):
        use_epoll_on_linux()
        init_conf_windows()

        from twisted.internet import reactor
        self.reactor = reactor
        logging.debug("Starting the uploader")

@@ -98,6 +113,7 @@ class LbryUploader(object):
        self.sd_identifier = StreamDescriptorIdentifier()
        db_dir = "server"
        os.mkdir(db_dir)

        self.session = Session(
            conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, lbryid="abcd",
            peer_finder=peer_finder, hash_announcer=hash_announcer, peer_port=5553,

@@ -182,6 +198,7 @@ class LbryUploader(object):
def start_lbry_reuploader(sd_hash, kill_event, dead_event,
                          ready_event, n, ul_rate_limit=None, is_generous=False):
    use_epoll_on_linux()
    init_conf_windows()
    from twisted.internet import reactor

    logging.debug("Starting the uploader")

@@ -295,6 +312,7 @@ def start_lbry_reuploader(sd_hash, kill_event, dead_event,

def start_blob_uploader(blob_hash_queue, kill_event, dead_event, slow, is_generous=False):
    use_epoll_on_linux()
    init_conf_windows()
    from twisted.internet import reactor

    logging.debug("Starting the uploader")
@@ -24,7 +24,10 @@ class BlobManagerTest(unittest.TestCase):

    def tearDown(self):
        self.bm.stop()
        shutil.rmtree(self.blob_dir)
        # BlobFile will try to delete itself in _close_writer
        # thus when calling rmtree we may get a FileNotFoundError
        # for the blob file
        shutil.rmtree(self.blob_dir, ignore_errors=True)
        shutil.rmtree(self.db_dir)

    @defer.inlineCallbacks

@@ -100,8 +103,8 @@ class BlobManagerTest(unittest.TestCase):

        # open the last blob
        blob = yield self.bm.get_blob(blob_hashes[-1])
        yield blob.open_for_writing(self.peer)
        finished_d, write, cancel = yield blob.open_for_writing(self.peer)

        # delete the last blob and check if it still exists
        out = yield self.bm.delete_blobs([blob_hash])
        blobs = yield self.bm.get_all_verified_blobs()

@@ -109,4 +112,4 @@ class BlobManagerTest(unittest.TestCase):
        self.assertTrue(blob_hashes[-1] in blobs)
        self.assertTrue(os.path.isfile(os.path.join(self.blob_dir,blob_hashes[-1])))

        blob._close_writer(blob.writers[self.peer][0])
@@ -14,6 +14,7 @@ class DBEncryptedFileMetadataManagerTest(unittest.TestCase):
        self.manager = DBEncryptedFileMetadataManager(self.db_dir)

    def tearDown(self):
        self.manager.stop()
        shutil.rmtree(self.db_dir)

    @defer.inlineCallbacks
@@ -2,17 +2,18 @@ import types
import mock
import json
from twisted.trial import unittest
from twisted.internet import defer
from twisted.internet import defer, task

from lbryschema.claim import ClaimDict

from lbrynet.core import Session, PaymentRateManager, Wallet
from lbrynet.core.Error import DownloadTimeoutError
from lbrynet.daemon import Downloader
from lbrynet.core.StreamDescriptor import StreamDescriptorIdentifier,StreamMetadata
from lbrynet.lbry_file.client.EncryptedFileOptions import add_lbry_file_to_sd_identifier
from lbrynet.core.HashBlob import TempBlob
from lbrynet.core.BlobManager import TempBlobManager
from lbrynet.file_manager.EncryptedFileDownloader import ManagedEncryptedFileDownloaderFactory

from lbrynet.file_manager.EncryptedFileStatusReport import EncryptedFileStatusReport
from lbrynet.file_manager.EncryptedFileDownloader import ManagedEncryptedFileDownloader, ManagedEncryptedFileDownloaderFactory
from lbrynet.daemon.ExchangeRateManager import ExchangeRateManager

from tests.mocks import BlobAvailabilityTracker as DummyBlobAvailabilityTracker

@@ -20,10 +21,48 @@ from tests.mocks import ExchangeRateManager as DummyExchangeRateManager
from tests.mocks import BTCLBCFeed, USDBTCFeed
from tests.mocks import mock_conf_settings


class MocDownloader(object):
    def __init__(self):
        self.finish_deferred = defer.Deferred(None)
        self.stop_called = False

        self.name = 'test'
        self.num_completed = 0
        self.num_known = 1
        self.running_status = ManagedEncryptedFileDownloader.STATUS_RUNNING

    @defer.inlineCallbacks
    def status(self):
        out = yield EncryptedFileStatusReport(self.name, self.num_completed, self.num_known, self.running_status)
        defer.returnValue(out)

    def start(self):
        return self.finish_deferred

    def stop(self):
        self.stop_called = True
        self.finish_deferred.callback(True)


def moc_initialize(self,stream_info):
    self.sd_hash ="d5169241150022f996fa7cd6a9a1c421937276a3275eb912790bd07ba7aec1fac5fd45431d226b8fb402691e79aeb24b"
    return None


def moc_download_sd_blob(self):
    return None


def moc_download(self, sd_blob, name, key_fee):
    self.pay_key_fee(key_fee, name)
    self.downloader = MocDownloader()
    self.downloader.start()


def moc_pay_key_fee(self, key_fee, name):
    self.pay_key_fee_called = True


class GetStreamTests(unittest.TestCase):

    def init_getstream_with_mocs(self):
        mock_conf_settings(self)

        sd_identifier = mock.Mock(spec=StreamDescriptorIdentifier)
        session = mock.Mock(spec=Session.Session)
        session.wallet = mock.Mock(spec=Wallet.LBRYumWallet)

@@ -37,8 +76,11 @@ class GetStreamTests(unittest.TestCase):
        data_rate = {'currency':"LBC", 'amount':0, 'address':''}

        getstream = Downloader.GetStream(sd_identifier, session,
            exchange_rate_manager, max_key_fee, timeout=10, data_rate=data_rate)
            exchange_rate_manager, max_key_fee, timeout=3, data_rate=data_rate)
        getstream.pay_key_fee_called = False

        self.clock = task.Clock()
        getstream.checker.clock = self.clock
        return getstream

    @defer.inlineCallbacks

@@ -56,3 +98,86 @@ class GetStreamTests(unittest.TestCase):
            yield getstream.start(stream_info,name)


    @defer.inlineCallbacks
    def test_sd_blob_download_timeout(self):
        """
        test that if download_sd_blob fails due to timeout,
        DownloadTimeoutError is raised
        """
        def download_sd_blob(self):
            raise DownloadTimeoutError(self.file_name)

        getstream = self.init_getstream_with_mocs()
        getstream._initialize = types.MethodType(moc_initialize, getstream)
        getstream._download_sd_blob = types.MethodType(download_sd_blob, getstream)
        getstream._download = types.MethodType(moc_download, getstream)
        getstream.pay_key_fee = types.MethodType(moc_pay_key_fee, getstream)
        name='test'
        stream_info = None
        with self.assertRaises(DownloadTimeoutError):
            yield getstream.start(stream_info,name)
        self.assertFalse(getstream.pay_key_fee_called)

    @defer.inlineCallbacks
    def test_timeout(self):
        """
        test that timeout (set to 2 here) exception is raised
        when download times out while downloading first blob, and key fee is paid
        """
        getstream = self.init_getstream_with_mocs()
        getstream._initialize = types.MethodType(moc_initialize, getstream)
        getstream._download_sd_blob = types.MethodType(moc_download_sd_blob, getstream)
        getstream._download = types.MethodType(moc_download, getstream)
        getstream.pay_key_fee = types.MethodType(moc_pay_key_fee, getstream)
        name='test'
        stream_info = None
        start = getstream.start(stream_info,name)
        self.clock.advance(1)
        self.clock.advance(1)
        with self.assertRaises(DownloadTimeoutError):
            yield start
        self.assertTrue(getstream.downloader.stop_called)
        self.assertTrue(getstream.pay_key_fee_called)

    @defer.inlineCallbacks
    def test_finish_one_blob(self):
        """
        test that if we have 1 completed blob, start() returns
        and key fee is paid
        """
        getstream = self.init_getstream_with_mocs()
        getstream._initialize = types.MethodType(moc_initialize, getstream)

        getstream._download_sd_blob = types.MethodType(moc_download_sd_blob, getstream)
        getstream._download = types.MethodType(moc_download, getstream)
        getstream.pay_key_fee = types.MethodType(moc_pay_key_fee, getstream)
        name='test'
        stream_info = None
        start = getstream.start(stream_info,name)

        getstream.downloader.num_completed = 1
        self.clock.advance(1)

        downloader, f_deferred = yield start
        self.assertTrue(getstream.pay_key_fee_called)


    @defer.inlineCallbacks
    def test_finish_stopped_downloader(self):
        """
        test that if we have a stopped downloader, beforfe a blob is downloaded,
        start() returns
        """
        getstream = self.init_getstream_with_mocs()
        getstream._initialize = types.MethodType(moc_initialize, getstream)
        getstream._download_sd_blob = types.MethodType(moc_download_sd_blob, getstream)
        getstream._download = types.MethodType(moc_download, getstream)
        name='test'
        stream_info = None
        start = getstream.start(stream_info,name)

        getstream.downloader.running_status = ManagedEncryptedFileDownloader.STATUS_STOPPED
        self.clock.advance(1)
        downloader, f_deferred = yield start
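
The tests above drive GetStream's checker with Twisted's task.Clock instead of the real reactor; a standalone sketch of that pattern, independent of lbrynet:

    from twisted.internet import task

    ticks = []
    clock = task.Clock()
    checker = task.LoopingCall(lambda: ticks.append(1))
    checker.clock = clock        # substitute the fake clock for the reactor
    checker.start(1, now=False)  # fire once per simulated second
    clock.advance(1)
    clock.advance(1)
    assert len(ticks) == 2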
@@ -1,7 +1,6 @@
import os

from twisted.trial import unittest

from lbrynet import conf


@@ -54,3 +53,11 @@ class SettingsTest(unittest.TestCase):
        self.assertEqual('cli_test_string', settings['test'])
        settings.set('test', 'runtime_takes_precedence', data_types=(conf.TYPE_RUNTIME,))
        self.assertEqual('runtime_takes_precedence', settings['test'])

    def test_data_dir(self):
        # check if these directories are returned as string and not unicode
        # otherwise there will be problems when calling os.path.join on
        # unicode directory names with string file names
        self.assertEqual(str, type(conf.default_download_directory))
        self.assertEqual(str, type(conf.default_data_dir))
        self.assertEqual(str, type(conf.default_lbryum_dir))