Merge branch 'master' into hackrush
Commit 53579dc613 (forked from LBRYCommunity/lbry-sdk)
25 changed files with 392 additions and 203 deletions
@@ -27,7 +27,10 @@ test_script:
   - pip install .
   - pylint lbrynet
   # disable tests for now so that appveyor can build the app
-  #- python -m twisted.trial tests # avoids having to set PYTHONPATH=. (see https://twistedmatrix.com/trac/ticket/9035)
+  - set PYTHONPATH=.
+  - trial tests
+  # TODO: integration tests do not work
+  #- python -m unittest discover tests/integration
   #- rvm use 2.3.1 && gem install danger --version '~> 4.0' && danger
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 0.14.2rc2
+current_version = 0.14.1
 commit = True
 tag = True
 parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)((?P<release>[a-z]+)(?P<candidate>\d+))?
CHANGELOG.md (71 changed lines)
@@ -9,38 +9,65 @@ at anytime.

 ## [Unreleased]
 ### Added
-* Missing docstring for `blob_list`
-* Added convenient import for setting up a daemon client, `from lbrynet.daemon import get_client`
-* Added unit tests for CryptBlob.py
+* Add link to instructions on how to change the default peer port
+*

 ### Changed
-* Change `max_key_fee` setting to be a dictionary with values for `currency` and `amount`
-* Linux default downloads folder changed from `~/Downloads` to `XDG_DOWNLOAD_DIR`
-* Linux folders moved from the home directory to `~/.local/share/lbry`
-* Windows folders moved from `%APPDATA%/Roaming` to `%APPDATA%/Local/lbry`
-* Renamed `lbrynet.lbryfile` to `lbrynet.lbry_file`
-* Renamed `lbrynet.lbryfilemanager` to `lbrynet.file_manager`
-* Renamed `lbrynet.lbrynet_daemon` to `lbrynet.daemon`
-* Initialize lbrynet settings when configuring an api client if they are not set yet
-* Updated lbryum imports
-* Improve error message when resolving a claim fails using the "get" command
+*
+*

 ### Fixed
-* Fixed some log messages throwing exceptions
-* Fix shutdown of the blob tracker by Session
-* Fixed claim_new_support docstrings
-* Fix default directories to comply to XDG
-* Fixed BlobManager causing functional tests to fail, removed its unneeded manage() loop
-* Increased max_key_fee
-* Gives message and instructions if port 3333 is used
+* peer_port is settable using `settings_set`

 ### Deprecated
 *
 *

 ### Removed
-* Removed unused settings from conf.py and `settings_set`
-* Removed download_directory argument from API command get
+*
+*

+## [0.14.1] - 2017-07-07
+
+### Fixed
+* Fixed timeout behaviour when calling API command get
+* Fixed https://github.com/lbryio/lbry/issues/765
+
+
+## [0.14.0] - 2017-07-05
+
+### Added
+* Missing docstring for `blob_list`
+* Added convenient import for setting up a daemon client, `from lbrynet.daemon import get_client`
+* Added unit tests for CryptBlob.py
+
+### Changed
+* Change `max_key_fee` setting to be a dictionary with values for `currency` and `amount`
+* Renamed `lbrynet.lbryfile` to `lbrynet.lbry_file`
+* Renamed `lbrynet.lbryfilemanager` to `lbrynet.file_manager`
+* Renamed `lbrynet.lbrynet_daemon` to `lbrynet.daemon`
+* Initialize lbrynet settings when configuring an api client if they are not set yet
+* Updated lbryum imports
+* Improve error message when resolving a claim fails using the "get" command
+
+### Removed
+* Removed unused settings from conf.py and `settings_set`
+* Removed download_directory argument from API command get
+
+### Fixed
+* Fixed some log messages throwing exceptions
+* Fix shutdown of the blob tracker by Session
+* Fixed claim_new_support docstrings
+* Fixed BlobManager causing functional tests to fail, removed its unneeded manage() loop
+* Increased max_key_fee
+* Fixed unit tests on appveyor Windows build
+* Fixed [#692](https://github.com/lbryio/lbry/issues/692)
+
+
 ## [0.13.1] - 2017-06-15
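The `max_key_fee` entry above replaces a bare number with a structured setting. A minimal sketch of the new shape, assuming the same `currency`/`amount` keys that `GetStream.convert_max_fee` reads in the Downloader.py hunk further down; the concrete values here are illustrative only:

```python
# Hypothetical example of the dictionary form of max_key_fee.
# The keys mirror what GetStream.convert_max_fee() unpacks in this diff;
# the currency and amount values are made up for illustration.
max_key_fee = {
    "currency": "USD",  # currency the cap is expressed in
    "amount": 50.0,     # largest key fee the daemon will agree to pay
}

currency, amount = max_key_fee["currency"], max_key_fee["amount"]
print("refusing any key fee above %s %s" % (amount, currency))
```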
@@ -11,7 +11,7 @@ It provides a daemon that can interact with the network via a json-rpc interface

 ## Installing

-**Note**: This project no longer directly bundles a graphic interface (browser). If you want to use LBRY via a browser, [use the LBRY App](https://github.com/lbryio/lbry-app).
+**Note**: This project no longer bundles a graphic interface (browser). If you want to use LBRY via a GUI, [use the LBRY App](https://github.com/lbryio/lbry-app).

 Our [releases page](https://github.com/lbryio/lbry/releases) contains pre-built binaries of the latest release, pre-releases, and past releases, for macOS, Debian-based Linux, and Windows.

@@ -25,7 +25,7 @@ Run `lbrynet-daemon` to launch the daemon.

 By default, `lbrynet-daemon` will provide a JSON-RPC server at `http://localhost:5279`. It is easy to interact with via cURL or sane programming languages.

-Our [quickstart guide](http://lbry.io/quickstart) provides clear sample usages and free credits for learning.
+Our [quickstart guide](http://lbry.io/quickstart) provides a simple walkthrough and examples for learning.

 The full API is documented [here](https://lbry.io/api).
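The README points out that the daemon answers JSON-RPC on `http://localhost:5279`. Below is a minimal client sketch using only the Python 2 standard library (the code in this diff is Python 2 / Twisted era). The request shape and the `blob_list` method name are assumptions drawn from the changelog above, not something this diff documents:

```python
import json
import urllib2

# Assumed JSON-RPC payload; 'blob_list' is an API method mentioned in the changelog.
payload = json.dumps({"method": "blob_list", "params": {}})
request = urllib2.Request("http://localhost:5279", payload,
                          {"Content-Type": "application/json"})

# Requires a running lbrynet-daemon on the default port.
response = json.load(urllib2.urlopen(request))
print(response)
```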
@@ -1,6 +1,6 @@
 import logging

-__version__ = "0.14.2rc2"
+__version__ = "0.14.2rc1"
 version = tuple(__version__.split('.'))

 logging.getLogger(__name__).addHandler(logging.NullHandler())
@@ -2,11 +2,10 @@ import base58
 import json
 import logging
 import os
-import re
 import sys
 import yaml
 import envparse
-from appdirs import user_data_dir, user_config_dir
+from appdirs import user_data_dir
 from lbrynet.core import utils

 log = logging.getLogger(__name__)

@@ -44,48 +43,44 @@ settings_encoders = {
     '.yml': yaml.safe_dump
 }

-if 'darwin' in sys.platform:
-    platform = DARWIN
-    default_download_dir = os.path.expanduser('~/Downloads')
-    default_data_dir = user_data_dir('LBRY')
-    default_lbryum_dir = os.path.expanduser('~/.lbryum')
-elif 'nt' in sys.platform:
-    platform = WINDOWS
-    from lbrynet.winhelpers.knownpaths import get_path, FOLDERID, UserHandle
-
-    default_download_dir = get_path(FOLDERID.Downloads, UserHandle.current)
-    # This checks whether the folders are still in their old locations
-    if os.path.isdir(user_data_dir('lbryum', roaming=True)) and \
-            os.path.isdir(user_data_dir('lbrynet', roaming=True)):
-        default_data_dir = user_data_dir('lbrynet', roaming=True)
-        default_lbryum_dir = user_data_dir('lbryum', roaming=True)
-    else:
-        default_data_dir = user_data_dir('lbrynet', 'lbry')
-        default_lbryum_dir = user_data_dir('lbryum', 'lbry')
-else:
-    platform = LINUX
-    # This checks whether the folders are still in their old locations
-    if os.path.isdir(os.path.expanduser('~/.lbrynet')) and \
-            os.path.isdir(os.path.expanduser('~/.lbryum')):
-        default_data_dir = os.path.expanduser('~/.lbrynet')
-        default_lbryum_dir = os.path.expanduser('~/.lbryum')
-        default_download_dir = os.path.expanduser('~/Downloads')
-    else:
-        default_data_dir = user_data_dir('lbry/lbrynet')
-        default_lbryum_dir = user_data_dir('lbry/lbryum')
-        try:
-            with open(os.path.join(user_config_dir(), 'user-dirs.dirs'), 'r') as xdg:
-                down_dir = re.search(r'XDG_DOWNLOAD_DIR=(.+)', xdg.read()).group(1)
-                down_dir = re.sub('\$HOME', os.getenv('HOME'), down_dir)
-                default_download_dir = re.sub('\"', '', down_dir)
-        except EnvironmentError:
-            default_download_dir = os.getenv('XDG_DOWNLOAD_DIR')
-
-        if not default_download_dir:
-            default_download_dir = os.path.expanduser('~/Downloads')
+def _win_path_to_bytes(path):
+    """
+    Encode Windows paths to string. appdirs.user_data_dir()
+    on windows will return unicode path, unlike other platforms
+    which returns string. This will cause problems
+    because we use strings for filenames and combining them with
+    os.path.join() will result in errors.
+    """
+    for encoding in ('ASCII', 'MBCS'):
+        try:
+            return path.encode(encoding)
+        except (UnicodeEncodeError, LookupError):
+            pass
+    return path
+
+
+if sys.platform.startswith('darwin'):
+    platform = DARWIN
+    default_download_directory = os.path.join(os.path.expanduser('~'), 'Downloads')
+    default_data_dir = user_data_dir('LBRY')
+    default_lbryum_dir = os.path.join(os.path.expanduser('~'), '.lbryum')
+elif sys.platform.startswith('win'):
+    platform = WINDOWS
+    from lbrynet.winhelpers.knownpaths import get_path, FOLDERID, UserHandle
+
+    default_download_directory = get_path(FOLDERID.Downloads, UserHandle.current)
+    default_data_dir = os.path.join(
+        get_path(FOLDERID.RoamingAppData, UserHandle.current), 'lbrynet')
+    default_lbryum_dir = os.path.join(
+        get_path(FOLDERID.RoamingAppData, UserHandle.current), 'lbryum')
+
+    default_download_directory = _win_path_to_bytes(default_download_directory)
+    default_data_dir = _win_path_to_bytes(default_data_dir)
+    default_lbryum_dir = _win_path_to_bytes(default_lbryum_dir)
+else:
+    platform = LINUX
+    default_download_directory = os.path.join(os.path.expanduser('~'), 'Downloads')
+    default_data_dir = os.path.join(os.path.expanduser('~'), '.lbrynet')
+    default_lbryum_dir = os.path.join(os.path.expanduser('~'), '.lbryum')

 ICON_PATH = 'icons' if platform is WINDOWS else 'app.icns'

@@ -183,7 +178,7 @@ ADJUSTABLE_SETTINGS = {
     'data_rate': (float, .0001),  # points/megabyte
     'delete_blobs_on_remove': (bool, True),
     'dht_node_port': (int, 4444),
-    'download_directory': (str, default_download_dir),
+    'download_directory': (str, default_download_directory),
     'download_timeout': (int, 180),
     'is_generous_host': (bool, True),
     'known_dht_nodes': (list, DEFAULT_DHT_NODES, server_port),
@@ -9,11 +9,23 @@ class DuplicateStreamHashError(Exception):
 class DownloadCanceledError(Exception):
     pass


+class DownloadTimeoutError(Exception):
+    def __init__(self, download):
+        Exception.__init__(self, 'Failed to download {} within timeout'.format(download))
+        self.download = download
+
+
 class RequestCanceledError(Exception):
     pass


+class NegativeFundsError(Exception):
+    pass
+
+
+class NullFundsError(Exception):
+    pass
+
+
 class InsufficientFundsError(Exception):
     pass
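The new `DownloadTimeoutError` stores the thing that failed and embeds it in the exception message, which is what the daemon code and the unit tests later in this diff rely on. A self-contained sketch of the behaviour, with the class body copied from the hunk above:

```python
class DownloadTimeoutError(Exception):
    # Same definition as the exception added in this hunk.
    def __init__(self, download):
        Exception.__init__(self, 'Failed to download {} within timeout'.format(download))
        self.download = download


try:
    raise DownloadTimeoutError("deadbeef" * 12)
except DownloadTimeoutError as err:
    print(err)           # Failed to download deadbeef... within timeout
    print(err.download)  # the identifier that timed out is kept on the exception
```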
@@ -236,7 +236,7 @@ class StreamDescriptorIdentifier(object):
     return d


-def download_sd_blob(session, blob_hash, payment_rate_manager):
+def download_sd_blob(session, blob_hash, payment_rate_manager, timeout=None):
     """
     Downloads a single blob from the network

@@ -253,5 +253,6 @@ def download_sd_blob(session, blob_hash, payment_rate_manager):
                                           session.peer_finder,
                                           session.rate_limiter,
                                           payment_rate_manager,
-                                          session.wallet)
+                                          session.wallet,
+                                          timeout)
     return downloader.download()
@@ -28,7 +28,7 @@ from lbrynet.core.sqlite_helpers import rerun_if_locked
 from lbrynet.interfaces import IRequestCreator, IQueryHandlerFactory, IQueryHandler, IWallet
 from lbrynet.core.client.ClientRequest import ClientRequest
 from lbrynet.core.Error import RequestCanceledError, InsufficientFundsError, UnknownNameError
-from lbrynet.core.Error import UnknownClaimID, UnknownURI
+from lbrynet.core.Error import UnknownClaimID, UnknownURI, NegativeFundsError

 log = logging.getLogger(__name__)

@@ -541,6 +541,8 @@ class Wallet(object):
         once the service has been rendered
         """
         rounded_amount = Decimal(str(round(amount, 8)))
+        if rounded_amount < 0:
+            raise NegativeFundsError(rounded_amount)
         if self.get_balance() >= rounded_amount:
             self.total_reserved_points += rounded_amount
             return ReservedPoints(identifier, rounded_amount)
@@ -1,6 +1,5 @@
 import logging
 from twisted.internet import defer
-from twisted.python import failure
 from zope.interface import implements
 from lbrynet import interfaces

@@ -29,37 +28,17 @@ class DownloadManager(object):
         d.addCallback(lambda _: self.resume_downloading())
         return d

+    @defer.inlineCallbacks
     def resume_downloading(self):
-
-        def check_start(result, manager):
-            if isinstance(result, failure.Failure):
-                log.error("Failed to start the %s: %s", manager, result.getErrorMessage())
-                return False
-            return True
-
-        d1 = self.progress_manager.start()
-        d1.addBoth(check_start, "progress manager")
-        d2 = self.connection_manager.start()
-        d2.addBoth(check_start, "connection manager")
-        dl = defer.DeferredList([d1, d2])
-        dl.addCallback(lambda xs: False not in xs)
-        return dl
+        yield self.connection_manager.start()
+        yield self.progress_manager.start()
+        defer.returnValue(True)

+    @defer.inlineCallbacks
     def stop_downloading(self):
-
-        def check_stop(result, manager):
-            if isinstance(result, failure.Failure):
-                log.error("Failed to stop the %s: %s", manager, result.getErrorMessage())
-                return False
-            return True
-
-        d1 = self.progress_manager.stop()
-        d1.addBoth(check_stop, "progress manager")
-        d2 = self.connection_manager.stop()
-        d2.addBoth(check_stop, "connection manager")
-        dl = defer.DeferredList([d1, d2], consumeErrors=True)
-        dl.addCallback(lambda results: all([success for success, val in results]))
-        return dl
+        yield self.progress_manager.stop()
+        yield self.connection_manager.stop()
+        defer.returnValue(True)

     def add_blobs_to_download(self, blob_infos):
@@ -5,11 +5,11 @@ from lbrynet.core.BlobInfo import BlobInfo
 from lbrynet.core.client.BlobRequester import BlobRequester
 from lbrynet.core.client.ConnectionManager import ConnectionManager
 from lbrynet.core.client.DownloadManager import DownloadManager
-from lbrynet.core.Error import InvalidBlobHashError
-from lbrynet.core.utils import is_valid_blobhash
+from lbrynet.core.Error import InvalidBlobHashError, DownloadTimeoutError
+from lbrynet.core.utils import is_valid_blobhash, safe_start_looping_call, safe_stop_looping_call
 from twisted.python.failure import Failure
 from twisted.internet import defer
+from twisted.internet.task import LoopingCall

 log = logging.getLogger(__name__)

@@ -32,36 +32,35 @@ class SingleBlobMetadataHandler(object):


 class SingleProgressManager(object):
-    def __init__(self, finished_callback, download_manager):
+    def __init__(self, download_manager, finished_callback, timeout_callback, timeout):
         self.finished_callback = finished_callback
-        self.finished = False
+        self.timeout_callback = timeout_callback
         self.download_manager = download_manager
-        self._next_check_if_finished = None
+
+        self.timeout = timeout
+        self.timeout_counter = 0
+        self.checker = LoopingCall(self._check_if_finished)

     def start(self):
-        from twisted.internet import reactor
-
-        assert self._next_check_if_finished is None
-        self._next_check_if_finished = reactor.callLater(0, self._check_if_finished)
+        safe_start_looping_call(self.checker, 1)
         return defer.succeed(True)

     def stop(self):
-        if self._next_check_if_finished is not None:
-            self._next_check_if_finished.cancel()
-            self._next_check_if_finished = None
+        safe_stop_looping_call(self.checker)
         return defer.succeed(True)

     def _check_if_finished(self):
-        from twisted.internet import reactor
-
-        self._next_check_if_finished = None
-        if self.finished is False:
-            if self.stream_position() == 1:
-                self.blob_downloaded(self.download_manager.blobs[0], 0)
-            else:
-                self._next_check_if_finished = reactor.callLater(1, self._check_if_finished)
+        if self.stream_position() == 1:
+            blob_downloaded = self.download_manager.blobs[0]
+            log.debug("The blob %s has been downloaded. Calling the finished callback",
+                      str(blob_downloaded))
+            safe_stop_looping_call(self.checker)
+            self.finished_callback(blob_downloaded)
+        elif self.timeout is not None:
+            self.timeout_counter += 1
+            if self.timeout_counter >= self.timeout:
+                safe_stop_looping_call(self.checker)
+                self.timeout_callback()

     def stream_position(self):
         blobs = self.download_manager.blobs

@@ -74,15 +73,6 @@ class SingleProgressManager(object):
         assert len(blobs) == 1
         return [b for b in blobs.itervalues() if not b.is_validated()]

-    def blob_downloaded(self, blob, blob_num):
-        from twisted.internet import reactor
-
-        log.debug("The blob %s has been downloaded. Calling the finished callback", str(blob))
-        if self.finished is False:
-            self.finished = True
-            reactor.callLater(0, self.finished_callback, blob)
-

 class DummyBlobHandler(object):
     def __init__(self):

@@ -94,13 +84,15 @@ class DummyBlobHandler(object):

 class StandaloneBlobDownloader(object):
     def __init__(self, blob_hash, blob_manager, peer_finder,
-                 rate_limiter, payment_rate_manager, wallet):
+                 rate_limiter, payment_rate_manager, wallet,
+                 timeout=None):
         self.blob_hash = blob_hash
         self.blob_manager = blob_manager
         self.peer_finder = peer_finder
         self.rate_limiter = rate_limiter
         self.payment_rate_manager = payment_rate_manager
         self.wallet = wallet
+        self.timeout = timeout
         self.download_manager = None
         self.finished_deferred = None

@@ -118,8 +110,10 @@ class StandaloneBlobDownloader(object):
                                                                      self.download_manager)
         self.download_manager.blob_info_finder = SingleBlobMetadataHandler(self.blob_hash,
                                                                            self.download_manager)
-        self.download_manager.progress_manager = SingleProgressManager(self._blob_downloaded,
-                                                                        self.download_manager)
+        self.download_manager.progress_manager = SingleProgressManager(self.download_manager,
+                                                                        self._blob_downloaded,
+                                                                        self._download_timedout,
+                                                                        self.timeout)
         self.download_manager.blob_handler = DummyBlobHandler()
         self.download_manager.wallet_info_exchanger = self.wallet.get_info_exchanger()
         self.download_manager.connection_manager = ConnectionManager(

@@ -139,6 +133,11 @@ class StandaloneBlobDownloader(object):
         if not self.finished_deferred.called:
             self.finished_deferred.callback(blob)

+    def _download_timedout(self):
+        self.stop()
+        if not self.finished_deferred.called:
+            self.finished_deferred.errback(DownloadTimeoutError(self.blob_hash))
+
     def insufficient_funds(self, err):
         self.stop()
         if not self.finished_deferred.called:
@@ -49,6 +49,13 @@ def call_later(delay, func, *args, **kwargs):
     from twisted.internet import reactor
     return reactor.callLater(delay, func, *args, **kwargs)

+
+def safe_start_looping_call(looping_call, interval_sec):
+    if not looping_call.running:
+        looping_call.start(interval_sec)
+
+
+def safe_stop_looping_call(looping_call):
+    if looping_call.running:
+        looping_call.stop()
+
+
 def generate_id(num=None):
     h = get_lbry_hash_obj()
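`safe_start_looping_call` and `safe_stop_looping_call` are idempotent wrappers around `LoopingCall.start`/`stop`; the downloader code below switches to them so that repeated starts or stops no longer raise. A self-contained sketch (requires Twisted) showing the effect, with time driven by a test clock instead of the real reactor:

```python
from twisted.internet import task


def safe_start_looping_call(looping_call, interval_sec):
    # Start only if not already running, mirroring the helper added above.
    if not looping_call.running:
        looping_call.start(interval_sec)


def safe_stop_looping_call(looping_call):
    # Stop only if running, so a second stop is harmless.
    if looping_call.running:
        looping_call.stop()


ticks = []
clock = task.Clock()
call = task.LoopingCall(lambda: ticks.append(len(ticks)))
call.clock = clock                 # drive time manually for the example

safe_start_looping_call(call, 1)
safe_start_looping_call(call, 1)   # no-op instead of an AssertionError
clock.advance(3)
safe_stop_looping_call(call)
safe_stop_looping_call(call)       # no-op instead of an error
print(ticks)                       # [0, 1, 2, 3]: one tick at start, one per second advanced
```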
@@ -45,6 +45,7 @@ from lbrynet.core.server.BlobRequestHandler import BlobRequestHandlerFactory
 from lbrynet.core.server.ServerProtocol import ServerProtocolFactory
 from lbrynet.core.Error import InsufficientFundsError, UnknownNameError, NoSuchSDHash
 from lbrynet.core.Error import NoSuchStreamHash, UnknownClaimID, UnknownURI
+from lbrynet.core.Error import NullFundsError, NegativeFundsError

 log = logging.getLogger(__name__)

@@ -602,26 +603,12 @@ class Daemon(AuthJSONRPCServer):
         :param timeout (int): blob timeout
         :return: BlobFile
         """
-
-        def cb(blob):
-            if not finished_d.called:
-                finished_d.callback(blob)
-
-        def eb():
-            if not finished_d.called:
-                finished_d.errback(Exception("Blob (%s) download timed out" %
-                                             blob_hash[:SHORT_ID_LEN]))
-
         if not blob_hash:
             raise Exception("Nothing to download")

         rate_manager = rate_manager or self.session.payment_rate_manager
         timeout = timeout or 30
-        finished_d = defer.Deferred(None)
-        reactor.callLater(timeout, eb)
-        d = download_sd_blob(self.session, blob_hash, rate_manager)
-        d.addCallback(cb)
-        return finished_d
+        return download_sd_blob(self.session, blob_hash, rate_manager, timeout)

     @defer.inlineCallbacks
     def _download_name(self, name, claim_dict, claim_id, timeout=None, file_name=None):

@@ -2250,6 +2237,11 @@ class Daemon(AuthJSONRPCServer):
             (bool) true if payment successfully scheduled
         """

+        if amount < 0:
+            raise NegativeFundsError()
+        elif not amount:
+            raise NullFundsError()
+
         reserved_points = self.session.wallet.reserve_points(address, amount)
         if reserved_points is None:
             raise InsufficientFundsError()
@@ -5,7 +5,8 @@ from twisted.internet.task import LoopingCall

 from lbryschema.fee import Fee

-from lbrynet.core.Error import InsufficientFundsError, KeyFeeAboveMaxAllowed
+from lbrynet.core.Error import InsufficientFundsError, KeyFeeAboveMaxAllowed, DownloadTimeoutError
+from lbrynet.core.utils import safe_start_looping_call, safe_stop_looping_call
 from lbrynet.core.StreamDescriptor import download_sd_blob
 from lbrynet.file_manager.EncryptedFileDownloader import ManagedEncryptedFileDownloaderFactory
 from lbrynet.file_manager.EncryptedFileDownloader import ManagedEncryptedFileDownloader

@@ -28,16 +29,6 @@ STREAM_STAGES = [
 log = logging.getLogger(__name__)


-def safe_start(looping_call):
-    if not looping_call.running:
-        looping_call.start(1)
-
-
-def safe_stop(looping_call):
-    if looping_call.running:
-        looping_call.stop()
-
-
 class GetStream(object):
     def __init__(self, sd_identifier, session, exchange_rate_manager,
                  max_key_fee, data_rate=None, timeout=None,

@@ -64,6 +55,7 @@ class GetStream(object):
         # fired after the metadata and the first data blob have been downloaded
         self.data_downloading_deferred = defer.Deferred(None)

+
     @property
     def download_path(self):
         return os.path.join(self.download_directory, self.downloader.file_name)

@@ -71,11 +63,10 @@ class GetStream(object):
     def _check_status(self, status):
         stop_condition = (status.num_completed > 0 or
                           status.running_status == ManagedEncryptedFileDownloader.STATUS_STOPPED)
-
         if stop_condition and not self.data_downloading_deferred.called:
             self.data_downloading_deferred.callback(True)
         if self.data_downloading_deferred.called:
-            safe_stop(self.checker)
+            safe_stop_looping_call(self.checker)
         else:
             log.info("Downloading stream data (%i seconds)", self.timeout_counter)

@@ -83,17 +74,14 @@ class GetStream(object):
         """
         Check if we've got the first data blob in the stream yet
         """
-
         self.timeout_counter += 1
         if self.timeout_counter >= self.timeout:
             if not self.data_downloading_deferred.called:
-                self.data_downloading_deferred.errback(Exception("Timeout"))
-            safe_stop(self.checker)
-        elif self.downloader:
+                self.data_downloading_deferred.errback(DownloadTimeoutError(self.file_name))
+            safe_stop_looping_call(self.checker)
+        else:
             d = self.downloader.status()
             d.addCallback(self._check_status)
-        else:
-            log.info("Downloading stream descriptor blob (%i seconds)", self.timeout_counter)

     def convert_max_fee(self):
         currency, amount = self.max_key_fee['currency'], self.max_key_fee['amount']

@@ -157,15 +145,14 @@ class GetStream(object):
         self.set_status(DOWNLOAD_STOPPED_CODE, name)
         log.info("Finished downloading lbry://%s (%s) --> %s", name, self.sd_hash[:6],
                  self.download_path)
-        safe_stop(self.checker)
+        safe_stop_looping_call(self.checker)
         status = yield self.downloader.status()
         self._check_status(status)
         defer.returnValue(self.download_path)

     @defer.inlineCallbacks
-    def initialize(self, stream_info, name):
+    def _initialize(self, stream_info):
         # Set sd_hash and return key_fee from stream_info
-        self.set_status(INITIALIZING_CODE, name)
         self.sd_hash = stream_info.source_hash
         key_fee = None
         if stream_info.has_fee:

@@ -180,15 +167,15 @@ class GetStream(object):
         defer.returnValue(downloader)

     @defer.inlineCallbacks
-    def download(self, name, key_fee):
-        # download sd blob, and start downloader
-        self.set_status(DOWNLOAD_METADATA_CODE, name)
-        sd_blob = yield download_sd_blob(self.session, self.sd_hash, self.payment_rate_manager)
-        self.downloader = yield self._create_downloader(sd_blob)
+    def _download_sd_blob(self):
+        sd_blob = yield download_sd_blob(self.session, self.sd_hash,
+                                         self.payment_rate_manager, self.timeout)
+        defer.returnValue(sd_blob)

-        self.set_status(DOWNLOAD_RUNNING_CODE, name)
-        if key_fee:
-            yield self.pay_key_fee(key_fee, name)
+    @defer.inlineCallbacks
+    def _download(self, sd_blob, name, key_fee):
+        self.downloader = yield self._create_downloader(sd_blob)
+        yield self.pay_key_fee(key_fee, name)

         log.info("Downloading lbry://%s (%s) --> %s", name, self.sd_hash[:6], self.download_path)
         self.finished_deferred = self.downloader.start()

@@ -205,20 +192,22 @@ class GetStream(object):
         downloader - instance of ManagedEncryptedFileDownloader
         finished_deferred - deferred callbacked when download is finished
         """
-        key_fee = yield self.initialize(stream_info, name)
-        safe_start(self.checker)
-        try:
-            yield self.download(name, key_fee)
-        except Exception as err:
-            safe_stop(self.checker)
-            raise
+        self.set_status(INITIALIZING_CODE, name)
+        key_fee = yield self._initialize(stream_info)
+
+        self.set_status(DOWNLOAD_METADATA_CODE, name)
+        sd_blob = yield self._download_sd_blob()
+
+        yield self._download(sd_blob, name, key_fee)
+        self.set_status(DOWNLOAD_RUNNING_CODE, name)
+        safe_start_looping_call(self.checker, 1)

         try:
             yield self.data_downloading_deferred
         except Exception as err:
             self.downloader.stop()
-            safe_stop(self.checker)
+            safe_stop_looping_call(self.checker)
             raise

         defer.returnValue((self.downloader, self.finished_deferred))
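GetStream's timeout handling now lives in a one-second `LoopingCall`: a counter is incremented on every tick and the waiting Deferred is errbacked once it reaches `self.timeout`. The sketch below reproduces that pattern in isolation (requires Twisted); the `TimeoutChecker` class and its names are illustrative, not part of the diff, and a test clock stands in for the reactor exactly as the new unit tests do:

```python
from twisted.internet import defer, task


class TimeoutChecker(object):
    """Stripped-down stand-in for the checker loop GetStream uses."""

    def __init__(self, timeout, clock):
        self.timeout = timeout
        self.timeout_counter = 0
        self.waiting = defer.Deferred()
        self.checker = task.LoopingCall(self._check_timeout)
        self.checker.clock = clock            # let the caller drive time explicitly

    def start(self):
        if not self.checker.running:
            self.checker.start(1, now=False)  # tick once per second

    def _check_timeout(self):
        self.timeout_counter += 1
        if self.timeout_counter >= self.timeout and not self.waiting.called:
            self.checker.stop()
            self.waiting.errback(RuntimeError("download timed out"))


clock = task.Clock()
checker = TimeoutChecker(timeout=3, clock=clock)
errors = []
checker.waiting.addErrback(lambda f: errors.append(str(f.value)))
checker.start()
clock.advance(3)        # three ticks, so the counter reaches the timeout
print(errors)           # ['download timed out']
```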
@@ -1,14 +1,24 @@
 import logging
+import os
+import sys
 import mimetypes

+from appdirs import user_data_dir
 from zope.interface import implements
 from twisted.internet import defer, error, interfaces, abstract, task, reactor


+# TODO: omg, this code is essentially duplicated in Daemon
+if sys.platform != "darwin":
+    data_dir = os.path.join(os.path.expanduser("~"), ".lbrynet")
+else:
+    data_dir = user_data_dir("LBRY")
+if not os.path.isdir(data_dir):
+    os.mkdir(data_dir)
+
 log = logging.getLogger(__name__)
 STATUS_FINISHED = 'finished'


 class EncryptedFileStreamer(object):
     """
     Writes LBRY stream to request; will pause to wait for new data if the file

@@ -21,6 +31,7 @@ class EncryptedFileStreamer(object):

     bufferSize = abstract.FileDescriptor.bufferSize

+
     # How long to wait between sending blocks (needed because some
     # video players freeze up if you try to send data too fast)
     stream_interval = 0.005

@@ -28,6 +39,7 @@ class EncryptedFileStreamer(object):
     # How long to wait before checking if new data has been appended to the file
     new_data_check_interval = 0.25

+
     def __init__(self, request, path, stream, file_manager):
         def _set_content_length_header(length):
             self._request.setHeader('content-length', length)
@@ -2,15 +2,25 @@ import logging
 import os
 import shutil
 import json
+import sys
 import tempfile

+
+from appdirs import user_data_dir
 from twisted.web import server, static, resource
 from twisted.internet import defer, error

 from lbrynet import conf
 from lbrynet.daemon.FileStreamer import EncryptedFileStreamer

+# TODO: omg, this code is essentially duplicated in Daemon
+
+if sys.platform != "darwin":
+    data_dir = os.path.join(os.path.expanduser("~"), ".lbrynet")
+else:
+    data_dir = user_data_dir("LBRY")
+if not os.path.isdir(data_dir):
+    os.mkdir(data_dir)
+
 log = logging.getLogger(__name__)
@@ -1,14 +1,21 @@
 import json
 import logging.handlers
+import sys
 import os

+from appdirs import user_data_dir
 from twisted.internet.task import LoopingCall
 from twisted.internet import reactor
-from lbrynet import conf

-conf.initialize_settings()
-log_dir = conf.settings['data_dir']
+if sys.platform != "darwin":
+    log_dir = os.path.join(os.path.expanduser("~"), ".lbrynet")
+else:
+    log_dir = user_data_dir("LBRY")
+
+if not os.path.isdir(log_dir):
+    os.mkdir(log_dir)

 LOG_FILENAME = os.path.join(log_dir, 'lbrynet-daemon.log')

 if os.path.isfile(LOG_FILENAME):
@@ -24,7 +24,7 @@ class DBEncryptedFileMetadataManager(object):
         return self._open_db()

     def stop(self):
-        self.db_conn = None
+        self.db_conn.close()
         return defer.succeed(True)

     def get_all_streams(self):
@@ -13,8 +13,8 @@ gmpy==1.17
 jsonrpc==1.2
 jsonrpclib==0.1.7
 jsonschema==2.5.1
-git+https://github.com/lbryio/lbryschema.git@v0.0.7#egg=lbryschema
-git+https://github.com/lbryio/lbryum.git@v3.1.1#egg=lbryum
+git+https://github.com/lbryio/lbryschema.git@v0.0.8#egg=lbryschema
+git+https://github.com/lbryio/lbryum.git@v3.1.3#egg=lbryum
 miniupnpc==1.9
 pbkdf2==1.3
 pycrypto==2.6.1
setup.py (4 changed lines)
@@ -21,8 +21,8 @@ requires = [
     'envparse',
     'jsonrpc',
     'jsonschema',
-    'lbryum==3.1.1',
-    'lbryschema==0.0.7',
+    'lbryum==3.1.4rc1',
+    'lbryschema==0.0.8',
     'miniupnpc',
     'pycrypto',
     'pyyaml',
@@ -65,6 +65,19 @@ def use_epoll_on_linux():
         sys.modules['twisted.internet.reactor'] = twisted.internet.reactor


+def init_conf_windows(settings={}):
+    """
+    There is no fork on windows, so imports
+    are freshly initialized in new processes.
+    So conf needs to be initialized for new processes
+    """
+    if os.name == 'nt':
+        original_settings = conf.settings
+        conf.settings = conf.Config(conf.FIXED_SETTINGS, conf.ADJUSTABLE_SETTINGS)
+        conf.settings.installation_id = conf.settings.get_installation_id()
+        conf.settings.update(settings)
+
+
 class LbryUploader(object):
     def __init__(self, sd_hash_queue, kill_event, dead_event,
                  file_size, ul_rate_limit=None, is_generous=False):

@@ -84,6 +97,8 @@ class LbryUploader(object):

     def start(self):
         use_epoll_on_linux()
+        init_conf_windows()
+
         from twisted.internet import reactor
         self.reactor = reactor
         logging.debug("Starting the uploader")

@@ -98,6 +113,7 @@ class LbryUploader(object):
         self.sd_identifier = StreamDescriptorIdentifier()
         db_dir = "server"
         os.mkdir(db_dir)
+
         self.session = Session(
             conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, lbryid="abcd",
             peer_finder=peer_finder, hash_announcer=hash_announcer, peer_port=5553,

@@ -182,6 +198,7 @@ class LbryUploader(object):
 def start_lbry_reuploader(sd_hash, kill_event, dead_event,
                           ready_event, n, ul_rate_limit=None, is_generous=False):
     use_epoll_on_linux()
+    init_conf_windows()
     from twisted.internet import reactor

     logging.debug("Starting the uploader")

@@ -295,6 +312,7 @@ def start_lbry_reuploader(sd_hash, kill_event, dead_event,

 def start_blob_uploader(blob_hash_queue, kill_event, dead_event, slow, is_generous=False):
     use_epoll_on_linux()
+    init_conf_windows()
     from twisted.internet import reactor

     logging.debug("Starting the uploader")
@@ -24,7 +24,10 @@ class BlobManagerTest(unittest.TestCase):

     def tearDown(self):
         self.bm.stop()
-        shutil.rmtree(self.blob_dir)
+        # BlobFile will try to delete itself in _close_writer
+        # thus when calling rmtree we may get a FileNotFoundError
+        # for the blob file
+        shutil.rmtree(self.blob_dir, ignore_errors=True)
         shutil.rmtree(self.db_dir)

     @defer.inlineCallbacks

@@ -100,7 +103,7 @@ class BlobManagerTest(unittest.TestCase):

         # open the last blob
         blob = yield self.bm.get_blob(blob_hashes[-1])
-        yield blob.open_for_writing(self.peer)
+        finished_d, write, cancel = yield blob.open_for_writing(self.peer)

         # delete the last blob and check if it still exists
         out = yield self.bm.delete_blobs([blob_hash])

@@ -109,4 +112,4 @@ class BlobManagerTest(unittest.TestCase):
         self.assertTrue(blob_hashes[-1] in blobs)
         self.assertTrue(os.path.isfile(os.path.join(self.blob_dir,blob_hashes[-1])))
-
+        blob._close_writer(blob.writers[self.peer][0])
@@ -14,6 +14,7 @@ class DBEncryptedFileMetadataManagerTest(unittest.TestCase):
         self.manager = DBEncryptedFileMetadataManager(self.db_dir)

     def tearDown(self):
+        self.manager.stop()
         shutil.rmtree(self.db_dir)

     @defer.inlineCallbacks
@@ -2,17 +2,18 @@ import types
 import mock
 import json
 from twisted.trial import unittest
-from twisted.internet import defer
+from twisted.internet import defer, task

 from lbryschema.claim import ClaimDict

 from lbrynet.core import Session, PaymentRateManager, Wallet
+from lbrynet.core.Error import DownloadTimeoutError
 from lbrynet.daemon import Downloader
 from lbrynet.core.StreamDescriptor import StreamDescriptorIdentifier,StreamMetadata
 from lbrynet.lbry_file.client.EncryptedFileOptions import add_lbry_file_to_sd_identifier
-from lbrynet.core.HashBlob import TempBlob
-from lbrynet.core.BlobManager import TempBlobManager
-from lbrynet.file_manager.EncryptedFileDownloader import ManagedEncryptedFileDownloaderFactory
+from lbrynet.file_manager.EncryptedFileStatusReport import EncryptedFileStatusReport
+from lbrynet.file_manager.EncryptedFileDownloader import ManagedEncryptedFileDownloader, ManagedEncryptedFileDownloaderFactory
 from lbrynet.daemon.ExchangeRateManager import ExchangeRateManager

 from tests.mocks import BlobAvailabilityTracker as DummyBlobAvailabilityTracker

@@ -20,10 +21,48 @@ from tests.mocks import ExchangeRateManager as DummyExchangeRateManager
 from tests.mocks import BTCLBCFeed, USDBTCFeed
 from tests.mocks import mock_conf_settings

+
+class MocDownloader(object):
+    def __init__(self):
+        self.finish_deferred = defer.Deferred(None)
+        self.stop_called = False
+
+        self.name = 'test'
+        self.num_completed = 0
+        self.num_known = 1
+        self.running_status = ManagedEncryptedFileDownloader.STATUS_RUNNING
+
+    @defer.inlineCallbacks
+    def status(self):
+        out = yield EncryptedFileStatusReport(self.name, self.num_completed, self.num_known, self.running_status)
+        defer.returnValue(out)
+
+    def start(self):
+        return self.finish_deferred
+
+    def stop(self):
+        self.stop_called = True
+        self.finish_deferred.callback(True)
+
+
+def moc_initialize(self, stream_info):
+    self.sd_hash = "d5169241150022f996fa7cd6a9a1c421937276a3275eb912790bd07ba7aec1fac5fd45431d226b8fb402691e79aeb24b"
+    return None
+
+
+def moc_download_sd_blob(self):
+    return None
+
+
+def moc_download(self, sd_blob, name, key_fee):
+    self.pay_key_fee(key_fee, name)
+    self.downloader = MocDownloader()
+    self.downloader.start()
+
+
+def moc_pay_key_fee(self, key_fee, name):
+    self.pay_key_fee_called = True
+
+
 class GetStreamTests(unittest.TestCase):

     def init_getstream_with_mocs(self):
         mock_conf_settings(self)

@@ -37,8 +76,11 @@ class GetStreamTests(unittest.TestCase):
         data_rate = {'currency':"LBC", 'amount':0, 'address':''}

         getstream = Downloader.GetStream(sd_identifier, session,
-                                         exchange_rate_manager, max_key_fee, timeout=10, data_rate=data_rate)
+                                         exchange_rate_manager, max_key_fee, timeout=3, data_rate=data_rate)
+        getstream.pay_key_fee_called = False
+
+        self.clock = task.Clock()
+        getstream.checker.clock = self.clock
         return getstream

     @defer.inlineCallbacks

@@ -56,3 +98,86 @@ class GetStreamTests(unittest.TestCase):
             yield getstream.start(stream_info,name)


+    @defer.inlineCallbacks
+    def test_sd_blob_download_timeout(self):
+        """
+        test that if download_sd_blob fails due to timeout,
+        DownloadTimeoutError is raised
+        """
+        def download_sd_blob(self):
+            raise DownloadTimeoutError(self.file_name)
+
+        getstream = self.init_getstream_with_mocs()
+        getstream._initialize = types.MethodType(moc_initialize, getstream)
+        getstream._download_sd_blob = types.MethodType(download_sd_blob, getstream)
+        getstream._download = types.MethodType(moc_download, getstream)
+        getstream.pay_key_fee = types.MethodType(moc_pay_key_fee, getstream)
+        name = 'test'
+        stream_info = None
+        with self.assertRaises(DownloadTimeoutError):
+            yield getstream.start(stream_info, name)
+        self.assertFalse(getstream.pay_key_fee_called)
+
+    @defer.inlineCallbacks
+    def test_timeout(self):
+        """
+        test that timeout (set to 2 here) exception is raised
+        when download times out while downloading first blob, and key fee is paid
+        """
+        getstream = self.init_getstream_with_mocs()
+        getstream._initialize = types.MethodType(moc_initialize, getstream)
+        getstream._download_sd_blob = types.MethodType(moc_download_sd_blob, getstream)
+        getstream._download = types.MethodType(moc_download, getstream)
+        getstream.pay_key_fee = types.MethodType(moc_pay_key_fee, getstream)
+        name = 'test'
+        stream_info = None
+        start = getstream.start(stream_info, name)
+        self.clock.advance(1)
+        self.clock.advance(1)
+        with self.assertRaises(DownloadTimeoutError):
+            yield start
+        self.assertTrue(getstream.downloader.stop_called)
+        self.assertTrue(getstream.pay_key_fee_called)
+
+    @defer.inlineCallbacks
+    def test_finish_one_blob(self):
+        """
+        test that if we have 1 completed blob, start() returns
+        and key fee is paid
+        """
+        getstream = self.init_getstream_with_mocs()
+        getstream._initialize = types.MethodType(moc_initialize, getstream)
+        getstream._download_sd_blob = types.MethodType(moc_download_sd_blob, getstream)
+        getstream._download = types.MethodType(moc_download, getstream)
+        getstream.pay_key_fee = types.MethodType(moc_pay_key_fee, getstream)
+        name = 'test'
+        stream_info = None
+        start = getstream.start(stream_info, name)
+
+        getstream.downloader.num_completed = 1
+        self.clock.advance(1)
+
+        downloader, f_deferred = yield start
+        self.assertTrue(getstream.pay_key_fee_called)
+
+    @defer.inlineCallbacks
+    def test_finish_stopped_downloader(self):
+        """
+        test that if we have a stopped downloader, before a blob is downloaded,
+        start() returns
+        """
+        getstream = self.init_getstream_with_mocs()
+        getstream._initialize = types.MethodType(moc_initialize, getstream)
+        getstream._download_sd_blob = types.MethodType(moc_download_sd_blob, getstream)
+        getstream._download = types.MethodType(moc_download, getstream)
+        name = 'test'
+        stream_info = None
+        start = getstream.start(stream_info, name)
+
+        getstream.downloader.running_status = ManagedEncryptedFileDownloader.STATUS_STOPPED
+        self.clock.advance(1)
+        downloader, f_deferred = yield start
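These tests replace GetStream's internal steps with the `moc_*` functions by binding plain functions onto a single instance with `types.MethodType`. A tiny self-contained illustration of that binding trick, using throwaway names:

```python
import types


class Downloader(object):
    def fetch(self):
        return "real network fetch"


def fake_fetch(self):
    # 'self' is the Downloader instance once the function is bound to it.
    return "canned response for %s" % type(self).__name__


d = Downloader()
d.fetch = types.MethodType(fake_fetch, d)  # patch only this instance, not the class
print(d.fetch())                           # canned response for Downloader
print(Downloader().fetch())                # real network fetch
```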
@@ -1,7 +1,6 @@
 import os
-
 from twisted.trial import unittest

 from lbrynet import conf

@@ -54,3 +53,11 @@ class SettingsTest(unittest.TestCase):
         self.assertEqual('cli_test_string', settings['test'])
         settings.set('test', 'runtime_takes_precedence', data_types=(conf.TYPE_RUNTIME,))
         self.assertEqual('runtime_takes_precedence', settings['test'])
+
+    def test_data_dir(self):
+        # check if these directories are returned as string and not unicode
+        # otherwise there will be problems when calling os.path.join on
+        # unicode directory names with string file names
+        self.assertEqual(str, type(conf.default_download_directory))
+        self.assertEqual(str, type(conf.default_data_dir))
+        self.assertEqual(str, type(conf.default_lbryum_dir))