forked from LBRYCommunity/lbry-sdk

commit fbef187400
Merge branch 'master' into metadata-version

# Conflicts:
#   lbrynet/conf.py
#   lbrynet/lbrynet_daemon/LBRYDaemon.py

27 changed files with 576 additions and 69 deletions
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 0.3.6
+current_version = 0.3.10
 commit = True
 tag = True
 message = Bump version: {current_version} -> {new_version}
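The `[bumpversion]` section above is what drives this 0.3.6 -> 0.3.10 bump. As a hedged sketch (assuming the `bumpversion` CLI is installed and run from the repository root; the actual release invocation is not shown in this commit):

```
pip install bumpversion
# rewrites current_version in the tracked files, then commits and tags,
# because commit = True and tag = True in the config above
bumpversion patch
```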
@@ -298,7 +298,7 @@ ignored-classes=twisted.internet,RequestMessage
 # List of members which are set dynamically and missed by pylint inference
 # system, and so shouldn't trigger E1101 when accessed. Python regular
 # expressions are accepted.
-generated-members=
+generated-members=lbrynet.lbrynet_daemon.LBRYDaemon.Parameters
 
 
 [IMPORTS]
@@ -22,6 +22,7 @@ cache:
 directories:
 - $HOME/.cache/pip
 - $HOME/Library/Caches/pip
+- $TRAVIS_BUILD_DIR/cache/wheel
 
 before_install:
 - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then ./packaging/travis/setup_osx.sh; fi

FAQ.md — 2 changes

@@ -12,7 +12,7 @@ You can install LBRY command line by running `curl -sL https://rawgit.com/lbryio
 
 On Ubuntu or Mint you can install the prerequisites and lbrynet by running
 
-sudo apt-get install libgmp3-dev build-essential python2.7 python2.7-dev python-pip
+sudo apt-get install libgmp3-dev build-essential python2.7 python2.7-dev python-pip git
 git clone https://github.com/lbryio/lbry.git
 cd lbry
 sudo python setup.py install
@@ -23,7 +23,7 @@ You can install LBRY command line by running `curl -sL https://rawgit.com/lbryio
 On Ubuntu or Mint you can install the prerequisites and lbrynet by running
 
 ```
-sudo apt-get install libgmp3-dev build-essential python2.7 python2.7-dev python-pip
+sudo apt-get install libgmp3-dev build-essential python2.7 python2.7-dev python-pip git
 git clone https://github.com/lbryio/lbry.git
 cd lbry
 sudo python setup.py install
@@ -43,9 +43,9 @@ To stop lbrynet-console, enter the command 'exit'.
 Note: this process takes upwards of an hour and is not necessary to use lbrynet.
 
 ```
+sudo apt-get install build-essential libtool autotools-dev autoconf pkg-config libssl-dev libboost-all-dev libdb-dev libdb++-dev libqt4-dev libprotobuf-dev protobuf-compiler git
 git clone --depth=1 -b alpha https://github.com/lbryio/lbrycrd.git
 cd lbrycrd
-sudo apt-get install build-essential libtool autotools-dev autoconf pkg-config libssl-dev libboost-all-dev libdb-dev libdb++-dev libqt4-dev libprotobuf-dev protobuf-compiler
 ./autogen.sh
 ./configure --with-incompatible-bdb --without-gui
 
@@ -4,5 +4,5 @@ log = logging.getLogger(__name__)
 logging.getLogger(__name__).addHandler(logging.NullHandler())
 log.setLevel(logging.ERROR)
 
-__version__ = "0.3.6"
+__version__ = "0.3.10"
 version = tuple(__version__.split('.'))
@@ -48,8 +48,7 @@ DEFAULT_CACHE_TIME = 3600
 DEFAULT_UI_BRANCH = "master"
 
 SOURCE_TYPES = ['lbry_sd_hash', 'url', 'btih']
 
 CURRENCIES = [
     {'BTC': {'type': 'crypto'}},
     {'LBC': {'type': 'crypto'}},
 ]
@@ -917,7 +917,7 @@ class LBRYumWallet(LBRYWallet):
 network_start_d = defer.Deferred()
 
 def setup_network():
-    self.config = SimpleConfig()
+    self.config = SimpleConfig({'auto_connect': True})
     self.network = Network(self.config)
     alert.info("Loading the wallet...")
     return defer.succeed(self.network.start())
@@ -989,7 +989,7 @@ class LBRYumWallet(LBRYWallet):
 blockchain_caught_d = defer.Deferred()
 
 def check_caught_up():
-    local_height = self.network.get_local_height()
+    local_height = self.network.get_catchup_progress()
     remote_height = self.network.get_server_height()
 
     if remote_height != 0 and remote_height - local_height <= 5:
@@ -1115,11 +1115,9 @@ class LBRYumWallet(LBRYWallet):
 
 def _do_send_many(self, payments_to_send):
     log.warning("Doing send many. payments to send: %s", str(payments_to_send))
-    outputs = [(TYPE_ADDRESS, address, int(amount*COIN)) for address, amount in payments_to_send.iteritems()]
-    d = threads.deferToThread(self.wallet.mktx, outputs, None, self.config)
-    d.addCallback(lambda tx: threads.deferToThread(self.wallet.sendtx, tx))
-    d.addCallback(self._save_wallet)
-    return d
+    cmd = known_commands['paytomanyandsend']
+    func = getattr(self.cmd_runner, cmd.name)
+    return threads.deferToThread(func, payments_to_send.iteritems())
 
 def _get_value_for_name(self, name):
     cmd = known_commands['getvalueforname']
@@ -140,6 +140,7 @@ class BlobRequestHandler(object):
 def set_expected_payment():
     log.info("Setting expected payment")
     if self.blob_bytes_uploaded != 0 and self.blob_data_payment_rate is not None:
+        # TODO: explain why 2**20
         self.wallet.add_expected_payment(self.peer,
                                          self.currently_uploading.length * 1.0 *
                                          self.blob_data_payment_rate / 2**20)
@@ -156,4 +157,4 @@ class BlobRequestHandler(object):
 if reason is not None and isinstance(reason, Failure):
     log.info("Upload has failed. Reason: %s", reason.getErrorMessage())
 
 return _send_file()
@@ -67,6 +67,11 @@ if not os.path.isdir(log_dir):
 lbrynet_log = os.path.join(log_dir, LOG_FILE_NAME)
 
 log = logging.getLogger(__name__)
+
+# TODO: configuring a logger on module import drastically reduces the
+# amount of control the caller of this code has over logging
+#
+# Better would be to configure all logging at runtime.
 handler = logging.handlers.RotatingFileHandler(lbrynet_log, maxBytes=2097152, backupCount=5)
 log.addHandler(handler)
 log.setLevel(logging.INFO)
@@ -160,6 +165,7 @@ class LBRYDaemon(jsonrpc.JSONRPC):
 self.git_lbryum_version = None
 self.ui_version = None
 self.ip = None
+# TODO: this is confusing to set here, and then to be reset below.
 self.wallet_type = wallet_type
 self.first_run = None
 self.log_file = lbrynet_log
@@ -171,6 +177,7 @@ class LBRYDaemon(jsonrpc.JSONRPC):
 self.known_dht_nodes = KNOWN_DHT_NODES
 self.first_run_after_update = False
 self.last_traded_rate = None
+self.uploaded_temp_files = []
 
 if os.name == "nt":
     from lbrynet.winhelpers.knownpaths import get_path, FOLDERID, UserHandle
@@ -268,13 +275,30 @@ class LBRYDaemon(jsonrpc.JSONRPC):
 self.search_timeout = self.session_settings['search_timeout']
 self.download_timeout = self.session_settings['download_timeout']
 self.max_search_results = self.session_settings['max_search_results']
-if self.session_settings['wallet_type'] in WALLET_TYPES and not wallet_type:
-    self.wallet_type = self.session_settings['wallet_type']
-    log.info("Using wallet type %s from config" % self.wallet_type)
-else:
+####
+#
+# Ignore the saved wallet type. Some users will have their wallet type
+# saved as lbrycrd and we want wallets to be lbryum unless explicitly
+# set on the command line to be lbrycrd.
+#
+# if self.session_settings['wallet_type'] in WALLET_TYPES and not wallet_type:
+#     self.wallet_type = self.session_settings['wallet_type']
+#     log.info("Using wallet type %s from config" % self.wallet_type)
+# else:
+#     self.wallet_type = wallet_type
+#     self.session_settings['wallet_type'] = wallet_type
+#     log.info("Using wallet type %s specified from command line" % self.wallet_type)
+#
+# Instead, if wallet is not set on the command line, default to the default wallet
+#
+if wallet_type:
+    log.info("Using wallet type %s specified from command line", wallet_type)
     self.wallet_type = wallet_type
-    self.session_settings['wallet_type'] = wallet_type
-    log.info("Using wallet type %s specified from command line" % self.wallet_type)
+else:
+    log.info("Using the default wallet type %s", DEFAULT_WALLET)
+    self.wallet_type = DEFAULT_WALLET
+#
+####
 self.delete_blobs_on_remove = self.session_settings['delete_blobs_on_remove']
 self.peer_port = self.session_settings['peer_port']
 self.dht_node_port = self.session_settings['dht_node_port']
@@ -448,7 +472,7 @@ class LBRYDaemon(jsonrpc.JSONRPC):
 def _announce():
     self.announced_startup = True
     self.startup_status = STARTUP_STAGES[5]
-    log.info("[" + str(datetime.now()) + "] Started lbrynet-daemon")
+    log.info("Started lbrynet-daemon")
     if len(self.startup_scripts):
         log.info("Scheduling scripts")
         reactor.callLater(3, self._run_scripts)
@@ -470,7 +494,7 @@ class LBRYDaemon(jsonrpc.JSONRPC):
     d.addCallback(lambda _: _announce())
     return d
 
-log.info("[" + str(datetime.now()) + "] Starting lbrynet-daemon")
+log.info("Starting lbrynet-daemon")
 
 self.internet_connection_checker.start(3600)
 self.version_checker.start(3600 * 12)
@@ -536,14 +560,14 @@ class LBRYDaemon(jsonrpc.JSONRPC):
         s = socket.create_connection((host, 80), 2)
         self.connected_to_internet = True
     except:
-        log.info("[" + str(datetime.now()) + "] Internet connection not working")
+        log.info("Internet connection not working")
         self.connected_to_internet = False
 
 def _check_lbrynet_connection(self):
     def _log_success():
-        log.info("[" + str(datetime.now()) + "] lbrynet connectivity test passed")
+        log.info("lbrynet connectivity test passed")
     def _log_failure():
-        log.info("[" + str(datetime.now()) + "] lbrynet connectivity test failed")
+        log.info("lbrynet connectivity test failed")
 
     wonderfullife_sh = "6f3af0fa3924be98a54766aa2715d22c6c1509c3f7fa32566df4899a41f3530a9f97b2ecb817fa1dcbf1b30553aefaa7"
     d = download_sd_blob(self.session, wonderfullife_sh, self.session.base_payment_rate_manager)
@@ -561,7 +585,7 @@ class LBRYDaemon(jsonrpc.JSONRPC):
     self.git_lbryum_version = version
     return defer.succeed(None)
 except:
-    log.info("[" + str(datetime.now()) + "] Failed to get lbryum version from git")
+    log.info("Failed to get lbryum version from git")
     self.git_lbryum_version = None
     return defer.fail(None)
 
@@ -576,7 +600,7 @@ class LBRYDaemon(jsonrpc.JSONRPC):
     self.git_lbrynet_version = vr
     return defer.succeed(None)
 except:
-    log.info("[" + str(datetime.now()) + "] Failed to get lbrynet version from git")
+    log.info("Failed to get lbrynet version from git")
     self.git_lbrynet_version = None
     return defer.fail(None)
 
@@ -703,6 +727,13 @@ class LBRYDaemon(jsonrpc.JSONRPC):
     else:
         return defer.succeed(None)
 
+def _clean_up_temp_files(self):
+    for path in self.uploaded_temp_files:
+        try:
+            os.remove(path)
+        except OSError:
+            pass
+
 def _shutdown(self):
     log.info("Closing lbrynet session")
     log.info("Status at time of shutdown: " + self.startup_status[0])
@@ -717,6 +748,8 @@ class LBRYDaemon(jsonrpc.JSONRPC):
 if self.price_checker.running:
     self.price_checker.stop()
 
+self._clean_up_temp_files()
+
 d = self._upload_log(log_type="close", exclude_previous=False if self.first_run else True)
 d.addCallback(lambda _: self._stop_server())
 d.addErrback(lambda err: True)
|
||||||
return d
|
return d
|
||||||
|
|
||||||
def get_wallet():
|
def get_wallet():
|
||||||
if self.wallet_type == "lbrycrd": #force lbrycrd wallet no matter what while lbryum is down
|
if self.wallet_type == "lbrycrd":
|
||||||
log.info("Using lbrycrd wallet")
|
log.info("Using lbrycrd wallet")
|
||||||
d = defer.succeed(LBRYcrdWallet(self.db_dir, wallet_dir=self.wallet_dir, wallet_conf=self.lbrycrd_conf,
|
d = defer.succeed(LBRYcrdWallet(self.db_dir, wallet_dir=self.wallet_dir, wallet_conf=self.lbrycrd_conf,
|
||||||
lbrycrdd_path=self.lbrycrdd_path))
|
lbrycrdd_path=self.lbrycrdd_path))
|
||||||
|
@@ -903,7 +936,8 @@ class LBRYDaemon(jsonrpc.JSONRPC):
     log.info("Using PTC wallet")
     d = defer.succeed(PTCWallet(self.db_dir))
 else:
-    log.info("Requested unknown wallet '%s', using default lbryum" % self.wallet_type)
+    # TODO: should fail here. Can't switch to lbrycrd because the wallet_dir, conf and path won't be set
+    log.info("Requested unknown wallet '%s', using default lbryum", self.wallet_type)
     d = defer.succeed(LBRYumWallet(self.db_dir))
 
 d.addCallback(lambda wallet: {"wallet": wallet})
@@ -1164,15 +1198,15 @@ class LBRYDaemon(jsonrpc.JSONRPC):
         return d
 
     d.addCallback(lambda _: finish_deletion(lbry_file))
-    d.addCallback(lambda _: log.info("[" + str(datetime.now()) + "] Delete lbry file"))
+    d.addCallback(lambda _: log.info("Delete lbry file"))
     return d
 
 def _get_est_cost(self, name):
     def _check_est(d, name):
         if isinstance(d.result, float):
-            log.info("[" + str(datetime.now()) + "] Cost est for lbry://" + name + ": " + str(d.result) + "LBC")
+            log.info("Cost est for lbry://" + name + ": " + str(d.result) + "LBC")
         else:
-            log.info("[" + str(datetime.now()) + "] Timeout estimating cost for lbry://" + name + ", using key fee")
+            log.info("Timeout estimating cost for lbry://" + name + ", using key fee")
             d.cancel()
         return defer.succeed(None)
 
@@ -1367,7 +1401,7 @@ class LBRYDaemon(jsonrpc.JSONRPC):
     Returns: true if daemon completed startup, otherwise false
     """
 
-    log.info("[" + str(datetime.now()) + "] is_running: " + str(self.announced_startup))
+    log.info("is_running: " + str(self.announced_startup))
 
     if self.announced_startup:
         return self._render_response(True, OK_CODE)
@@ -1405,7 +1439,7 @@ class LBRYDaemon(jsonrpc.JSONRPC):
     else:
         r['message'] = "Catching up with the blockchain"
         r['progress'] = 0
-    log.info("[" + str(datetime.now()) + "] daemon status: " + str(r))
+    log.info("daemon status: " + str(r))
     return self._render_response(r, OK_CODE)
 
 def jsonrpc_is_first_run(self):
@@ -1418,7 +1452,7 @@ class LBRYDaemon(jsonrpc.JSONRPC):
     True if first run, otherwise False
     """
 
-    log.info("[" + str(datetime.now()) + "] Check if is first run")
+    log.info("Check if is first run")
     try:
         d = self.session.wallet.is_first_run()
     except:
|
||||||
Startup message, such as first run notification
|
Startup message, such as first run notification
|
||||||
"""
|
"""
|
||||||
|
|
||||||
log.info("[" + str(datetime.now()) + "] Get startup notice")
|
log.info("Get startup notice")
|
||||||
|
|
||||||
if self.first_run and not self.session.wallet.wallet_balance:
|
if self.first_run and not self.session.wallet.wallet_balance:
|
||||||
return self._render_response(self.startup_message, OK_CODE)
|
return self._render_response(self.startup_message, OK_CODE)
|
||||||
|
@ -1478,7 +1512,7 @@ class LBRYDaemon(jsonrpc.JSONRPC):
|
||||||
'lbryum_update_available': lbryum_version < self.git_lbryum_version
|
'lbryum_update_available': lbryum_version < self.git_lbryum_version
|
||||||
}
|
}
|
||||||
|
|
||||||
log.info("[" + str(datetime.now()) + "] Get version info: " + json.dumps(msg))
|
log.info("Get version info: " + json.dumps(msg))
|
||||||
return self._render_response(msg, OK_CODE)
|
return self._render_response(msg, OK_CODE)
|
||||||
|
|
||||||
def jsonrpc_get_settings(self):
|
def jsonrpc_get_settings(self):
|
||||||
|
@ -1506,7 +1540,7 @@ class LBRYDaemon(jsonrpc.JSONRPC):
|
||||||
'start_lbrycrdd': bool,
|
'start_lbrycrdd': bool,
|
||||||
"""
|
"""
|
||||||
|
|
||||||
log.info("[" + str(datetime.now()) + "] Get daemon settings")
|
log.info("Get daemon settings")
|
||||||
return self._render_response(self.session_settings, OK_CODE)
|
return self._render_response(self.session_settings, OK_CODE)
|
||||||
|
|
||||||
def jsonrpc_set_settings(self, p):
|
def jsonrpc_set_settings(self, p):
|
||||||
|
@ -1527,7 +1561,7 @@ class LBRYDaemon(jsonrpc.JSONRPC):
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def _log_settings_change():
|
def _log_settings_change():
|
||||||
log.info("[" + str(datetime.now()) + "] Set daemon settings to " + json.dumps(self.session_settings))
|
log.info("Set daemon settings to " + json.dumps(self.session_settings))
|
||||||
|
|
||||||
d = self._update_settings(p)
|
d = self._update_settings(p)
|
||||||
d.addErrback(lambda err: log.info(err.getTraceback()))
|
d.addErrback(lambda err: log.info(err.getTraceback()))
|
||||||
|
@ -1570,7 +1604,7 @@ class LBRYDaemon(jsonrpc.JSONRPC):
|
||||||
balance, float
|
balance, float
|
||||||
"""
|
"""
|
||||||
|
|
||||||
log.info("[" + str(datetime.now()) + "] Get balance")
|
log.info("Get balance")
|
||||||
return self._render_response(float(self.session.wallet.wallet_balance), OK_CODE)
|
return self._render_response(float(self.session.wallet.wallet_balance), OK_CODE)
|
||||||
|
|
||||||
def jsonrpc_stop(self):
|
def jsonrpc_stop(self):
|
||||||
|
@ -1811,7 +1845,7 @@ class LBRYDaemon(jsonrpc.JSONRPC):
|
||||||
return defer.DeferredList(ds)
|
return defer.DeferredList(ds)
|
||||||
|
|
||||||
def _disp(results):
|
def _disp(results):
|
||||||
log.info('[' + str(datetime.now()) + '] Found ' + str(len(results)) + ' search results')
|
log.info('Found ' + str(len(results)) + ' search results')
|
||||||
consolidated_results = []
|
consolidated_results = []
|
||||||
for r in results:
|
for r in results:
|
||||||
t = {}
|
t = {}
|
||||||
|
@@ -1825,7 +1859,7 @@ class LBRYDaemon(jsonrpc.JSONRPC):
 
         return consolidated_results
 
-    log.info('[' + str(datetime.now()) + '] Search nametrie: ' + search)
+    log.info('Search nametrie: ' + search)
 
     d = self.session.wallet.get_nametrie()
     d.addCallback(lambda trie: [claim for claim in trie if claim['name'].startswith(search) and 'txid' in claim])
|
||||||
return server.failure
|
return server.failure
|
||||||
|
|
||||||
def _disp(x):
|
def _disp(x):
|
||||||
log.info("[" + str(datetime.now()) + "] Abandoned name claim tx " + str(x))
|
log.info("Abandoned name claim tx " + str(x))
|
||||||
return self._render_response(x, OK_CODE)
|
return self._render_response(x, OK_CODE)
|
||||||
|
|
||||||
d = defer.Deferred()
|
d = defer.Deferred()
|
||||||
|
@@ -2024,7 +2058,7 @@ class LBRYDaemon(jsonrpc.JSONRPC):
     """
 
     def _disp(address):
-        log.info("[" + str(datetime.now()) + "] Got new wallet address: " + address)
+        log.info("Got new wallet address: " + address)
         return defer.succeed(address)
 
     d = self.session.wallet.get_new_address()
@@ -2216,7 +2250,7 @@ class LBRYDaemon(jsonrpc.JSONRPC):
     exclude_previous = True
 
 if 'message' in p.keys():
-    log.info("[" + str(datetime.now()) + "] Upload log message: " + str(p['message']))
+    log.info("Upload log message: " + str(p['message']))
 
 if 'force' in p.keys():
     force = p['force']
@@ -35,7 +35,7 @@ def main():
 if len(args) > 1:
     if isinstance(args[1], dict):
         params = args[1]
-    elif isinstance(args[1], str, unicode):
+    elif isinstance(args[1], basestring):
         params = json.loads(args[1])
     else:
         params = None
@@ -56,4 +56,4 @@ def main():
 
 
 if __name__ == '__main__':
     main()
@@ -12,7 +12,7 @@ from twisted.web import server
 from twisted.internet import reactor, defer
 from jsonrpc.proxy import JSONRPCProxy
 
-from lbrynet.lbrynet_daemon.LBRYDaemonServer import LBRYDaemonServer
+from lbrynet.lbrynet_daemon.LBRYDaemonServer import LBRYDaemonServer, LBRYDaemonRequest
 from lbrynet.conf import API_CONNECTION_STRING, API_INTERFACE, API_ADDRESS, API_PORT, \
     DEFAULT_WALLET, UI_ADDRESS, DEFAULT_UI_BRANCH, LOG_FILE_NAME
 
@@ -25,8 +25,13 @@ if not os.path.isdir(log_dir):
     os.mkdir(log_dir)
 
 lbrynet_log = os.path.join(log_dir, LOG_FILE_NAME)
 
+DEFAULT_FORMAT = "%(asctime)s %(levelname)-8s %(name)s:%(lineno)d: %(message)s"
+DEFAULT_FORMATTER = logging.Formatter(DEFAULT_FORMAT)
+
 log = logging.getLogger(__name__)
 handler = logging.handlers.RotatingFileHandler(lbrynet_log, maxBytes=2097152, backupCount=5)
+handler.setFormatter(DEFAULT_FORMATTER)
 log.addHandler(handler)
 log.setLevel(logging.INFO)
 
@@ -57,6 +62,13 @@ def stop():
     d.callback(None)
 
 
+def configureConsoleLogger():
+    handler = logging.StreamHandler(sys.stdout)
+    handler.setFormatter(DEFAULT_FORMATTER)
+    logging.getLogger().addHandler(handler)
+    logging.getLogger().setLevel(level=logging.INFO)
+
+
 def start():
     parser = argparse.ArgumentParser(description="Launch lbrynet-daemon")
     parser.add_argument("--wallet",
@@ -75,7 +87,7 @@ def start():
     args = parser.parse_args()
 
     if args.logtoconsole:
-        logging.basicConfig(level=logging.INFO)
+        configureConsoleLogger()
 
     args = parser.parse_args()
 
@@ -109,7 +121,9 @@ def start():
     if args.launchui:
         d.addCallback(lambda _: webbrowser.open(UI_ADDRESS))
 
-    reactor.listenTCP(API_PORT, server.Site(lbry.root), interface=API_INTERFACE)
+    lbrynet_server = server.Site(lbry.root)
+    lbrynet_server.requestFactory = LBRYDaemonRequest
+    reactor.listenTCP(API_PORT, lbrynet_server, interface=API_INTERFACE)
     reactor.run()
 
     if not args.logtoconsole and not args.quiet:
@@ -121,4 +135,4 @@ def start():
         return
 
 if __name__ == "__main__":
     start()
@@ -4,6 +4,10 @@ import shutil
 import json
 import sys
 import mimetypes
+import mimetools
+import tempfile
+import time
+import cgi
 
 from datetime import datetime
 from appdirs import user_data_dir
@@ -29,6 +33,184 @@ handler = logging.handlers.RotatingFileHandler(lbrynet_log, maxBytes=2097152, ba
 log.addHandler(handler)
 log.setLevel(logging.INFO)
 
+class LBRYDaemonRequest(server.Request):
+    """
+    For LBRY specific request functionality. Currently just provides
+    handling for large multipart POST requests, taken from here:
+    http://sammitch.ca/2013/07/handling-large-requests-in-twisted/
+
+    For multipart POST requests, this populates self.args with temp
+    file objects instead of strings. Note that these files don't auto-delete
+    on close because we want to be able to move and rename them.
+
+    """
+
+    # max amount of memory to allow any ~single~ request argument [ie: POSTed file]
+    # note: this value seems to be taken with a grain of salt, memory usage may spike
+    # FAR above this value in some cases.
+    # eg: set the memory limit to 5 MB, write 2 blocks of 4MB, mem usage will
+    #     have spiked to 8MB before the data is rolled to disk after the
+    #     second write completes.
+    memorylimit = 1024*1024*100
+
+    # enable/disable debug logging
+    do_log = False
+
+    # re-defined only for debug/logging purposes
+    def gotLength(self, length):
+        if self.do_log:
+            print '%f Headers received, Content-Length: %d' % (time.time(), length)
+        server.Request.gotLength(self, length)
+
+    # re-definition of twisted.web.server.Request.requestreceived, the only difference
+    # is that self.parse_multipart() is used rather than cgi.parse_multipart()
+    def requestReceived(self, command, path, version):
+        from twisted.web.http import parse_qs
+        if self.do_log:
+            print '%f Request Received' % time.time()
+            print self.content
+
+        self.content.seek(0,0)
+        self.args = {}
+        self.stack = []
+
+        self.method, self.uri = command, path
+        self.clientproto = version
+        x = self.uri.split(b'?', 1)
+
+        if len(x) == 1:
+            self.path = self.uri
+        else:
+            self.path, argstring = x
+            self.args = parse_qs(argstring, 1)
+
+        # cache the client and server information, we'll need this later to be
+        # serialized and sent with the request so CGIs will work remotely
+        self.client = self.channel.transport.getPeer()
+        self.host = self.channel.transport.getHost()
+
+        # Argument processing
+        args = self.args
+        ctype = self.requestHeaders.getRawHeaders(b'content-type')
+        if ctype is not None:
+            ctype = ctype[0]
+
+        if self.method == b"POST" and ctype:
+            mfd = b'multipart/form-data'
+            key, pdict = cgi.parse_header(ctype)
+            if key == b'application/x-www-form-urlencoded':
+                args.update(parse_qs(self.content.read(), 1))
+            elif key == mfd:
+                try:
+                    self.content.seek(0,0)
+                    args.update(self.parse_multipart(self.content, pdict))
+                    #args.update(cgi.parse_multipart(self.content, pdict))
+
+                except KeyError as e:
+                    if e.args[0] == b'content-disposition':
+                        # Parse_multipart can't cope with missing
+                        # content-dispostion headers in multipart/form-data
+                        # parts, so we catch the exception and tell the client
+                        # it was a bad request.
+                        self.channel.transport.write(
+                            b"HTTP/1.1 400 Bad Request\r\n\r\n")
+                        self.channel.transport.loseConnection()
+                        return
+                    raise
+
+            self.content.seek(0, 0)
+
+        self.process()
+
+    # re-definition of cgi.parse_multipart that uses a single temporary file to store
+    # data rather than storing 2 to 3 copies in various lists.
+    def parse_multipart(self, fp, pdict):
+        if self.do_log:
+            print '%f Parsing Multipart data: ' % time.time()
+        rewind = fp.tell()  #save cursor
+        fp.seek(0,0)  #reset cursor
+
+        boundary = ""
+        if 'boundary' in pdict:
+            boundary = pdict['boundary']
+        if not cgi.valid_boundary(boundary):
+            raise ValueError, ('Invalid boundary in multipart form: %r'
+                               % (boundary,))
+
+        nextpart = "--" + boundary
+        lastpart = "--" + boundary + "--"
+        partdict = {}
+        terminator = ""
+
+        while terminator != lastpart:
+            c_bytes = -1
+
+            data = tempfile.NamedTemporaryFile(delete=False)
+            if terminator:
+                # At start of next part. Read headers first.
+                headers = mimetools.Message(fp)
+                clength = headers.getheader('content-length')
+                if clength:
+                    try:
+                        c_bytes = int(clength)
+                    except ValueError:
+                        pass
+                if c_bytes > 0:
+                    data.write(fp.read(c_bytes))
+            # Read lines until end of part.
+            while 1:
+                line = fp.readline()
+                if not line:
+                    terminator = lastpart  # End outer loop
+                    break
+                if line[:2] == "--":
+                    terminator = line.strip()
+                    if terminator in (nextpart, lastpart):
+                        break
+                data.write(line)
+            # Done with part.
+            if data.tell() == 0:
+                continue
+            if c_bytes < 0:
+                # if a Content-Length header was not supplied with the MIME part
+                # then the trailing line break must be removed.
+                # we have data, read the last 2 bytes
+                rewind = min(2, data.tell())
+                data.seek(-rewind, os.SEEK_END)
+                line = data.read(2)
+                if line[-2:] == "\r\n":
+                    data.seek(-2, os.SEEK_END)
+                    data.truncate()
+                elif line[-1:] == "\n":
+                    data.seek(-1, os.SEEK_END)
+                    data.truncate()
+
+            line = headers['content-disposition']
+            if not line:
+                continue
+            key, params = cgi.parse_header(line)
+            if key != 'form-data':
+                continue
+            if 'name' in params:
+                name = params['name']
+                # kludge in the filename
+                if 'filename' in params:
+                    fname_index = name + '_filename'
+                    if fname_index in partdict:
+                        partdict[fname_index].append(params['filename'])
+                    else:
+                        partdict[fname_index] = [params['filename']]
+            else:
+                # Unnamed parts are not returned at all.
+                continue
+            data.seek(0,0)
+            if name in partdict:
+                partdict[name].append(data)
+            else:
+                partdict[name] = [data]
+
+        fp.seek(rewind)  # Restore cursor
+        return partdict
+
 class LBRYindex(resource.Resource):
     def __init__(self, ui_dir):
@@ -79,7 +261,7 @@ class LBRYFileStreamer(object):
 
 def pauseProducing(self):
     self._paused = True
-    log.info("[" + str(datetime.now()) + "] Pausing producer")
+    log.info("Pausing producer")
     return defer.succeed(None)
 
 def resumeProducing(self):
@@ -104,7 +286,7 @@ class LBRYFileStreamer(object):
     self._request.write(data)
     self._cursor += 1
 
-    log.info("[" + str(datetime.now()) + "] Wrote range %s-%s/%s, length: %s, readable: %s, depth: %s" %
+    log.info("Wrote range %s-%s/%s, length: %s, readable: %s, depth: %s" %
              (start_cur, self._cursor, self._file_size, self._cursor - start_cur, readable_bytes, self._depth))
     self._sent_bytes = True
 
@@ -117,12 +299,12 @@ class LBRYFileStreamer(object):
         self._deferred.addCallback(lambda _: threads.deferToThread(reactor.callLater, self._delay, _check_for_new_data))
         return defer.succeed(None)
 
-    log.info("[" + str(datetime.now()) + "] Resuming producer")
+    log.info("Resuming producer")
     self._paused = False
     self._deferred.addCallback(lambda _: _check_for_new_data())
 
 def stopProducing(self):
-    log.info("[" + str(datetime.now()) + "] Stopping producer")
+    log.info("Stopping producer")
     self._stopped = True
     # self._fileObject.close()
     self._deferred.addErrback(lambda err: err.trap(defer.CancelledError))
@@ -147,7 +329,7 @@ class HostedLBRYFile(resource.Resource):
 #
 # range_header = request.getAllHeaders()['range'].replace('bytes=', '').split('-')
 # start, stop = int(range_header[0]), range_header[1]
-# log.info("[" + str(datetime.now()) + "] GET range %s-%s" % (start, stop))
+# log.info("GET range %s-%s" % (start, stop))
 # path = os.path.join(self._api.download_directory, stream.file_name)
 #
 # d = stream.get_total_bytes()
@@ -179,12 +361,35 @@ class HostedLBRYFile(resource.Resource):
 # call.addErrback(lambda err: log.info("Error: " + str(err)))
 # call.cancel()
 
+class LBRYFileUpload(resource.Resource):
+    """
+    Accepts a file sent via the file upload widget in the web UI, saves
+    it into a temporary dir, and responds with a JSON string containing
+    the path of the newly created file.
+    """
+
+    def __init__(self, api):
+        self._api = api
+
+    def render_POST(self, request):
+        origfilename = request.args['file_filename'][0]
+        uploaded_file = request.args['file'][0]  # Temp file created by request
+
+        # Move to a new temporary dir and restore the original file name
+        newdirpath = tempfile.mkdtemp()
+        newpath = os.path.join(newdirpath, origfilename)
+        shutil.move(uploaded_file.name, newpath)
+        self._api.uploaded_temp_files.append(newpath)
+
+        return json.dumps(newpath)
+
+
 class LBRYDaemonServer(object):
     def _setup_server(self, wallet):
         self.root = LBRYindex(os.path.join(os.path.join(data_dir, "lbry-ui"), "active"))
         self._api = LBRYDaemon(self.root, wallet_type=wallet)
         self.root.putChild("view", HostedLBRYFile(self._api))
+        self.root.putChild("upload", LBRYFileUpload(self._api))
         self.root.putChild(API_ADDRESS, self._api)
         return defer.succeed(True)
 
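A hedged usage sketch for the new upload resource registered in `_setup_server`: `render_POST` reads the `file` part (plus the `file_filename` value that `parse_multipart` kludges in from the part's filename) and answers with a JSON string holding the server-side temp path. The port below is a placeholder; the real value comes from `API_PORT` in `lbrynet.conf`, which this diff does not show.

```
# <API_PORT> is a placeholder -- substitute the configured daemon API port
curl -s -F "file=@/path/to/movie.mp4" "http://localhost:<API_PORT>/upload"
# response body: a JSON-encoded string such as "/tmp/tmpXXXXXX/movie.mp4"
```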
@@ -143,6 +143,6 @@ class GetStream(object):
     d = _pay_key_fee()
     self.downloader = downloader
     self.download_path = os.path.join(downloader.download_directory, downloader.file_name)
-    d.addCallback(lambda _: log.info("[%s] Downloading %s --> %s" % (datetime.now(), self.stream_hash, self.downloader.file_name)))
+    d.addCallback(lambda _: log.info("Downloading %s --> %s", self.stream_hash, self.downloader.file_name))
     d.addCallback(lambda _: self.downloader.start())
 
@@ -48,9 +48,7 @@ class Publisher(object):
 def start(self, name, file_path, bid, metadata, fee=None, sources={}):
 
     def _show_result():
-        message = "[%s] Published %s --> lbry://%s txid: %s" % (datetime.now(), self.file_name, self.publish_name, self.txid)
-        log.info(message)
+        log.info("Published %s --> lbry://%s txid: %s", self.file_name, self.publish_name, self.txid)
         return defer.succeed(self.txid)
 
     self.publish_name = name
@@ -5,9 +5,11 @@ set -o xtrace
 
 DEST=`pwd`
 tmp="${DEST}/build"
+ON_TRAVIS=false
 
 rm -rf build dist LBRY.app
 
+pip install wheel
 # the default py2app (v0.9) has a bug that is fixed in the head of /metachris/py2app
 pip install git+https://github.com/metachris/py2app
 pip install jsonrpc
|
||||||
LBRY="${tmp}/lbry"
|
LBRY="${tmp}/lbry"
|
||||||
else
|
else
|
||||||
# building on travis
|
# building on travis
|
||||||
|
ON_TRAVIS=true
|
||||||
cd ${TRAVIS_BUILD_DIR}
|
cd ${TRAVIS_BUILD_DIR}
|
||||||
LBRY=${TRAVIS_BUILD_DIR}
|
LBRY=${TRAVIS_BUILD_DIR}
|
||||||
fi
|
fi
|
||||||
|
@@ -45,7 +48,25 @@ codesign -s "${LBRY_DEVELOPER_ID}" -f "${DEST}/dist/LBRYURIHandler.app/Contents/
 codesign --deep -s "${LBRY_DEVELOPER_ID}" -f "${DEST}/dist/LBRYURIHandler.app/Contents/MacOS/LBRYURIHandler"
 codesign -vvvv "${DEST}/dist/LBRYURIHandler.app"
 
-pip install certifi pyobjc-core pyobjc-framework-Cocoa pyobjc-framework-CFNetwork
+pip install certifi
+MODULES="pyobjc-core pyobjc-framework-Cocoa pyobjc-framework-CFNetwork"
+if [ ${ON_TRAVIS} = true ]; then
+  WHEEL_DIR="${TRAVIS_BUILD_DIR}/cache/wheel"
+  mkdir -p "${WHEEL_DIR}"
+  # mapping from the package name to the
+  # actual built wheel file is surprisingly
+  # hard so instead of checking for the existance
+  # of each wheel, we mark with a file when they've all been
+  # built and skip when that file exists
+  if [ ! -f "${WHEEL_DIR}"/finished ]; then
+    pip wheel -w "${WHEEL_DIR}" ${MODULES}
+    touch "${WHEEL_DIR}"/finished
+  fi
+  pip install "${WHEEL_DIR}"/*.whl
+else
+  pip install $MODULES
+fi
+
 
 # add lbrycrdd as a resource. Following
 # http://stackoverflow.com/questions/11370012/can-executables-made-with-py2app-include-other-terminal-scripts-and-run-them
@@ -38,8 +38,8 @@ rm get-pip.py
 
 pip install -r requirements.txt
 
-pip install nose coverage coveralls pylint
-nosetests --with-coverage --cover-package=lbrynet -v -I functional_tests.py tests/
+pip install mock pylint
+trial tests
 # TODO: submit coverage report to coveralls
 
 # TODO: as code quality improves, make pylint be more strict
@@ -1,5 +1,5 @@
 [Desktop Entry]
-Version=0.3.6
+Version=0.3.10
 Name=LBRY
 Comment=The world's first user-owned content marketplace
 Icon=lbry
@@ -12,7 +12,7 @@ https://github.com/lbryio/lbryum/tarball/master/#egg=lbryum
 leveldb==0.193
 miniupnpc==1.9
 pbkdf2==1.3
-protobuf==3.0.0b2
+protobuf==3.0.0b3
 pycrypto==2.6.1
 python-bitcoinrpc==0.1
 qrcode==5.2.2

scripts/migrate_lbryum_to_lbrycrd.py — new file, 110 lines

@@ -0,0 +1,110 @@
+import argparse
+import hashlib
+import json
+import subprocess
+import sys
+
+import base58
+
+from lbryum import SimpleConfig, Network
+from lbryum.wallet import WalletStorage, Wallet
+from lbryum.commands import known_commands, Commands
+from lbryum import lbrycrd
+
+
+def main():
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--wallet', help='path to lbryum wallet')
+    args = parser.parse_args()
+
+    ensureCliIsOnPathAndServerIsRunning()
+
+    wallet = getWallet(args.wallet)
+    addresses = wallet.addresses(True)
+    for addr in addresses[:-1]:
+        printBalance(wallet, addr)
+        saveAddr(wallet, addr)
+    # on the last one, rescan. Don't rescan early for sake of efficiency
+    addr = addresses[-1]
+    printBalance(wallet, addr)
+    saveAddr(wallet, addr, "true")
+
+
+def ensureCliIsOnPathAndServerIsRunning():
+    try:
+        output = subprocess.check_output(['lbrycrd-cli', 'getinfo'])
+    except OSError:
+        print 'Failed to run: lbrycrd-cli needs to be on the PATH'
+        sys.exit(1)
+    except subprocess.CalledProcessError:
+        print 'Failed to run: could not connect to the lbrycrd server.'
+        print 'Make sure it is running and able to be connected to.'
+        print 'One way to do this is to run:'
+        print '     lbrycrdd -server -printtoconsole'
+        sys.exit(1)
+
+
+def validateAddress(addr):
+    raw_output = subprocess.check_output(
+        ['lbrycrd-cli', 'validateaddress', addr])
+    output = json.loads(raw_output)
+    if not output['isvalid']:
+        raise Exception('Address {} is not valid'.format(addr))
+    if not output['ismine']:
+        raise Exception('Address {} is not yours'.format(addr))
+
+
+def printBalance(wallet, addr):
+    balance = getBalance(wallet, addr)
+    print 'Importing private key for %s with balance %s' % (addr, balance)
+
+
+def getBalance(wallet, addr):
+    return sum(wallet.get_addr_balance(addr))
+
+
+def getWallet(path=None):
+    if not path:
+        config = SimpleConfig()
+        path = config.get_wallet_path()
+    storage = WalletStorage(path)
+    if not storage.file_exists:
+        print "Failed to run: No wallet to migrate"
+        sys.exit(1)
+    return Wallet(storage)
+
+
+def saveAddr(wallet, addr, rescan="false"):
+    keys = wallet.get_private_key(addr, None)
+    assert len(keys) == 1, 'Address {} has {} keys. Expected 1'.format(addr, len(keys))
+    key = keys[0]
+    # copied from lbrycrd.regenerate_key
+    b = lbrycrd.ASecretToSecret(key)
+    pkey = b[0:32]
+    is_compressed = lbrycrd.is_compressed(key)
+    wif = pkeyToWif(pkey, is_compressed)
+    subprocess.check_call(
+        ['lbrycrd-cli', 'importprivkey', wif, "", rescan])
+    validateAddress(addr)
+
+
+def pkeyToWif(pkey, compressed):
+    # Follow https://en.bitcoin.it/wiki/Wallet_import_format
+    # to convert from a private key to the wallet import format
+    prefix = '\x1c'
+    wif = prefix + pkey
+    if compressed:
+        wif += '\x01'
+    intermediate_checksum = hashlib.sha256(wif).digest()
+    checksum = hashlib.sha256(intermediate_checksum).digest()
+    wif = wif + checksum[:4]
+    return base58.b58encode(wif)
+
+
+def wifToPkey(wif):
+    pkey = base58.b58decode(wif)
+    return pkey[1:-4]
+
+
+if __name__ == '__main__':
+    sys.exit(main())
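A hedged example of running the new migration script, pieced together from its own argparse options and error messages (the wallet path is illustrative):

```
# lbrycrd-cli must be on the PATH and lbrycrdd must be reachable, e.g.:
lbrycrdd -server -printtoconsole &

# migrate the default lbryum wallet, or point --wallet at a specific wallet file
python scripts/migrate_lbryum_to_lbrycrd.py
python scripts/migrate_lbryum_to_lbrycrd.py --wallet /path/to/lbryum/wallet
```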

setup.py — 3 changes

@@ -11,8 +11,7 @@ from setuptools import setup, find_packages
 base_dir = os.path.abspath(os.path.dirname(__file__))
 
 
-console_scripts = ['lbrynet-console = lbrynet.lbrynet_console.LBRYConsole:launch_lbry_console',
-                   'lbrynet-stdin-uploader = lbrynet.lbrynet_console.LBRYStdinUploader:launch_stdin_uploader',
+console_scripts = ['lbrynet-stdin-uploader = lbrynet.lbrynet_console.LBRYStdinUploader:launch_stdin_uploader',
                    'lbrynet-stdout-downloader = lbrynet.lbrynet_console.LBRYStdoutDownloader:launch_stdout_downloader',
                    'lbrynet-create-network = lbrynet.create_network:main',
                    'lbrynet-launch-node = lbrynet.dht.node:main',

tests/__init__.py — new empty file
tests/lbrynet/__init__.py — new empty file
tests/lbrynet/core/__init__.py — new empty file
tests/lbrynet/core/server/__init__.py — new empty file
tests/lbrynet/core/server/test_BlobRequestHandler.py — new file, 127 lines

@@ -0,0 +1,127 @@
+import StringIO
+
+import mock
+from twisted.internet import defer, protocol
+from twisted.test import proto_helpers
+from twisted.trial import unittest
+
+from lbrynet.core import Peer
+from lbrynet.core.server import BlobRequestHandler
+
+
+class TestBlobRequestHandlerQueries(unittest.TestCase):
+    def setUp(self):
+        self.blob_manager = mock.Mock()
+        self.payment_rate_manager = mock.Mock()
+        self.handler = BlobRequestHandler.BlobRequestHandler(
+            self.blob_manager, None, self.payment_rate_manager)
+
+    def test_empty_response_when_empty_query(self):
+        self.assertEqual(
+            {}, self.successResultOf(self.handler.handle_queries({})))
+
+    def test_error_set_when_rate_is_missing(self):
+        query = {'requested_blob': 'blob'}
+        deferred = self.handler.handle_queries(query)
+        response = {'incoming_blob': {'error': 'RATE_UNSET'}}
+        self.assertEqual(response, self.successResultOf(deferred))
+
+    def test_error_set_when_rate_too_low(self):
+        self.payment_rate_manager.accept_rate_blob_data.return_value = False
+        query = {
+            'blob_data_payment_rate': 'way_too_low',
+            'requested_blob': 'blob'
+        }
+        deferred = self.handler.handle_queries(query)
+        response = {
+            'blob_data_payment_rate': 'RATE_TOO_LOW',
+            'incoming_blob': {'error': 'RATE_UNSET'}
+        }
+        self.assertEqual(response, self.successResultOf(deferred))
+
+    def test_response_when_rate_too_low(self):
+        self.payment_rate_manager.accept_rate_blob_data.return_value = False
+        query = {
+            'blob_data_payment_rate': 'way_too_low',
+        }
+        deferred = self.handler.handle_queries(query)
+        response = {
+            'blob_data_payment_rate': 'RATE_TOO_LOW',
+        }
+        self.assertEqual(response, self.successResultOf(deferred))
+
+    def test_blob_unavailable_when_blob_not_validated(self):
+        self.payment_rate_manager.accept_rate_blob_data.return_value = True
+        blob = mock.Mock()
+        blob.is_validated.return_value = False
+        self.blob_manager.get_blob.return_value = defer.succeed(blob)
+        query = {
+            'blob_data_payment_rate': 'rate',
+            'requested_blob': 'blob'
+        }
+        deferred = self.handler.handle_queries(query)
+        response = {
+            'blob_data_payment_rate': 'RATE_ACCEPTED',
+            'incoming_blob': {'error': 'BLOB_UNAVAILABLE'}
+        }
+        self.assertEqual(response, self.successResultOf(deferred))
+
+    def test_blob_unavailable_when_blob_cannot_be_opened(self):
+        self.payment_rate_manager.accept_rate_blob_data.return_value = True
+        blob = mock.Mock()
+        blob.is_validated.return_value = True
+        blob.open_for_reading.return_value = None
+        self.blob_manager.get_blob.return_value = defer.succeed(blob)
+        query = {
+            'blob_data_payment_rate': 'rate',
+            'requested_blob': 'blob'
+        }
+        deferred = self.handler.handle_queries(query)
+        response = {
+            'blob_data_payment_rate': 'RATE_ACCEPTED',
+            'incoming_blob': {'error': 'BLOB_UNAVAILABLE'}
+        }
+        self.assertEqual(response, self.successResultOf(deferred))
+
+    def test_blob_details_are_set_when_all_conditions_are_met(self):
+        self.payment_rate_manager.accept_rate_blob_data.return_value = True
+        blob = mock.Mock()
+        blob.is_validated.return_value = True
+        blob.open_for_reading.return_value = True
+        blob.blob_hash = 'DEADBEEF'
+        blob.length = 42
+        self.blob_manager.get_blob.return_value = defer.succeed(blob)
+        query = {
+            'blob_data_payment_rate': 'rate',
+            'requested_blob': 'blob'
+        }
+        deferred = self.handler.handle_queries(query)
+        response = {
+            'blob_data_payment_rate': 'RATE_ACCEPTED',
+            'incoming_blob': {
+                'blob_hash': 'DEADBEEF',
+                'length': 42
+            }
+        }
+        self.assertEqual(response, self.successResultOf(deferred))
+
+
+class TestBlobRequestHandlerSender(unittest.TestCase):
+    def test_nothing_happens_if_not_currently_uploading(self):
+        handler = BlobRequestHandler.BlobRequestHandler(None, None, None)
+        handler.currently_uploading = None
+        deferred = handler.send_blob_if_requested(None)
+        self.assertEqual(True, self.successResultOf(deferred))
+
+    def test_file_is_sent_to_consumer(self):
+        # TODO: also check that the expected payment values are set
+        consumer = proto_helpers.StringTransport()
+        test_file = StringIO.StringIO('test')
+        handler = BlobRequestHandler.BlobRequestHandler(None, None, None)
+        handler.peer = mock.create_autospec(Peer.Peer)
+        handler.currently_uploading = mock.Mock()
+        handler.read_handle = test_file
+        handler.send_blob_if_requested(consumer)
+        while consumer.producer:
+            consumer.producer.resumeProducing()
+        self.assertEqual(consumer.value(), 'test')
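These tests are written against `twisted.trial.unittest`, and the packaging script above now runs the whole suite with `trial tests`. A hedged way to run just this module locally (assuming the package and `mock` are installed):

```
pip install mock
trial tests.lbrynet.core.server.test_BlobRequestHandler
```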