remove extraneous ellipses in messages
parent 962edb8e4c
commit 628defa891
10 changed files with 22 additions and 21 deletions
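
Every hunk below makes the same edit: a trailing ellipsis is dropped from a user-facing log, alert, or print message. As a rough sketch of the pattern (not part of this commit; strip_trailing_ellipsis is a hypothetical helper named here only for illustration):

    def strip_trailing_ellipsis(msg):
        # Hypothetical helper mirroring the manual edits in this diff:
        # drop a trailing '...' from a status message, leave anything else alone.
        return msg[:-3] if msg.endswith('...') else msg

    # Before: alert.info("Loading the wallet...")
    # After:  alert.info("Loading the wallet")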
@@ -859,7 +859,7 @@ class LBRYumWallet(Wallet):
         def setup_network():
             self.config = make_config(self._config)
             self.network = Network(self.config)
-            alert.info("Loading the wallet...")
+            alert.info("Loading the wallet")
             return defer.succeed(self.network.start())

         d = setup_network()
@@ -867,7 +867,7 @@ class LBRYumWallet(Wallet):
         def check_started():
             if self.network.is_connecting():
                 if not self.printed_retrieving_headers and self.network.blockchain.retrieving_headers:
-                    alert.info("Running the wallet for the first time...this may take a moment.")
+                    alert.info("Running the wallet for the first time. This may take a moment.")
                     self.printed_retrieving_headers = True
                 return False
             self._start_check.stop()
@@ -14,7 +14,7 @@ amount = 0


 def destroyNetwork(nodes):
-    print 'Destroying Kademlia network...'
+    print 'Destroying Kademlia network'
     i = 0
     for node in nodes:
         i += 1
@@ -50,12 +50,12 @@ def main():
     else:
         import socket
         ipAddress = socket.gethostbyname(socket.gethostname())
-        print 'Network interface IP address omitted; using %s...' % ipAddress
+        print 'Network interface IP address omitted; using %s' % ipAddress

     startPort = 4000
     port = startPort+1
     nodes = []
-    print 'Creating Kademlia network...'
+    print 'Creating Kademlia network'
     try:
         node = os.spawnlp(
             os.P_NOWAIT, 'lbrynet-launch-node', 'lbrynet-launch-node', str(startPort))
@@ -16,10 +16,10 @@ def print_usage():
 def join_network(udp_port, known_nodes):
     lbryid = generate_id()

-    log.info('Creating Node...')
+    log.info('Creating Node')
     node = Node(udpPort=udp_port, lbryid=lbryid)

-    log.info('Joining network...')
+    log.info('Joining network')
     d = node.joinNetwork(known_nodes)

 def log_network_size():
@@ -59,7 +59,7 @@ def storeValueCallback(*args, **kwargs):
     """ Callback function that is invoked when the storeValue() operation succeeds """
     print 'Value has been stored in the DHT'
     # Now that the value has been stored, schedule that the value is read again after 2.5 seconds
-    print 'Scheduling retrieval in 2.5 seconds...'
+    print 'Scheduling retrieval in 2.5 seconds'
     twisted.internet.reactor.callLater(2.5, getValue)


@@ -72,7 +72,7 @@ def getValue():
     """ Retrieves the value of the specified key (KEY) from the DHT """
     global node, KEY
     # Get the value for the specified key (immediately returns a Twisted deferred result)
-    print ('\nRetrieving value from DHT for key "%s"...' %
+    print ('\nRetrieving value from DHT for key "%s"' %
            binascii.unhexlify("f7d9dc4de674eaa2c5a022eb95bc0d33ec2e75c6"))
     deferredResult = node.iterativeFindValue(
         binascii.unhexlify("f7d9dc4de674eaa2c5a022eb95bc0d33ec2e75c6"))
@@ -91,13 +91,13 @@ def getValueCallback(result):
         print result

     # Either way, schedule a "delete" operation for the key
-    print 'Scheduling shutdown in 2.5 seconds...'
+    print 'Scheduling shutdown in 2.5 seconds'
     twisted.internet.reactor.callLater(2.5, stop)


 def stop():
     """ Stops the Twisted reactor, and thus the script """
-    print '\nStopping Kademlia node and terminating script...'
+    print '\nStopping Kademlia node and terminating script'
     twisted.internet.reactor.stop()

 if __name__ == '__main__':
@@ -145,7 +145,7 @@ if __name__ == '__main__':
     #
     # If you wish to have a pure Kademlia network, use the
     # entangled.kademlia.node.Node class instead
-    print 'Creating Node...'
+    print 'Creating Node'
     node = Node(udpPort=int(sys.argv[1]), lbryid=lbryid)

     # Schedule the node to join the Kademlia/Entangled DHT
@@ -75,7 +75,7 @@ DOWNLOAD_TIMEOUT_CODE = 'timeout'
 DOWNLOAD_RUNNING_CODE = 'running'
 DOWNLOAD_STOPPED_CODE = 'stopped'
 STREAM_STAGES = [
-    (INITIALIZING_CODE, 'Initializing...'),
+    (INITIALIZING_CODE, 'Initializing'),
     (DOWNLOAD_METADATA_CODE, 'Downloading metadata'),
     (DOWNLOAD_RUNNING_CODE, 'Started %s, got %s/%s blobs, stream status: %s'),
     (DOWNLOAD_STOPPED_CODE, 'Paused stream'),
@@ -637,7 +637,7 @@ class Daemon(AuthJSONRPCServer):
     def _setup_data_directory(self):
         old_revision = 1
         self.startup_status = STARTUP_STAGES[1]
-        log.info("Loading databases...")
+        log.info("Loading databases")
         if self.created_data_dir:
             self._write_db_revision_file(self.current_db_revision)
             log.debug("Created the db revision file: %s", self.db_revision_file)
@@ -662,7 +662,7 @@ class Daemon(AuthJSONRPCServer):

         if old_revision < self.current_db_revision:
             from lbrynet.db_migrator import dbmigrator
-            log.info("Upgrading your databases...")
+            log.info("Upgrading your databases")
             d = threads.deferToThread(
                 dbmigrator.migrate_db, self.db_dir, old_revision, self.current_db_revision)
             d.addCallback(lambda _: update_version_file_and_print_success())
@@ -1315,7 +1315,8 @@ class Daemon(AuthJSONRPCServer):
         elif 'function' in p:
             fn = self.callable_methods.get(p['function'])
             if fn is None:
-                return self._render_response("Function not found", OK_CODE)
+                return self._render_response(
+                    "Function '" + p['function'] + "' is not a valid function", OK_CODE)
             return self._render_response(textwrap.dedent(fn.__doc__), OK_CODE)
         else:
             return self._render_response(textwrap.dedent(self.jsonrpc_help.__doc__), OK_CODE)
@@ -18,7 +18,7 @@ DOWNLOAD_RUNNING_CODE = 'running'
 # TODO: is this ever used?
 DOWNLOAD_STOPPED_CODE = 'stopped'
 STREAM_STAGES = [
-    (INITIALIZING_CODE, 'Initializing...'),
+    (INITIALIZING_CODE, 'Initializing'),
     (DOWNLOAD_METADATA_CODE, 'Downloading metadata'),
     (DOWNLOAD_RUNNING_CODE, 'Started stream'),
     (DOWNLOAD_STOPPED_CODE, 'Paused stream'),
@@ -34,7 +34,7 @@ class LBRYURIHandler(object):
         except:
             cmd = r'DIR = "$( cd "$(dirname "${BASH_SOURCE[0]}" )" && pwd )"' \
                   r'if [-z "$(pgrep lbrynet-daemon)"]; then' \
-                  r'echo "running lbrynet-daemon..."' \
+                  r'echo "running lbrynet-daemon"' \
                   r'$DIR / lbrynet - daemon &' \
                   r'sleep 3 # let the daemon load before connecting' \
                   r'fi'
@@ -27,7 +27,7 @@ DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"


 if [ -z "$(pgrep lbrynet-daemon)" ]; then
-  echo "running lbrynet-daemon..."
+  echo "running lbrynet-daemon"
   $DIR/lbrynet-daemon --no-launch &
   sleep 3 # let the daemon load before connecting
 fi
@@ -27,7 +27,7 @@ class LBRYURIHandler(object):
         if not self.started_daemon:
             cmd = r'DIR = "$( cd "$(dirname "${BASH_SOURCE[0]}" )" && pwd )"' \
                   r'if [-z "$(pgrep lbrynet-daemon)"]; then' \
-                  r'echo "running lbrynet-daemon..."' \
+                  r'echo "running lbrynet-daemon"' \
                   r'$DIR / lbrynet - daemon &' \
                   r'sleep 3 # let the daemon load before connecting' \
                   r'fi'
@@ -944,7 +944,7 @@ class TestTransfer(TestCase):
         self.assertEqual(hashsum.hexdigest(), "4ca2aafb4101c1e42235aad24fbb83be")

         def delete_lbry_file():
-            logging.debug("deleting the file...")
+            logging.debug("deleting the file")
             d = self.lbry_file_manager.delete_lbry_file(downloaders[0])
             d.addCallback(lambda _: self.lbry_file_manager.get_count_for_stream_hash(downloaders[0].stream_hash))
             d.addCallback(