Merge remote-tracking branch 'origin/master' into electron

jobevers 2017-02-21 12:07:36 -06:00
commit 70cf179139
11 changed files with 269 additions and 33 deletions


@@ -8,23 +8,26 @@ can and probably will change functionality and break backwards compatibility
at any time.
## [Unreleased]
## [0.8.6] - 2017-02-19
## [0.8.6rc0] - 2017-02-19
## [0.8.7] - 2017-02-21
## [0.8.6] - 2017-02-19
## [0.8.6rc0] - 2017-02-19
### Changed
* Add `file_get` by stream hash
* Add utils.call_later to replace reactor.callLater
###
### Fixed
* Fix unhandled error in `get`
* Fix sd blob timeout handling in `get_availability`, return 0.0
## [0.8.5] - 2017-02-18
## [0.8.5rc0] - 2017-02-18
### Fixed
* Fix result expected by ui from file_get for missing files
## [0.8.4] - 2017-02-17
## [0.8.4rc0] - 2017-02-17
@@ -37,19 +40,18 @@ at any time.
### Fixed
* Add missing traceback to logging
## [0.8.3] - 2017-02-15
### Fixed
* Get lbry files with pending claims
* Add better logging to help track down [#478](https://github.com/lbryio/lbry/issues/478)
* Catch UnknownNameErrors when resolving a name. [#479](https://github.com/lbryio/lbry/issues/479)
### Changed
* Add blob_get, descriptor_get, and blob_delete
* Add filter keyword args to blob_list
* Refactor get_availability
* Add optional peer search timeout, add peer_search_timeout setting
## [0.8.3rc3] - 2017-02-14
## [0.8.3rc2] - 2017-02-13


@@ -242,6 +242,7 @@ class BlobFile(HashBlob):
self.readers += 1
return file_handle
except IOError:
log.exception('Failed to open %s', self.file_path)
self.close_read_handle(file_handle)
return None


@@ -64,6 +64,8 @@ class StreamCreator(object):
def _finished(self):
pass
# TODO: move the stream creation process to its own thread and
# remove the reactor from this process.
def write(self, data):
from twisted.internet import reactor
self._write(data)
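The TODO above is about keeping CPU-bound work off the reactor thread. A minimal sketch of one way to do that with Twisted's thread pool is shown below; write_off_reactor is a hypothetical helper, not part of the codebase, and it assumes the blocking work stays inside _write as it does today:

from twisted.internet import threads

def write_off_reactor(creator, data):
    # deferToThread runs the blocking call in the reactor's thread pool and
    # returns a Deferred that fires back on the reactor thread with the result.
    return threads.deferToThread(creator._write, data)

Chunks would still need to be serialized so they are encrypted in order; deferToThread only moves the work off the main thread, it does not make it safe to run concurrently.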


@@ -77,6 +77,12 @@ class EncryptedFileStreamCreator(CryptStreamCreator):
return d
# TODO: this should be run its own thread. Encrypting a large file can
# be very cpu intensive and there is no need to run that on the
# main reactor thread. The FileSender mechanism that is used is
# great when sending over the network, but this is all local so
# we can simply read the file from the disk without needing to
# involve reactor.
def create_lbry_file(session, lbry_file_manager, file_name, file_handle, key=None,
iv_generator=None, suggested_file_name=None):
"""Turn a plain file into an LBRY File.
@@ -146,6 +152,10 @@ def create_lbry_file(session, lbry_file_manager, file_name, file_handle, key=None,
suggested_file_name)
def start_stream():
# TODO: Using FileSender isn't necessary, we can just read
# straight from the disk. The stream creation process
# should be in its own thread anyway so we don't need to
# worry about interacting with the twisted reactor
file_sender = FileSender()
d = file_sender.beginFileTransfer(file_handle, lbry_file_creator)
d.addCallback(lambda _: stop_file(lbry_file_creator))
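A rough sketch of the direct-read loop the TODOs describe, assuming the creator keeps the write()/stop() interface used above; feed_creator_from_disk and the 1 MiB chunk size are made up for illustration, and this is not what the shipped code does:

def feed_creator_from_disk(file_handle, lbry_file_creator, chunk_size=2 ** 20):
    # Read the already-open file in fixed-size chunks and hand each chunk to
    # the stream creator directly, instead of copying through FileSender.
    while True:
        chunk = file_handle.read(chunk_size)
        if not chunk:
            break
        lbry_file_creator.write(chunk)

stop_file(lbry_file_creator) would still be called afterwards to close out the stream, just as start_stream() does now.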


@@ -41,6 +41,7 @@ class EncryptedFileManager(object):
def __init__(self, session, stream_info_manager, sd_identifier, download_directory=None):
self.session = session
self.stream_info_manager = stream_info_manager
# TODO: why is sd_identifier part of the file manager?
self.sd_identifier = sd_identifier
self.lbry_files = []
self.sql_db = None


@@ -1106,7 +1106,7 @@ class Daemon(AuthJSONRPCServer):
############################################################################
@defer.inlineCallbacks
def jsonrpc_status(self, session_status=False, blockchain_status=False):
def jsonrpc_status(self, session_status=False):
"""
Return daemon status
@@ -1117,6 +1117,10 @@ class Daemon(AuthJSONRPCServer):
daemon status
"""
has_wallet = self.session and self.session.wallet
local_height = self.session.wallet.network.get_local_height() if has_wallet else 0
remote_height = self.session.wallet.network.get_server_height() if has_wallet else 0
best_hash = (yield self.session.wallet.get_best_blockhash()) if has_wallet else None
response = {
'lbry_id': base58.b58encode(self.lbryid)[:SHORT_ID_LEN],
'installation_id': conf.settings.get_installation_id()[:SHORT_ID_LEN],
@@ -1134,11 +1138,12 @@
else ''
),
},
'blocks_behind': (
self.session.wallet.blocks_behind
if has_wallet and self.wallet_type == LBRYUM_WALLET
else 'unknown'
),
'blocks_behind': remote_height - local_height, # deprecated. remove from UI, then here
'blockchain_status': {
'blocks': local_height,
'blocks_behind': remote_height - local_height,
'best_blockhash': best_hash,
}
}
if session_status:
blobs = yield self.session.blob_manager.get_all_verified_blobs()
@@ -1146,22 +1151,14 @@
'managed_blobs': len(blobs),
'managed_streams': len(self.lbry_file_manager.lbry_files),
}
if blockchain_status and has_wallet:
# calculate blocks_behind more accurately
local_height = self.session.wallet.network.get_local_height()
remote_height = self.session.wallet.network.get_server_height()
response['blocks_behind'] = remote_height - local_height
response['local_height'] = local_height
response['remote_height'] = remote_height
best_hash = yield self.session.wallet.get_best_blockhash()
response['blockchain_status'] = {'best_blockhash': best_hash}
defer.returnValue(response)
def jsonrpc_get_best_blockhash(self):
"""
DEPRECATED. Use `status blockchain_status=True` instead
"""
d = self.jsonrpc_status(blockchain_status=True)
d = self.jsonrpc_status()
d.addCallback(lambda x: self._render_response(
x['blockchain_status']['best_blockhash']))
return d
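For reference, a trimmed illustration of the shape the reworked status response now takes (all values are placeholders); callers should read the nested blockchain_status fields, since the top-level blocks_behind is marked deprecated above:

status = {
    'blocks_behind': 12,  # deprecated duplicate kept for old UIs
    'blockchain_status': {
        'blocks': 481000,      # local_height
        'blocks_behind': 12,   # remote_height - local_height
        'best_blockhash': '0000...placeholder',
    },
    # plus lbry_id, installation_id, is_first_run, startup_status, ...
}
blocks_behind = status['blockchain_status']['blocks_behind']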
@@ -1190,9 +1187,11 @@
elif status['startup_status']['code'] == LOADING_WALLET_CODE:
message = "Catching up with the blockchain."
progress = 0
if status['blocks_behind'] > 0:
message += ' ' + str(status['blocks_behind']) + " blocks behind."
progress = status['blocks_behind']
if status['blockchain_status']['blocks_behind'] > 0:
message += (
' ' + str(status['blockchain_status']['blocks_behind']) + " blocks behind."
)
progress = status['blockchain_status']['blocks_behind']
return {
'message': message,
@@ -1211,7 +1210,7 @@
"""
DEPRECATED. Use `status` instead
"""
d = self.jsonrpc_status(blockchain_status=True)
d = self.jsonrpc_status()
d.addCallback(lambda x: self._render_response(x['is_first_run']))
return d
@@ -1232,8 +1231,8 @@
"""
DEPRECATED. Use `status` instead
"""
d = self.jsonrpc_status(blockchain_status=True)
d.addCallback(lambda x: self._render_response(x['blocks_behind']))
d = self.jsonrpc_status()
d.addCallback(lambda x: self._render_response(x['blockchain_status']['blocks_behind']))
return d
def jsonrpc_version(self):


@@ -35,9 +35,9 @@ def main():
if message:
if (
status['startup_status']['code'] == LOADING_WALLET_CODE
and status['blocks_behind'] > 0
and status['blockchain_status']['blocks_behind'] > 0
):
message += '. Blocks left: ' + str(status['blocks_behind'])
message += '. Blocks left: ' + str(status['blockchain_status']['blocks_behind'])
print " Status: " + message
return 1

scripts/decrypt_blob.py (new file, 57 lines added)

@@ -0,0 +1,57 @@
"""Decrypt a single blob"""
import argparse
import binascii
import logging
import os
import sys
from twisted.internet import defer
from twisted.internet import reactor
from lbrynet import conf
from lbrynet.cryptstream import CryptBlob
from lbrynet.core import HashBlob
from lbrynet.core import log_support
log = logging.getLogger('decrypt_blob')
def main():
conf.initialize_settings()
parser = argparse.ArgumentParser()
parser.add_argument('blob_file')
parser.add_argument('hex_key')
parser.add_argument('hex_iv')
parser.add_argument('output')
args = parser.parse_args()
log_support.configure_console()
d = run(args)
reactor.run()
@defer.inlineCallbacks
def run(args):
try:
yield decrypt_blob(args.blob_file, args.hex_key, args.hex_iv, args.output)
except Exception:
log.exception('Failed to decrypt blob')
finally:
reactor.callLater(0, reactor.stop)
@defer.inlineCallbacks
def decrypt_blob(blob_file, key, iv, output):
filename = os.path.abspath(blob_file)
length = os.path.getsize(filename)
directory, blob_hash = os.path.split(filename)
blob = HashBlob.BlobFile(directory, blob_hash, True, length)
decryptor = CryptBlob.StreamBlobDecryptor(
blob, binascii.unhexlify(key), binascii.unhexlify(iv), length)
with open(output, 'w') as f:
yield decryptor.decrypt(f.write)
if __name__ == '__main__':
sys.exit(main())
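Going by the argument parser above, invocation looks roughly like this (every value is a placeholder); note that decrypt_blob() splits the path into directory and blob hash, so the file name must be the blob hash itself:

python scripts/decrypt_blob.py /path/to/blobfiles/<blob_hash> <hex_key> <hex_iv> decrypted.out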

scripts/encrypt_blob.py (new file, 71 lines added)

@@ -0,0 +1,71 @@
"""Encrypt a single file using the given key and iv"""
import argparse
import binascii
import logging
import os
import StringIO
import sys
from twisted.internet import defer
from twisted.internet import reactor
from lbrynet import conf
from lbrynet.cryptstream import CryptBlob
from lbrynet.core import HashBlob
from lbrynet.core import log_support
from lbrynet.core import cryptoutils
log = logging.getLogger('encrypt_blob')
def main():
conf.initialize_settings()
parser = argparse.ArgumentParser()
parser.add_argument('filename')
parser.add_argument('hex_key')
parser.add_argument('hex_iv')
args = parser.parse_args()
log_support.configure_console(level='DEBUG')
d = run(args)
reactor.run()
@defer.inlineCallbacks
def run(args):
try:
yield encrypt_blob(args.filename, args.hex_key, args.hex_iv)
except Exception:
log.exception('Failed to encrypt blob')
finally:
reactor.callLater(0, reactor.stop)
def encrypt_blob(filename, key, iv):
blob = Blob()
blob_maker = CryptBlob.CryptStreamBlobMaker(
binascii.unhexlify(key), binascii.unhexlify(iv), 0, blob)
with open(filename) as fin:
blob_maker.write(fin.read())
blob_maker.close()
class Blob(object):
def __init__(self):
self.data = StringIO.StringIO()
def write(self, data):
self.data.write(data)
def close(self):
hashsum = cryptoutils.get_lbry_hash_obj()
buffer = self.data.getvalue()
hashsum.update(buffer)
with open(hashsum.hexdigest(), 'w') as fout:
fout.write(buffer)
return defer.succeed(True)
if __name__ == '__main__':
sys.exit(main())
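Usage is analogous (placeholders again): python scripts/encrypt_blob.py /path/to/plaintext <hex_key> <hex_iv>. There is no output argument because Blob.close() writes the encrypted bytes to a file named after their LBRY hash in the current working directory. Also note that the whole input is read into memory and packed into a single blob, so the script is only practical for small test files.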


@@ -22,7 +22,6 @@ from lbrynet import conf
from lbrynet.core import Error
from lbrynet.core import Wallet
from lbrynet.core import BlobAvailability
from lbrynet.core import BlobManager
from lbrynet.core import HashAnnouncer
from lbrynet.core import PeerManager
from lbrynet.core import Session

scripts/reseed_file.py (new file, 94 lines added)

@@ -0,0 +1,94 @@
"""Reseed a file.
Given a file and a matching sd_blob,
re-chunk and encrypt the file, adding
the new blobs to the manager.
"""
import argparse
import binascii
import logging
import json
import os
import sys
from twisted.internet import defer
from twisted.internet import reactor
from twisted.protocols import basic
from lbrynet import conf
from lbrynet.core import BlobManager
from lbrynet.core import HashAnnouncer
from lbrynet.core import log_support
from lbrynet.cryptstream import CryptStreamCreator
log = logging.getLogger('reseed_file')
def main():
conf.initialize_settings()
parser = argparse.ArgumentParser()
parser.add_argument('input_file')
parser.add_argument('sd_blob', help='a json file containing a key and the IVs')
args = parser.parse_args()
log_support.configure_console()
run(args)
reactor.run()
@defer.inlineCallbacks
def run(args):
try:
yield reseed_file(args.input_file, args.sd_blob)
except Exception:
log.exception('Failed to reseed')
finally:
reactor.stop()
@defer.inlineCallbacks
def reseed_file(input_file, sd_blob):
sd_blob = SdBlob.new_instance(sd_blob)
db_dir = conf.settings['data_dir']
blobfile_dir = os.path.join(db_dir, "blobfiles")
announcer = HashAnnouncer.DummyHashAnnouncer()
blob_manager = BlobManager.DiskBlobManager(announcer, blobfile_dir, db_dir)
yield blob_manager.setup()
creator = CryptStreamCreator.CryptStreamCreator(
blob_manager, None, sd_blob.key(), sd_blob.iv_generator())
file_sender = basic.FileSender()
with open(input_file) as f:
yield file_sender.beginFileTransfer(f, creator)
yield creator.stop()
for blob_info in sd_blob.blob_infos():
if 'blob_hash' not in blob_info:
# the last blob is always empty and without a hash
continue
blob = yield blob_manager.get_blob(blob_info['blob_hash'], True)
if not blob.verified:
print "Blob {} is not verified".format(blob)
class SdBlob(object):
def __init__(self, contents):
self.contents = contents
def key(self):
return binascii.unhexlify(self.contents['key'])
def iv_generator(self):
for blob_info in self.blob_infos():
yield binascii.unhexlify(blob_info['iv'])
def blob_infos(self):
return self.contents['blobs']
@classmethod
def new_instance(cls, filename):
with open(filename) as f:
return cls(json.load(f))
if __name__ == '__main__':
sys.exit(main())
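As with the other scripts, the invocation follows the parser (placeholders): python scripts/reseed_file.py /path/to/original_file /path/to/sd_blob.json, where the JSON descriptor supplies the key and the per-blob IVs. Because the same key and IV sequence are reused, re-chunking should reproduce the original blob hashes, which is why the final loop can look up each hash from the descriptor and check blob.verified.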