Merge branch 'remove_claim_info_from_file'

Jack Robison 2017-11-28 12:58:10 -05:00
commit 188822ef20
5 changed files with 44 additions and 124 deletions


@@ -15,7 +15,6 @@ at anytime.
### Fixed
* Fixed unnecessarily verbose exchange rate error (https://github.com/lbryio/lbry/issues/984)
* Merged two separate dht test folders into one
*
### Deprecated
* `channel_list_mine`, replaced with `channel_list`
@@ -25,13 +24,14 @@ at anytime.
* Check claim schema in `publish` before trying to make the claim, return better error messages
* Renamed `channel_list_mine` to `channel_list`
* Changed `channel_list` to include channels where the certificate info has been imported but the claim is not in the wallet
* Changed `file_list`, `file_delete`, `file_set_status`, and `file_reflect` to no longer return claim related information.
### Added
* Added `channel_import` and `channel_export` commands
* Added `is_mine` field to `channel_list` results
### Removed
*
* Removed claim related filter arguments `name`, `claim_id`, and `outpoint` from `file_list`, `file_delete`, `file_set_status`, and `file_reflect`
*
@@ -57,7 +57,6 @@ at anytime.
* Removed some alternate methods of reading from blob files
* Removed `@AuthJSONRPCServer.queued` decorator
## [0.17.1] - 2017-10-25
### Fixed
* Fixed slow startup for nodes with many lbry files
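
The changelog entries above reduce the file commands to stream-level identifiers (`sd_hash`, `file_name`, `stream_hash`, `rowid`). As a hedged illustration (the endpoint URL, port, and payload shape below are assumptions about the daemon's JSON-RPC interface, not part of this commit), filtering by stream attribute now looks like this:

    import json
    import urllib2  # the codebase is Python 2 (it uses iteritems)

    def file_list_by_sd_hash(sd_hash):
        # Filter on a stream attribute; the claim-based filters
        # (name, claim_id, outpoint) were removed by this commit.
        payload = json.dumps({'method': 'file_list',
                              'params': {'sd_hash': sd_hash}})
        response = urllib2.urlopen('http://localhost:5279/lbryapi', payload)
        return json.loads(response.read())['result']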


@@ -112,12 +112,9 @@ class Checker(object):
class _FileID(IterableContainer):
"""The different ways a file can be identified"""
NAME = 'name'
SD_HASH = 'sd_hash'
FILE_NAME = 'file_name'
STREAM_HASH = 'stream_hash'
CLAIM_ID = "claim_id"
OUTPOINT = "outpoint"
ROWID = "rowid"
@@ -394,7 +391,7 @@ class Daemon(AuthJSONRPCServer):
def _stop_streams(self):
"""stop pending GetStream downloads"""
for claim_id, stream in self.streams.iteritems():
for sd_hash, stream in self.streams.iteritems():
stream.cancel(reason="daemon shutdown")
def _shutdown(self):
@@ -642,7 +639,7 @@ class Daemon(AuthJSONRPCServer):
defer.returnValue(report)
@defer.inlineCallbacks
def _download_name(self, name, claim_dict, claim_id, timeout=None, file_name=None):
def _download_name(self, name, claim_dict, sd_hash, timeout=None, file_name=None):
"""
Add a lbry file to the file manager, start the download, and return the new lbry file.
If it already exists in the file manager, return the existing lbry file
@@ -652,28 +649,27 @@ class Daemon(AuthJSONRPCServer):
def _download_finished(download_id, name, claim_dict):
report = yield self._get_stream_analytics_report(claim_dict)
self.analytics_manager.send_download_finished(download_id, name, report, claim_dict)
@defer.inlineCallbacks
def _download_failed(error, download_id, name, claim_dict):
report = yield self._get_stream_analytics_report(claim_dict)
self.analytics_manager.send_download_errored(error, download_id, name, claim_dict,
report)
if claim_id in self.streams:
downloader = self.streams[claim_id]
if sd_hash in self.streams:
downloader = self.streams[sd_hash]
result = yield downloader.finished_deferred
defer.returnValue(result)
else:
download_id = utils.random_string()
self.analytics_manager.send_download_started(download_id, name, claim_dict)
self.streams[claim_id] = GetStream(self.sd_identifier, self.session,
self.streams[sd_hash] = GetStream(self.sd_identifier, self.session,
self.exchange_rate_manager, self.max_key_fee,
self.disable_max_key_fee,
conf.settings['data_rate'], timeout,
file_name)
try:
lbry_file, finished_deferred = yield self.streams[claim_id].start(claim_dict, name)
lbry_file, finished_deferred = yield self.streams[sd_hash].start(claim_dict, name)
finished_deferred.addCallbacks(lambda _: _download_finished(download_id, name,
claim_dict),
lambda e: _download_failed(e, download_id, name,
@@ -686,11 +682,11 @@ class Daemon(AuthJSONRPCServer):
log.warning('Failed to get %s (%s)', name, err)
else:
log.error('Failed to get %s (%s)', name, err)
if self.streams[claim_id].downloader:
yield self.streams[claim_id].downloader.stop(err)
if self.streams[sd_hash].downloader:
yield self.streams[sd_hash].downloader.stop(err)
result = {'error': err.message}
finally:
del self.streams[claim_id]
del self.streams[sd_hash]
defer.returnValue(result)
@defer.inlineCallbacks
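
The re-keying in the hunk above is the core of the change: `self.streams` now maps an sd_hash to its in-flight GetStream, so two claims that resolve to the same stream share a single download instead of racing. A minimal standalone sketch of that dedup behaviour, with a plain callable standing in for GetStream:

    class StreamsByHash(object):
        def __init__(self):
            self.streams = {}  # sd_hash -> in-flight downloader

        def get_or_start(self, sd_hash, start_downloader):
            # Keyed by claim_id, two claims naming the same stream would
            # start two downloads; keyed by sd_hash they attach to one.
            if sd_hash not in self.streams:
                self.streams[sd_hash] = start_downloader()
            return self.streams[sd_hash]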
@@ -880,32 +876,6 @@ class Daemon(AuthJSONRPCServer):
size = None
message = None
claim = yield self.session.wallet.get_claim_by_claim_id(lbry_file.claim_id,
check_expire=False)
if claim and 'value' in claim:
metadata = claim['value']
else:
metadata = None
if claim and 'channel_name' in claim:
channel_name = claim['channel_name']
else:
channel_name = None
if lbry_file.txid and lbry_file.nout is not None:
outpoint = repr(ClaimOutpoint(lbry_file.txid, lbry_file.nout))
else:
outpoint = None
if claim and 'has_signature' in claim:
has_signature = claim['has_signature']
else:
has_signature = None
if claim and 'signature_is_valid' in claim:
signature_is_valid = claim['signature_is_valid']
else:
signature_is_valid = None
result = {
'completed': lbry_file.completed,
@@ -917,23 +887,13 @@ class Daemon(AuthJSONRPCServer):
'stream_name': lbry_file.stream_name,
'suggested_file_name': lbry_file.suggested_file_name,
'sd_hash': lbry_file.sd_hash,
'name': lbry_file.name,
'outpoint': outpoint,
'claim_id': lbry_file.claim_id,
'download_path': full_path,
'mime_type': mime_type,
'key': key,
'total_bytes': size,
'written_bytes': written_bytes,
'message': message,
'metadata': metadata
}
if channel_name is not None:
result['channel_name'] = channel_name
if has_signature is not None:
result['has_signature'] = has_signature
if signature_is_valid is not None:
result['signature_is_valid'] = signature_is_valid
defer.returnValue(result)
@defer.inlineCallbacks
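
With the block above deleted, building a file dict no longer makes a wallet round trip per file; claim metadata has to be fetched explicitly by callers that still want it. A hedged sketch of recombining the two, reusing `get_claim_metadata_for_sd_hash` from the code removed further down (whether that wallet method remains the right entry point is an assumption):

    from twisted.internet import defer

    @defer.inlineCallbacks
    def file_dict_with_claim(daemon, lbry_file):
        # The dict is claim-free after this commit; look the claim up
        # on the side, keyed by the stream's own hash.
        result = yield daemon._get_lbry_file_dict(lbry_file, full_status=True)
        stream_metadata = yield daemon.session.wallet.get_claim_metadata_for_sd_hash(
            lbry_file.sd_hash)
        if stream_metadata:  # (name, txid, nout), per the removed downloader code
            result['name'], _txid, _nout = stream_metadata
        defer.returnValue(result)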
@@ -1292,8 +1252,7 @@ class Daemon(AuthJSONRPCServer):
Usage:
file_list [--sd_hash=<sd_hash>] [--file_name=<file_name>] [--stream_hash=<stream_hash>]
[--claim_id=<claim_id>] [--outpoint=<outpoint>] [--rowid=<rowid>]
[--name=<name>]
[--rowid=<rowid>]
[-f]
Options:
@@ -1301,10 +1260,7 @@ class Daemon(AuthJSONRPCServer):
--file_name=<file_name> : get file with matching file name in the
downloads folder
--stream_hash=<stream_hash> : get file with matching stream hash
--claim_id=<claim_id> : get file with matching claim id
--outpoint=<outpoint> : get file with matching claim outpoint
--rowid=<rowid> : get file with matching row id
--name=<name> : get file with matching associated name claim
-f : full status, populate the 'message' and 'size' fields
Returns:
@@ -1321,16 +1277,12 @@ class Daemon(AuthJSONRPCServer):
'stream_name': (str) stream name,
'suggested_file_name': (str) suggested file name,
'sd_hash': (str) sd hash of file,
'name': (str) name claim attached to file
'outpoint': (str) claim outpoint attached to file
'claim_id': (str) claim ID attached to file,
'download_path': (str) download path of file,
'mime_type': (str) mime type of file,
'key': (str) key attached to file,
'total_bytes': (int) file size in bytes, None if full_status is false
'written_bytes': (int) written size in bytes
'message': (str), None if full_status is false
'metadata': (dict) Metadata dictionary
},
]
"""
@@ -1554,14 +1506,14 @@ class Daemon(AuthJSONRPCServer):
resolved = resolved['claim']
name = resolved['name']
claim_id = resolved['claim_id']
claim_dict = ClaimDict.load_dict(resolved['value'])
sd_hash = claim_dict.source_hash
if claim_id in self.streams:
if sd_hash in self.streams:
log.info("Already waiting on lbry://%s to start downloading", name)
yield self.streams[claim_id].data_downloading_deferred
yield self.streams[sd_hash].data_downloading_deferred
lbry_file = yield self._get_lbry_file(FileID.CLAIM_ID, claim_id, return_json=False)
lbry_file = yield self._get_lbry_file(FileID.SD_HASH, sd_hash, return_json=False)
if lbry_file:
if not os.path.isfile(os.path.join(lbry_file.download_directory, lbry_file.file_name)):
@@ -1572,7 +1524,7 @@ class Daemon(AuthJSONRPCServer):
log.info('Already have a file for %s', name)
result = yield self._get_lbry_file_dict(lbry_file, full_status=True)
else:
result = yield self._download_name(name, claim_dict, claim_id, timeout=timeout,
result = yield self._download_name(name, claim_dict, sd_hash, timeout=timeout,
file_name=file_name)
response = yield self._render_response(result)
defer.returnValue(response)
@@ -1585,19 +1537,14 @@ class Daemon(AuthJSONRPCServer):
Usage:
file_set_status <status> [--sd_hash=<sd_hash>] [--file_name=<file_name>]
[--stream_hash=<stream_hash>] [--claim_id=<claim_id>]
[--outpoint=<outpoint>] [--rowid=<rowid>]
[--name=<name>]
[--stream_hash=<stream_hash>] [--rowid=<rowid>]
Options:
--sd_hash=<sd_hash> : set status of file with matching sd hash
--file_name=<file_name> : set status of file with matching file name in the
downloads folder
--stream_hash=<stream_hash> : set status of file with matching stream hash
--claim_id=<claim_id> : set status of file with matching claim id
--outpoint=<outpoint> : set status of file with matching claim outpoint
--rowid=<rowid> : set status of file with matching row id
--name=<name> : set status of file with matching associated name claim
Returns:
(str) Confirmation message
@@ -1631,9 +1578,7 @@ class Daemon(AuthJSONRPCServer):
Usage:
file_delete [-f] [--delete_all] [--sd_hash=<sd_hash>] [--file_name=<file_name>]
[--stream_hash=<stream_hash>] [--claim_id=<claim_id>]
[--outpoint=<outpoint>] [--rowid=<rowid>]
[--name=<name>]
[--stream_hash=<stream_hash>] [--rowid=<rowid>]
Options:
-f, --delete_from_download_dir : delete file from download directory,
@@ -1644,10 +1589,7 @@ class Daemon(AuthJSONRPCServer):
--sd_hash=<sd_hash> : delete by file sd hash
--file_name=<file_name> : delete by file name in downloads folder
--stream_hash=<stream_hash> : delete by file stream hash
--claim_id=<claim_id> : delete by file claim id
--outpoint=<outpoint> : delete by file claim outpoint
--rowid=<rowid> : delete by file row id
--name=<name> : delete by associated name claim of file
Returns:
(bool) true if deletion was successful
@@ -1671,8 +1613,8 @@ class Daemon(AuthJSONRPCServer):
else:
for lbry_file in lbry_files:
file_name, stream_hash = lbry_file.file_name, lbry_file.stream_hash
if lbry_file.claim_id in self.streams:
del self.streams[lbry_file.claim_id]
if lbry_file.sd_hash in self.streams:
del self.streams[lbry_file.sd_hash]
yield self.lbry_file_manager.delete_lbry_file(lbry_file,
delete_file=delete_from_download_dir)
log.info("Deleted file: %s", file_name)
@@ -2690,8 +2632,7 @@ class Daemon(AuthJSONRPCServer):
Usage:
file_reflect [--sd_hash=<sd_hash>] [--file_name=<file_name>]
[--stream_hash=<stream_hash>] [--claim_id=<claim_id>]
[--outpoint=<outpoint>] [--rowid=<rowid>] [--name=<name>]
[--stream_hash=<stream_hash>] [--rowid=<rowid>]
[--reflector=<reflector>]
Options:
@@ -2699,10 +2640,7 @@ class Daemon(AuthJSONRPCServer):
--file_name=<file_name> : get file with matching file name in the
downloads folder
--stream_hash=<stream_hash> : get file with matching stream hash
--claim_id=<claim_id> : get file with matching claim id
--outpoint=<outpoint> : get file with matching claim outpoint
--rowid=<rowid> : get file with matching row id
--name=<name> : get file with matching associated name claim
--reflector=<reflector> : reflector server, ip address or url
by default choose a server from the config
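
Across the Daemon changes, the stream identity is now derived once at resolve time: the claim value is parsed into a ClaimDict and its `source_hash` becomes the sd_hash used for all later bookkeeping. A condensed sketch of just that step (the import path is an assumption; the field access mirrors the `get` hunk above):

    from lbryschema.claim import ClaimDict

    def sd_hash_for_resolved_claim(resolved):
        # Parse the resolved claim's value, then key all stream
        # bookkeeping on the stream's own hash rather than the claim.
        claim_dict = ClaimDict.load_dict(resolved['claim']['value'])
        return claim_dict.source_hash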


@@ -7,7 +7,7 @@ from zope.interface import implements
from twisted.internet import defer
from lbrynet.core.client.StreamProgressManager import FullStreamProgressManager
from lbrynet.core.Error import NoSuchSDHash, NoSuchStreamHash
from lbrynet.core.Error import NoSuchStreamHash
from lbrynet.core.utils import short_hash
from lbrynet.core.StreamDescriptor import StreamMetadata
from lbrynet.lbry_file.client.EncryptedFileDownloader import EncryptedFileSaver
@@ -15,12 +15,11 @@ from lbrynet.lbry_file.client.EncryptedFileDownloader import EncryptedFileDownlo
from lbrynet.file_manager.EncryptedFileStatusReport import EncryptedFileStatusReport
from lbrynet.interfaces import IStreamDownloaderFactory
from lbrynet.lbry_file.StreamDescriptor import save_sd_info
from lbrynet.core.Wallet import ClaimOutpoint
log = logging.getLogger(__name__)
def log_status(name, sd_hash, status):
def log_status(file_name, sd_hash, status):
if status == ManagedEncryptedFileDownloader.STATUS_RUNNING:
status_string = "running"
elif status == ManagedEncryptedFileDownloader.STATUS_STOPPED:
@@ -29,7 +28,7 @@ def log_status(name, sd_hash, status):
status_string = "finished"
else:
status_string = "unknown"
log.info("lbry://%s (%s) is %s", name, short_hash(sd_hash), status_string)
log.info("%s (%s) is %s", file_name, short_hash(sd_hash), status_string)
class ManagedEncryptedFileDownloader(EncryptedFileSaver):
@@ -48,11 +47,6 @@ class ManagedEncryptedFileDownloader(EncryptedFileSaver):
download_directory,
file_name)
self.sd_hash = None
self.txid = None
self.nout = None
self.outpoint = None
self.name = None
self.claim_id = None
self.rowid = rowid
self.lbry_file_manager = lbry_file_manager
self._saving_status = False
@@ -66,7 +60,7 @@ class ManagedEncryptedFileDownloader(EncryptedFileSaver):
yield self.load_file_attributes()
status = yield self.lbry_file_manager.get_lbry_file_status(self)
log_status(self.name, self.sd_hash, status)
log_status(self.file_name, self.sd_hash, status)
if status == ManagedEncryptedFileDownloader.STATUS_RUNNING:
# start returns self.finished_deferred
@@ -118,16 +112,6 @@ class ManagedEncryptedFileDownloader(EncryptedFileSaver):
else:
self.sd_hash = sd_hash
stream_metadata = yield self.wallet.get_claim_metadata_for_sd_hash(self.sd_hash)
if stream_metadata:
name, txid, nout = stream_metadata
self.name = name
self.txid = txid
self.nout = nout
self.outpoint = ClaimOutpoint(self.txid, self.nout)
else:
raise NoSuchSDHash(self.sd_hash)
self.claim_id = yield self.wallet.get_claimid(self.txid, self.nout)
defer.returnValue(None)
@defer.inlineCallbacks
@@ -135,7 +119,7 @@ class ManagedEncryptedFileDownloader(EncryptedFileSaver):
yield EncryptedFileSaver._start(self)
yield self.load_file_attributes()
status = yield self._save_status()
log_status(self.name, self.sd_hash, status)
log_status(self.file_name, self.sd_hash, status)
defer.returnValue(status)
def _get_finished_deferred_callback_value(self):
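
After these deletions the downloader carries no wallet-derived identity at all; status logging keys on the local file name plus a shortened sd_hash. A tiny usage sketch of the renamed helper (the hash below is a made-up placeholder, and the exact log text depends on `short_hash`):

    status = ManagedEncryptedFileDownloader.STATUS_RUNNING
    log_status('my_video.mp4', 'ab' * 48, status)
    # logs something like: my_video.mp4 (abababab) is running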


@@ -39,8 +39,8 @@ class EncryptedFileReflectorClient(Protocol):
lambda err: log.warning("An error occurred immediately: %s", err.getTraceback()))
@property
def name(self):
return "lbry://%s" % self.factory.name
def file_name(self):
return self.factory.file_name
@property
def blob_manager(self):
@@ -77,20 +77,20 @@ class EncryptedFileReflectorClient(Protocol):
if reason.check(error.ConnectionDone):
if not self.needed_blobs:
log.info("Reflector has all blobs for %s (%s)",
self.name, self.stream_descriptor)
self.file_name, self.stream_descriptor)
elif not self.reflected_blobs:
log.info("No more completed blobs for %s (%s) to reflect, %i are still needed",
self.name, self.stream_descriptor, len(self.needed_blobs))
self.file_name, self.stream_descriptor, len(self.needed_blobs))
else:
log.info('Finished sending reflector %i blobs for %s (%s)',
len(self.reflected_blobs), self.name, self.stream_descriptor)
len(self.reflected_blobs), self.file_name, self.stream_descriptor)
self.factory.finished_deferred.callback(self.reflected_blobs)
elif reason.check(error.ConnectionLost):
log.warning("Stopped reflecting %s (%s) after sending %i blobs", self.name,
log.warning("Stopped reflecting %s (%s) after sending %i blobs", self.file_name,
self.stream_descriptor, len(self.reflected_blobs))
self.factory.finished_deferred.callback(self.reflected_blobs)
else:
log.info('Reflector finished for %s (%s): %s', self.name, self.stream_descriptor,
log.info('Reflector finished for %s (%s): %s', self.file_name, self.stream_descriptor,
reason)
self.factory.finished_deferred.callback(reason)
@@ -134,7 +134,7 @@ class EncryptedFileReflectorClient(Protocol):
log.info("Reflector needs %s%i blobs for %s",
needs_desc,
len(filtered),
self.name)
self.file_name)
return filtered
d = self.factory.stream_info_manager.get_blobs_for_stream(self.factory.stream_hash)
@@ -227,7 +227,7 @@ class EncryptedFileReflectorClient(Protocol):
log.info("Sent reflector descriptor %s", self.next_blob_to_send)
else:
log.warning("Reflector failed to receive descriptor %s for %s",
self.next_blob_to_send, self.name)
self.next_blob_to_send, self.file_name)
self.blob_hashes_to_send.append(self.next_blob_to_send.blob_hash)
return self.set_not_uploading()
@@ -240,7 +240,7 @@ class EncryptedFileReflectorClient(Protocol):
return defer.succeed(True)
else:
log.warning("Reflector already has %s for %s", self.next_blob_to_send,
self.name)
self.file_name)
return self.set_not_uploading()
else: # Expecting Server Blob Response
if 'received_blob' not in response_dict:
@@ -249,10 +249,10 @@ class EncryptedFileReflectorClient(Protocol):
if response_dict['received_blob']:
self.reflected_blobs.append(self.next_blob_to_send.blob_hash)
log.debug("Sent reflector blob %s for %s", self.next_blob_to_send,
self.name)
self.file_name)
else:
log.warning("Reflector failed to receive blob %s for %s",
self.next_blob_to_send, self.name)
self.next_blob_to_send, self.file_name)
self.blob_hashes_to_send.append(self.next_blob_to_send.blob_hash)
return self.set_not_uploading()
@@ -288,12 +288,12 @@ class EncryptedFileReflectorClient(Protocol):
err.trap(ValueError)
if blob_hash not in self.failed_blob_hashes:
log.warning("Failed to reflect blob %s for %s, reason: %s",
str(blob_hash)[:16], self.name, err.getTraceback())
str(blob_hash)[:16], self.file_name, err.getTraceback())
self.blob_hashes_to_send.append(blob_hash)
self.failed_blob_hashes.append(blob_hash)
else:
log.warning("Failed second try reflecting blob %s for %s, giving up, reason: %s",
str(blob_hash)[:16], self.name, err.getTraceback())
str(blob_hash)[:16], self.file_name, err.getTraceback())
def send_next_request(self):
if self.file_sender is not None:
@@ -340,8 +340,8 @@ class EncryptedFileReflectorClientFactory(ClientFactory):
return self._lbry_file.stream_hash
@property
def name(self):
return self._lbry_file.name
def file_name(self):
return self._lbry_file.file_name
@property
def protocol_version(self):
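
The rename threads through three layers: the lbry file exposes `file_name`, the factory forwards it, and the protocol reads it for every log line, so reflector logs stay meaningful even for files with no associated claim. A minimal sketch of that forwarding chain with stub classes:

    class FileStub(object):
        file_name = 'fake_lbry_file'

    class FactoryStub(object):
        def __init__(self, lbry_file):
            self._lbry_file = lbry_file

        @property
        def file_name(self):
            # One-hop delegation, as in EncryptedFileReflectorClientFactory.
            return self._lbry_file.file_name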


@@ -16,8 +16,7 @@ class FakeLBRYFile(object):
self.blob_manager = blob_manager
self.stream_info_manager = stream_info_manager
self.stream_hash = stream_hash
self.name = "fake_uri"
self.file_name = 'fake_lbry_file'
class Node(object):
def __init__(self, *args, **kwargs):