update file_list docs, add blobs_remaining to file list args and to file dict result

Jack Robison 2019-02-01 16:17:10 -05:00
parent 744375b2c0
commit c75665d3f0
No known key found for this signature in database
GPG key ID: DF25C68FE0239BB2
3 changed files with 27 additions and 13 deletions


@@ -1308,8 +1308,9 @@ class Daemon(metaclass=JSONRPCServerType):
             file_list [--sd_hash=<sd_hash>] [--file_name=<file_name>] [--stream_hash=<stream_hash>]
                       [--rowid=<rowid>] [--claim_id=<claim_id>] [--outpoint=<outpoint>] [--txid=<txid>] [--nout=<nout>]
                       [--channel_claim_id=<channel_claim_id>] [--channel_name=<channel_name>]
-                      [--claim_name=<claim_name>] [--sort=<sort_by>] [--reverse] [--comparison=<comparison>]
-                      [--full_status=<full_status>]
+                      [--claim_name=<claim_name>] [--blobs_in_stream=<blobs_in_stream>]
+                      [--blobs_remaining=<blobs_remaining>] [--sort=<sort_by>]
+                      [--comparison=<comparison>] [--full_status=<full_status>] [--reverse]
 
         Options:
             --sd_hash=<sd_hash>                    : (str) get file with matching sd hash
@@ -1322,11 +1323,12 @@ class Daemon(metaclass=JSONRPCServerType):
             --txid=<txid>                          : (str) get file with matching claim txid
             --nout=<nout>                          : (int) get file with matching claim nout
             --channel_claim_id=<channel_claim_id>  : (str) get file with matching channel claim id
             --channel_name=<channel_name>          : (str) get file with matching channel name
             --claim_name=<claim_name>              : (str) get file with matching claim name
-            --sort=<sort_method>                   : (str) sort by any property, like 'file_name'
-                                                     or 'metadata.author'; to specify direction
-                                                     append ',asc' or ',desc'
+            --blobs_in_stream=<blobs_in_stream>    : (int) get file with matching blobs in stream
+            --blobs_remaining=<blobs_remaining>    : (int) amount of remaining blobs to download
+            --sort=<sort_by>                       : (str) field to sort by (one of the above filter fields)
+            --comparison=<comparison>              : (str) logical comparison (eq | ne | g | ge | l | le)
 
         Returns:
             (list) List of files
@@ -1345,21 +1347,24 @@ class Daemon(metaclass=JSONRPCServerType):
                     'download_path': (str) download path of file,
                     'mime_type': (str) mime type of file,
                     'key': (str) key attached to file,
-                    'total_bytes': (int) file size in bytes,
+                    'total_bytes_lower_bound': (int) lower bound file size in bytes,
+                    'total_bytes': (int) file upper bound size in bytes,
                     'written_bytes': (int) written size in bytes,
                     'blobs_completed': (int) number of fully downloaded blobs,
                     'blobs_in_stream': (int) total blobs on stream,
+                    'blobs_remaining': (int) total blobs remaining to download,
                     'status': (str) downloader status
                     'claim_id': (str) None if claim is not found else the claim id,
-                    'outpoint': (str) None if claim is not found else the tx and output,
                     'txid': (str) None if claim is not found else the transaction id,
                     'nout': (int) None if claim is not found else the transaction output index,
+                    'outpoint': (str) None if claim is not found else the tx and output,
                     'metadata': (dict) None if claim is not found else the claim metadata,
                     'channel_claim_id': (str) None if claim is not found or not signed,
                     'channel_name': (str) None if claim is not found or not signed,
                     'claim_name': (str) None if claim is not found else the claim name
                 },
             ]
+            }
         """
         sort = sort or 'status'
         comparison = comparison or 'eq'
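For context, the new filters can be exercised over the daemon's JSON-RPC interface. A minimal sketch in Python, assuming the daemon is reachable at its default localhost:5279 API address (the parameter names follow the options documented above; the endpoint is an assumption, not part of this commit):

    import requests

    # List files that still have blobs left to fetch, most-incomplete first.
    response = requests.post("http://localhost:5279", json={
        "method": "file_list",
        "params": {
            "blobs_remaining": 0,
            "comparison": "g",          # "greater than", per the documented operators
            "sort": "blobs_remaining",
            "reverse": True,
        },
    }).json()

    for item in response["result"]:
        print(item["file_name"], f"{item['blobs_completed']}/{item['blobs_in_stream']} blobs")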
@@ -1582,8 +1587,10 @@ class Daemon(metaclass=JSONRPCServerType):
                 log.info("already have matching stream for %s", uri)
                 stream = existing[0]
                 if not stream.running:
-                    log.info("resuming download")
-                    await self.stream_manager.start_stream(stream)
+                    full_path = os.path.join(stream.download_directory, stream.file_name)
+                    if not os.path.isfile(full_path):
+                        log.info("resuming download")
+                        await self.stream_manager.start_stream(stream)
             else:
                 stream = await self.stream_manager.download_stream_from_claim(
                     self.dht_node, resolved, file_name, timeout, fee_amount, fee_address
@@ -1620,7 +1627,7 @@ class Daemon(metaclass=JSONRPCServerType):
         if not streams:
             raise Exception(f'Unable to find a file for {kwargs}')
         stream = streams[0]
-        if status == 'start' and not stream.running and not stream.finished:
+        if status == 'start' and not stream.running:
             await self.stream_manager.start_stream(stream)
             msg = "Resumed download"
         elif status == 'stop' and stream.running:
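A hedged example of driving the status handler touched above. The method name file_set_status is an assumption based on the surrounding code (the start/stop branches and "Resumed download" message) and is not confirmed by this diff:

    import requests

    # Hypothetical call; replace <sd_hash> with a real stream hash. With the relaxed condition
    # above, a finished stream can now be restarted through the 'start' branch as well.
    result = requests.post("http://localhost:5279", json={
        "method": "file_set_status",        # assumed command name
        "params": {"status": "start", "sd_hash": "<sd_hash>"},
    }).json()
    print(result)                           # expected to report "Resumed download" if it was stopped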


@@ -100,6 +100,10 @@ class ManagedStream:
     def sd_hash(self):
         return self.descriptor.sd_hash
 
+    @property
+    def blobs_remaining(self) -> int:
+        return self.blobs_in_stream - self.blobs_completed
+
     def as_dict(self) -> typing.Dict:
         full_path = os.path.join(self.download_directory, self.file_name)
         if not os.path.isfile(full_path):
@@ -130,6 +134,7 @@ class ManagedStream:
             'written_bytes': written_bytes,
             'blobs_completed': self.blobs_completed,
             'blobs_in_stream': self.blobs_in_stream,
+            'blobs_remaining': self.blobs_remaining,
             'status': self.status,
             'claim_id': self.claim_id,
             'txid': self.txid,
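Downstream consumers can read the new blobs_remaining key directly instead of recomputing it. A minimal sketch of deriving progress from a single file_list / as_dict entry (the keys follow the output shown above; the values are made up):

    entry = {
        "blobs_completed": 12,
        "blobs_in_stream": 96,
        "blobs_remaining": 84,   # always blobs_in_stream - blobs_completed, per the new property
    }

    done = entry["blobs_completed"] / entry["blobs_in_stream"] if entry["blobs_in_stream"] else 0.0
    print(f"{done:.0%} downloaded, {entry['blobs_remaining']} blobs remaining")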


@@ -32,7 +32,9 @@ filter_fields = [
     'nout',
     'channel_claim_id',
     'channel_name',
-    'full_status'
+    'full_status',  # TODO: remove
+    'blobs_remaining',
+    'blobs_in_stream'
 ]
 
 comparison_operators = {
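filter_fields and comparison_operators are what the file_list filtering relies on. A rough sketch of how the pieces fit together, assuming the operator names documented in the daemon docstring (eq | ne | g | ge | l | le) map onto ordinary Python comparisons; the actual contents of comparison_operators are not shown in this diff:

    import operator

    # Assumed mapping; the real dict lives next to filter_fields in this module.
    comparison_operators = {
        'eq': operator.eq, 'ne': operator.ne,
        'g': operator.gt, 'ge': operator.ge,
        'l': operator.lt, 'le': operator.le,
    }
    filter_fields = ['status', 'blobs_remaining', 'blobs_in_stream']  # trimmed copy of the list above

    def filter_streams(streams, field, value, comparison='eq'):
        # Reject unknown fields, then keep streams whose attribute satisfies the comparison.
        if field not in filter_fields:
            raise ValueError(f"'{field}' is not a filterable field")
        compare = comparison_operators[comparison]
        return [s for s in streams if compare(getattr(s, field), value)]

    # e.g. streams that still need blobs: filter_streams(streams, 'blobs_remaining', 0, 'g')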
@@ -66,7 +68,7 @@ class StreamManager:
 
     async def start_stream(self, stream: ManagedStream):
         path = os.path.join(stream.download_directory, stream.file_name)
-        if not stream.running or not os.path.isfile(path):
+        if not stream.running and not os.path.isfile(path):
             if stream.downloader:
                 stream.downloader.stop()
                 stream.downloader = None