Merge pull request #2447 from lbryio/file_encoder

use json_file_encoder for managed streams (file list)
Jack Robison 2019-09-13 11:21:36 -04:00 committed by GitHub
commit a7b205603c
4 changed files with 44 additions and 167 deletions


@@ -218,15 +218,44 @@ class JSONResponseEncoder(JSONEncoder):
         return result

     def encode_file(self, managed_stream):
-        file = managed_stream.as_dict()
+        output_exists = managed_stream.output_file_exists
         tx_height = managed_stream.stream_claim_info.height
         best_height = self.ledger.headers.height
-        file.update({
+        return {
+            'streaming_url': managed_stream.stream_url,
+            'completed': managed_stream.completed,
+            'file_name': managed_stream.file_name if output_exists else None,
+            'download_directory': managed_stream.download_directory if output_exists else None,
+            'download_path': managed_stream.full_path if output_exists else None,
+            'points_paid': 0.0,
+            'stopped': not managed_stream.running,
+            'stream_hash': managed_stream.stream_hash,
+            'stream_name': managed_stream.descriptor.stream_name,
+            'suggested_file_name': managed_stream.descriptor.suggested_file_name,
+            'sd_hash': managed_stream.descriptor.sd_hash,
+            'mime_type': managed_stream.mime_type,
+            'key': managed_stream.descriptor.key,
+            'total_bytes_lower_bound': managed_stream.descriptor.lower_bound_decrypted_length(),
+            'total_bytes': managed_stream.descriptor.upper_bound_decrypted_length(),
+            'written_bytes': managed_stream.written_bytes,
+            'blobs_completed': managed_stream.blobs_completed,
+            'blobs_in_stream': managed_stream.blobs_in_stream,
+            'blobs_remaining': managed_stream.blobs_remaining,
+            'status': managed_stream.status,
+            'claim_id': managed_stream.claim_id,
+            'txid': managed_stream.txid,
+            'nout': managed_stream.nout,
+            'outpoint': managed_stream.outpoint,
+            'metadata': managed_stream.metadata,
+            'protobuf': managed_stream.metadata_protobuf,
+            'channel_claim_id': managed_stream.channel_claim_id,
+            'channel_name': managed_stream.channel_name,
+            'claim_name': managed_stream.claim_name,
+            'content_fee': managed_stream.content_fee,
             'height': tx_height,
-            'confirmations': (best_height+1) - tx_height if tx_height > 0 else tx_height,
+            'confirmations': (best_height + 1) - tx_height if tx_height > 0 else tx_height,
             'timestamp': self.ledger.headers[tx_height]['timestamp'] if 0 < tx_height <= best_height else None
-        })
-        return file
+        }

     def encode_claim(self, claim):
         encoded = getattr(claim, claim.claim_type).to_dict()
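The hunk above moves the whole file_list entry into the encoder, so encode_file now returns one dict instead of patching as_dict() output with transaction fields. For readers unfamiliar with the mechanism, here is a minimal, self-contained sketch of how a JSONEncoder subclass dispatches to per-type helpers; the class name, the sd_hash duck-type check, and the trimmed encode_file body are illustrative stand-ins, not the lbry implementation:

from json import JSONEncoder

class FileListEncoder(JSONEncoder):
    """Toy encoder mirroring the dispatch pattern used above."""

    def default(self, obj):
        # json.dumps() calls default() for any object it cannot serialize
        # natively; returning a plain dict hands control back to the
        # standard machinery.
        if hasattr(obj, 'sd_hash'):  # stand-in for isinstance(obj, ManagedStream)
            return self.encode_file(obj)
        return super().default(obj)

    def encode_file(self, stream):
        # Trimmed-down stand-in for the full mapping in the diff above.
        return {'sd_hash': stream.sd_hash, 'completed': stream.completed}

# json.dumps(streams, cls=FileListEncoder) then yields the file_list shape.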


@@ -90,20 +90,6 @@ def get_claims_from_stream_hashes(transaction: sqlite3.Connection,
     }


-def get_content_claim_from_outpoint(transaction: sqlite3.Connection,
-                                    outpoint: str) -> typing.Optional[StoredStreamClaim]:
-    query = (
-        "select content_claim.stream_hash, c.*, case when c.channel_claim_id is not null then "
-        "  (select claim_name from claim where claim_id==c.channel_claim_id) "
-        "  else null end as channel_name "
-        " from content_claim "
-        " inner join claim c on c.claim_outpoint=content_claim.claim_outpoint and content_claim.claim_outpoint=?"
-    )
-    claim_fields = transaction.execute(query, (outpoint, )).fetchone()
-    if claim_fields:
-        return StoredStreamClaim(*claim_fields)
-
-
 def _batched_select(transaction, query, parameters, batch_size=900):
     for start_index in range(0, len(parameters), batch_size):
         current_batch = parameters[start_index:start_index+batch_size]
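As context for the hunk above: _batched_select chunks its parameter list because SQLite limits bound variables to 999 per statement by default. A self-contained sketch of the same batching idea (not the exact lbry helper):

import sqlite3
import typing

def batched_select(conn: sqlite3.Connection, query: str,
                   parameters: typing.Sequence, batch_size: int = 900):
    # Stay under SQLite's default 999-variable cap by substituting at
    # most `batch_size` placeholders per execute() call.
    for start in range(0, len(parameters), batch_size):
        batch = list(parameters[start:start + batch_size])
        placeholders = ', '.join('?' * len(batch))
        yield from conn.execute(query.format(placeholders), batch)

# usage: rows = list(batched_select(conn, "select * from blob where blob_hash in ({})", hashes))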
@@ -325,31 +311,6 @@ class SQLiteStorage(SQLiteMixin):
             "select status from blob where blob_hash=?", blob_hash
         )

-    def should_announce(self, blob_hash: str):
-        return self.run_and_return_one_or_none(
-            "select should_announce from blob where blob_hash=?", blob_hash
-        )
-
-    def count_should_announce_blobs(self):
-        return self.run_and_return_one_or_none(
-            "select count(*) from blob where should_announce=1 and status='finished'"
-        )
-
-    def get_all_should_announce_blobs(self):
-        return self.run_and_return_list(
-            "select blob_hash from blob where should_announce=1 and status='finished'"
-        )
-
-    def get_all_finished_blobs(self):
-        return self.run_and_return_list(
-            "select blob_hash from blob where status='finished'"
-        )
-
-    def count_finished_blobs(self):
-        return self.run_and_return_one_or_none(
-            "select count(*) from blob where status='finished'"
-        )
-
     def update_last_announced_blobs(self, blob_hashes: typing.List[str]):
         def _update_last_announced_blobs(transaction: sqlite3.Connection):
             last_announced = self.time_getter()
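Every helper deleted in this hunk was a one-query wrapper over the mixin's run_and_return_one_or_none / run_and_return_list runners, which is why they could be dropped wholesale once unused. A rough sketch of that wrapper shape; the runner bodies below are inferred from the call sites, not copied from SQLiteMixin:

import sqlite3
import typing

def run_and_return_one_or_none(conn: sqlite3.Connection, query: str,
                               *args) -> typing.Optional[typing.Any]:
    # First column of the first row, or None when nothing matches.
    row = conn.execute(query, args).fetchone()
    return row[0] if row else None

def run_and_return_list(conn: sqlite3.Connection, query: str, *args) -> typing.List:
    # First column of every matching row.
    return [row[0] for row in conn.execute(query, args).fetchall()]

# e.g. the deleted count_finished_blobs() reduces to:
#   run_and_return_one_or_none(conn, "select count(*) from blob where status='finished'")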
@@ -427,26 +388,6 @@ class SQLiteStorage(SQLiteMixin):
             }
         return self.db.run(_sync_blobs)

-    def sync_files_to_blobs(self):
-        def _sync_blobs(transaction: sqlite3.Connection):
-            transaction.executemany(
-                "update file set status='stopped' where stream_hash=?",
-                transaction.execute(
-                    "select distinct sb.stream_hash from stream_blob sb "
-                    "inner join blob b on b.blob_hash=sb.blob_hash and b.status=='pending'"
-                ).fetchall()
-            )
-        return self.db.run(_sync_blobs)
-
-    def set_files_as_streaming(self, stream_hashes: typing.List[str]):
-        def _set_streaming(transaction: sqlite3.Connection):
-            transaction.executemany(
-                "update file set file_name=null, download_directory=null where stream_hash=?",
-                [(stream_hash, ) for stream_hash in stream_hashes]
-            )
-        return self.db.run(_set_streaming)
-
     # # # # # # # # # stream functions # # # # # # # # #

     async def stream_exists(self, sd_hash: str) -> bool:
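The deleted sync_files_to_blobs relied on a compact sqlite3 pattern: feeding executemany directly with the rows fetched by a select. A standalone sketch with a toy in-memory schema:

import sqlite3

conn = sqlite3.connect(':memory:')
conn.executescript("""
    create table blob (blob_hash text, status text);
    create table stream_blob (stream_hash text, blob_hash text);
    create table file (stream_hash text, status text);
""")

# Each (stream_hash,) tuple fetched by the select becomes one parameter
# set for the update, mirroring the deleted _sync_blobs transaction body.
conn.executemany(
    "update file set status='stopped' where stream_hash=?",
    conn.execute(
        "select distinct sb.stream_hash from stream_blob sb "
        "inner join blob b on b.blob_hash=sb.blob_hash and b.status='pending'"
    ).fetchall()
)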
@@ -459,11 +400,6 @@ class SQLiteStorage(SQLiteMixin):
                                              "s.stream_hash=f.stream_hash and s.sd_hash=?", sd_hash)
         return streams is not None

-    def rowid_for_stream(self, stream_hash: str) -> typing.Awaitable[typing.Optional[int]]:
-        return self.run_and_return_one_or_none(
-            "select rowid from file where stream_hash=?", stream_hash
-        )
-
     def store_stream(self, sd_blob: 'BlobFile', descriptor: 'StreamDescriptor'):
         return self.db.run(store_stream, sd_blob, descriptor)
@@ -509,12 +445,6 @@ class SQLiteStorage(SQLiteMixin):
             "select stream_hash from stream where sd_hash = ?", sd_blob_hash
         )

-    def get_stream_info_for_sd_hash(self, sd_blob_hash):
-        return self.run_and_return_one_or_none(
-            "select stream_hash, stream_name, suggested_filename, stream_key from stream where sd_hash = ?",
-            sd_blob_hash
-        )
-
     def delete_stream(self, descriptor: 'StreamDescriptor'):
         return self.db.run_with_foreign_keys_disabled(delete_stream, descriptor)
@@ -788,55 +718,6 @@ class SQLiteStorage(SQLiteMixin):
             claim['effective_amount'] = calculate_effective_amount(claim['amount'], supports)
         return claim

-    async def get_claims_from_stream_hashes(self, stream_hashes: typing.List[str],
-                                            include_supports: typing.Optional[bool] = True):
-        claims = await self.db.run(get_claims_from_stream_hashes, stream_hashes)
-        return {stream_hash: claim_info.as_dict() for stream_hash, claim_info in claims.items()}
-
-    async def get_claim(self, claim_outpoint, include_supports=True):
-        claim_info = await self.db.run(get_content_claim_from_outpoint, claim_outpoint)
-        if not claim_info:
-            return
-        result = claim_info.as_dict()
-        if include_supports:
-            supports = await self.get_supports(result['claim_id'])
-            result['supports'] = supports
-            result['effective_amount'] = calculate_effective_amount(result['amount'], supports)
-        return result
-
-    def get_unknown_certificate_ids(self):
-        def _get_unknown_certificate_claim_ids(transaction):
-            return [
-                claim_id for (claim_id,) in transaction.execute(
-                    "select distinct c1.channel_claim_id from claim as c1 "
-                    "where c1.channel_claim_id!='' "
-                    "and c1.channel_claim_id not in "
-                    "(select c2.claim_id from claim as c2)"
-                ).fetchall()
-            ]
-        return self.db.run(_get_unknown_certificate_claim_ids)
-
-    async def get_pending_claim_outpoints(self):
-        claim_outpoints = await self.run_and_return_list("select claim_outpoint from claim where height=-1")
-        results = {}  # {txid: [nout, ...]}
-        for outpoint_str in claim_outpoints:
-            txid, nout = outpoint_str.split(":")
-            outputs = results.get(txid, [])
-            outputs.append(int(nout))
-            results[txid] = outputs
-        if results:
-            log.debug("missing transaction heights for %i claims", len(results))
-        return results
-
-    def save_claim_tx_heights(self, claim_tx_heights):
-        def _save_claim_heights(transaction):
-            for outpoint, height in claim_tx_heights.items():
-                transaction.execute(
-                    "update claim set height=? where claim_outpoint=? and height=-1",
-                    (height, outpoint)
-                )
-        return self.db.run(_save_claim_heights)
-
     # # # # # # # # # reflector functions # # # # # # # # #

     def update_reflected_stream(self, sd_hash, reflector_address, success=True):
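The deleted get_pending_claim_outpoints also documents the txid:nout outpoint convention this module uses; grouping such strings by transaction is small enough to show on its own:

from collections import defaultdict
import typing

def group_outpoints(outpoints: typing.List[str]) -> typing.Dict[str, typing.List[int]]:
    # "txid:nout" -> {txid: [nout, ...]}, as in the deleted helper.
    results: typing.Dict[str, typing.List[int]] = defaultdict(list)
    for outpoint in outpoints:
        txid, nout = outpoint.split(":")
        results[txid].append(int(nout))
    return dict(results)

# group_outpoints(["aa:0", "aa:2", "bb:1"]) == {"aa": [0, 2], "bb": [1]}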


@@ -123,6 +123,14 @@ class ManagedStream:
     def written_bytes(self) -> int:
         return 0 if not self.output_file_exists else os.stat(self.full_path).st_size

+    @property
+    def completed(self):
+        return self.written_bytes >= self.descriptor.lower_bound_decrypted_length()
+
+    @property
+    def stream_url(self):
+        return f"http://{self.config.streaming_host}:{self.config.streaming_port}/stream/{self.sd_hash}"
+
     async def update_status(self, status: str):
         assert status in [self.STATUS_RUNNING, self.STATUS_STOPPED, self.STATUS_FINISHED]
         self._status = status
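These two properties lift logic that previously lived only inside as_dict() (removed in the next hunk), so the encoder and the tests can read it directly. A simplified, self-contained version of the same pattern; the class and field names are illustrative, not the lbry API:

class StreamInfo:
    """Illustrative stand-in for the ManagedStream surface shown above."""

    def __init__(self, written_bytes: int, lower_bound_bytes: int,
                 host: str, port: int, sd_hash: str):
        self.written_bytes = written_bytes
        # Minimum possible decrypted size; the descriptor exposes both
        # lower and upper bounds, and completion checks the lower one.
        self.lower_bound_bytes = lower_bound_bytes
        self.host, self.port, self.sd_hash = host, port, sd_hash

    @property
    def completed(self) -> bool:
        return self.written_bytes >= self.lower_bound_bytes

    @property
    def stream_url(self) -> str:
        return f"http://{self.host}:{self.port}/stream/{self.sd_hash}"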
@@ -203,47 +211,6 @@ class ManagedStream:
     def mime_type(self):
         return guess_media_type(os.path.basename(self.descriptor.suggested_file_name))[0]

-    def as_dict(self) -> typing.Dict:
-        full_path = self.full_path
-        file_name = self.file_name
-        download_directory = self.download_directory
-        if not self.output_file_exists:
-            full_path = None
-            file_name = None
-            download_directory = None
-        return {
-            'streaming_url': f"http://{self.config.streaming_host}:{self.config.streaming_port}/stream/{self.sd_hash}",
-            'completed': self.written_bytes >= self.descriptor.lower_bound_decrypted_length(),
-            'file_name': file_name,
-            'download_directory': download_directory,
-            'points_paid': 0.0,
-            'stopped': not self.running,
-            'stream_hash': self.stream_hash,
-            'stream_name': self.descriptor.stream_name,
-            'suggested_file_name': self.descriptor.suggested_file_name,
-            'sd_hash': self.descriptor.sd_hash,
-            'download_path': full_path,
-            'mime_type': self.mime_type,
-            'key': self.descriptor.key,
-            'total_bytes_lower_bound': self.descriptor.lower_bound_decrypted_length(),
-            'total_bytes': self.descriptor.upper_bound_decrypted_length(),
-            'written_bytes': self.written_bytes,
-            'blobs_completed': self.blobs_completed,
-            'blobs_in_stream': self.blobs_in_stream,
-            'blobs_remaining': self.blobs_remaining,
-            'status': self.status,
-            'claim_id': self.claim_id,
-            'txid': self.txid,
-            'nout': self.nout,
-            'outpoint': self.outpoint,
-            'metadata': self.metadata,
-            'protobuf': self.metadata_protobuf,
-            'channel_claim_id': self.channel_claim_id,
-            'channel_name': self.channel_name,
-            'claim_name': self.claim_name,
-            'content_fee': self.content_fee
-        }
-
     @classmethod
     async def create(cls, loop: asyncio.AbstractEventLoop, config: 'Config', blob_manager: 'BlobManager',
                      file_path: str, key: typing.Optional[bytes] = None,


@@ -43,11 +43,11 @@ class TestManagedStream(BlobExchangeTestBase):
     async def test_status_file_completed(self):
         await self._test_transfer_stream(10)
         self.assertTrue(self.stream.output_file_exists)
-        self.assertTrue(self.stream.as_dict()['completed'])
+        self.assertTrue(self.stream.completed)
         with open(self.stream.full_path, 'w+b') as outfile:
             outfile.truncate(1)
         self.assertTrue(self.stream.output_file_exists)
-        self.assertFalse(self.stream.as_dict()['completed'])
+        self.assertFalse(self.stream.completed)

     async def _test_transfer_stream(self, blob_count: int, mock_accumulate_peers=None, stop_when_done=True):
         await self.setup_stream(blob_count)