Use stream size from claim if available

This commit is contained in:
Jack Robison 2019-05-08 14:01:19 -04:00
parent a480b2d25f
commit 3ca2fee592
No known key found for this signature in database
GPG key ID: DF25C68FE0239BB2
3 changed files with 23 additions and 6 deletions

View file

@ -215,8 +215,6 @@ class Stream(BaseClaim):
if 'sd_hash' in kwargs:
self.source.sd_hash = kwargs.pop('sd_hash')
if 'file_size' in kwargs:
self.source.size = kwargs.pop('file_size')
if 'file_name' in kwargs:
self.source.name = kwargs.pop('file_name')
if 'file_hash' in kwargs:
@ -230,6 +228,9 @@ class Stream(BaseClaim):
elif self.source.media_type:
stream_type = guess_stream_type(self.source.media_type)
if 'file_size' in kwargs:
self.source.size = kwargs.pop('file_size')
if stream_type in ('image', 'video', 'audio'):
media = getattr(self, stream_type)
media_args = {'file_metadata': None}

View file

@ -462,8 +462,18 @@ class ManagedStream:
get_range = get_range.split('=')[1]
start, end = get_range.split('-')
size = 0
for blob in self.descriptor.blobs[:-1]:
size += blob.length - 1
if self.stream_claim_info and self.stream_claim_info.claim.stream.source.size:
size_from_claim = int(self.stream_claim_info.claim.stream.source.size)
if not size_from_claim <= size <= size_from_claim + 16:
raise ValueError("claim contains implausible stream size")
log.debug("using stream size from claim")
size = size_from_claim
elif self.stream_claim_info:
log.debug("estimating stream size")
start = int(start)
end = int(end) if end else size - 1
skip_blobs = start // 2097150

View file

@ -24,11 +24,11 @@ class RangeRequests(CommandTestCase):
await self.daemon.stream_manager.start()
return
async def _setup_stream(self, data: bytes, save_blobs: bool = True, save_files: bool = False):
async def _setup_stream(self, data: bytes, save_blobs: bool = True, save_files: bool = False, file_size=0):
self.daemon.conf.save_blobs = save_blobs
self.daemon.conf.save_files = save_files
self.data = data
await self.stream_create('foo', '0.01', data=self.data)
await self.stream_create('foo', '0.01', data=self.data, file_size=file_size)
if save_blobs:
self.assertTrue(len(os.listdir(self.daemon.blob_manager.blob_dir)) > 1)
await self.daemon.jsonrpc_file_list()[0].fully_reflected.wait()
@ -70,9 +70,10 @@ class RangeRequests(CommandTestCase):
self.assertEqual('bytes 0-14/15', content_range)
async def test_range_requests_0_padded_bytes(self, size: int = (MAX_BLOB_SIZE - 1) * 4,
expected_range: str = 'bytes 0-8388603/8388604', padding=b''):
expected_range: str = 'bytes 0-8388603/8388604', padding=b'',
file_size=0):
self.data = get_random_bytes(size)
await self._setup_stream(self.data)
await self._setup_stream(self.data, file_size=file_size)
streamed, content_range, content_length = await self._test_range_requests()
self.assertEqual(len(self.data + padding), content_length)
self.assertEqual(streamed, self.data + padding)
@ -93,6 +94,11 @@ class RangeRequests(CommandTestCase):
((MAX_BLOB_SIZE - 1) * 4) - 14, padding=b'\x00' * 14
)
async def test_range_requests_no_padding_size_from_claim(self):
size = ((MAX_BLOB_SIZE - 1) * 4) - 14
await self.test_range_requests_0_padded_bytes(size, padding=b'', file_size=size,
expected_range=f"bytes 0-{size}/{size+1}")
async def test_range_requests_15_padded_bytes(self):
await self.test_range_requests_0_padded_bytes(
((MAX_BLOB_SIZE - 1) * 4) - 15, padding=b'\x00' * 15