forked from LBRYCommunity/lbry-sdk
Merge pull request #1951 from lbryio/fix_invalid_blob_decode_startup
Fix invalid blob decode startup
This commit is contained in:
commit
3f4ed0a896
4 changed files with 26 additions and 1 deletions
|
@ -151,6 +151,7 @@ class BlobFile:
|
|||
os.remove(self.file_path)
|
||||
self.verified.clear()
|
||||
self.finished_writing.clear()
|
||||
self.length = None
|
||||
|
||||
def decrypt(self, key: bytes, iv: bytes) -> bytes:
|
||||
"""
|
||||
|
|
|
@ -131,7 +131,11 @@ class StreamDescriptor:
|
|||
assert os.path.isfile(blob.file_path)
|
||||
with open(blob.file_path, 'rb') as f:
|
||||
            json_bytes = f.read()
-           decoded = json.loads(json_bytes.decode())
+           try:
+               decoded = json.loads(json_bytes.decode())
+           except json.JSONDecodeError:
+               blob.delete()
+               raise InvalidStreamDescriptorError("Does not decode as valid JSON")
|
||||
if decoded['blobs'][-1]['length'] != 0:
|
||||
raise InvalidStreamDescriptorError("Does not end with a zero-length blob.")
|
||||
if any([blob_info['length'] == 0 for blob_info in decoded['blobs'][:-1]]):
|
||||
|
|
|
@ -133,6 +133,7 @@ class FileCommands(CommandTestCase):
|
|||
# restore blob
|
||||
os.rename(missing_blob.file_path + '__', missing_blob.file_path)
|
||||
self.server_blob_manager.blobs.clear()
|
||||
missing_blob = self.server_blob_manager.get_blob(missing_blob_hash)
|
||||
await self.server_blob_manager.blob_completed(missing_blob)
|
||||
await asyncio.wait_for(self.wait_files_to_complete(), timeout=1)
|
||||
|
||||
|
|
|
@ -4,6 +4,7 @@ import tempfile
|
|||
import shutil
|
||||
import json
|
||||
|
||||
from lbrynet.blob.blob_file import BlobFile
|
||||
from torba.testcase import AsyncioTestCase
|
||||
from lbrynet.conf import Config
|
||||
from lbrynet.error import InvalidStreamDescriptorError
|
||||
|
@ -107,3 +108,21 @@ class TestRecoverOldStreamDescriptors(AsyncioTestCase):
|
|||
self.assertEqual(stream_hash, descriptor.get_stream_hash())
|
||||
self.assertEqual(sd_hash, descriptor.calculate_old_sort_sd_hash())
|
||||
self.assertNotEqual(sd_hash, descriptor.calculate_sd_hash())
|
||||
|
||||
async def test_decode_corrupt_blob_raises_proper_exception_and_deletes_corrupt_file(self):
    """A sd blob that is not valid JSON must raise InvalidStreamDescriptorError,
    and the corrupt file must be deleted from disk as a side effect."""
    loop = asyncio.get_event_loop()
    tmp_dir = tempfile.mkdtemp()
    self.addCleanup(lambda: shutil.rmtree(tmp_dir))
    sd_hash = '9313d1807551186126acc3662e74d9de29cede78d4f133349ace846273ef116b9bb86be86c54509eb84840e4b032f6b2'
    # Plant non-JSON garbage where the stream descriptor blob is expected.
    with open(os.path.join(tmp_dir, sd_hash), 'wb') as fd:
        fd.write(b'doesnt work')
    blob = BlobFile(loop, tmp_dir, sd_hash)
    # Sanity: before decoding, the blob file exists and has a length.
    self.assertTrue(blob.file_exists)
    self.assertIsNotNone(blob.length)
    with self.assertRaises(InvalidStreamDescriptorError):
        await StreamDescriptor.from_stream_descriptor_blob(loop, tmp_dir, blob)
    # After the failed decode, the corrupt blob is gone and its state reset.
    self.assertFalse(blob.file_exists)
    # FIXME: this is an emergency PR, please move this to blob_file tests later
    self.assertIsNone(blob.length)
|
||||
|
|
Loading…
Reference in a new issue