From 2034104cfc237b9b0ee7fc02e1a7f3233e4752be Mon Sep 17 00:00:00 2001
From: Lex Berezhny
Date: Sat, 20 Apr 2019 02:11:19 -0400
Subject: [PATCH] test fixes

---
 lbrynet/schema/claim.py                            | 3 +++
 lbrynet/schema/compat.py                           | 5 ++++-
 tests/integration/test_file_commands.py            | 8 ++++----
 tests/integration/test_internal_transaction_api.py | 2 +-
 tests/integration/test_resolve_command.py          | 6 ++++--
 5 files changed, 16 insertions(+), 8 deletions(-)

diff --git a/lbrynet/schema/claim.py b/lbrynet/schema/claim.py
index 3994f4930..bb15d15fe 100644
--- a/lbrynet/schema/claim.py
+++ b/lbrynet/schema/claim.py
@@ -713,6 +713,9 @@ class Stream(BaseClaimSubType):
     def to_dict(self):
         claim = super().to_dict()
         claim.update(claim.pop('stream'))
+        if 'source' in claim:
+            if 'sd_hash' in claim['source']:
+                claim['source']['sd_hash'] = self.source.sd_hash
         fee = claim.get('fee', {})
         if 'address' in fee:
             fee['address'] = self.fee.address
diff --git a/lbrynet/schema/compat.py b/lbrynet/schema/compat.py
index 20d462a7c..c2ec532c9 100644
--- a/lbrynet/schema/compat.py
+++ b/lbrynet/schema/compat.py
@@ -9,7 +9,10 @@ from lbrynet.schema.types.v1.fee_pb2 import Fee as FeeMessage
 
 
 def from_old_json_schema(claim, payload: bytes):
-    value = json.loads(payload)
+    try:
+        value = json.loads(payload)
+    except:
+        raise DecodeError('Could not parse JSON.')
     stream = claim.stream
     stream.source.sd_hash = value['sources']['lbry_sd_hash']
     stream.source.media_type = value.get(
diff --git a/tests/integration/test_file_commands.py b/tests/integration/test_file_commands.py
index 02a456bbf..8a6265eaf 100644
--- a/tests/integration/test_file_commands.py
+++ b/tests/integration/test_file_commands.py
@@ -28,7 +28,7 @@ class FileCommands(CommandTestCase):
 
     async def test_download_different_timeouts(self):
         tx = await self.stream_create('foo', '0.01')
-        sd_hash = tx['outputs'][0]['value']['stream']['sd_hash']
+        sd_hash = tx['outputs'][0]['value']['source']['sd_hash']
         await self.daemon.jsonrpc_file_delete(claim_name='foo')
         all_except_sd = [
             blob_hash for blob_hash in self.server.blob_manager.completed_blob_hashes if blob_hash != sd_hash
@@ -70,7 +70,7 @@ class FileCommands(CommandTestCase):
 
     async def test_incomplete_downloads_erases_output_file_on_stop(self):
         tx = await self.stream_create('foo', '0.01')
-        sd_hash = tx['outputs'][0]['value']['stream']['sd_hash']
+        sd_hash = tx['outputs'][0]['value']['source']['sd_hash']
         file_info = self.sout(self.daemon.jsonrpc_file_list())[0]
         await self.daemon.jsonrpc_file_delete(claim_name='foo')
         blobs = await self.server_storage.get_blobs_for_stream(
@@ -89,7 +89,7 @@
     async def test_incomplete_downloads_retry(self):
         tx = await self.stream_create('foo', '0.01')
-        sd_hash = tx['outputs'][0]['value']['stream']['sd_hash']
+        sd_hash = tx['outputs'][0]['value']['source']['sd_hash']
         await self.daemon.jsonrpc_file_delete(claim_name='foo')
         blobs = await self.server_storage.get_blobs_for_stream(
             await self.server_storage.get_stream_hash_for_sd_hash(sd_hash)
         )
@@ -129,7 +129,7 @@
     async def test_unban_recovers_stream(self):
         BlobDownloader.BAN_TIME = .5  # fixme: temporary field, will move to connection manager or a conf
         tx = await self.stream_create('foo', '0.01', data=bytes([0] * (1 << 23)))
-        sd_hash = tx['outputs'][0]['value']['stream']['sd_hash']
+        sd_hash = tx['outputs'][0]['value']['source']['sd_hash']
         missing_blob_hash = (await self.daemon.jsonrpc_blob_list(sd_hash=sd_hash))[-2]
         await self.daemon.jsonrpc_file_delete(claim_name='foo')
         # backup blob
diff --git a/tests/integration/test_internal_transaction_api.py b/tests/integration/test_internal_transaction_api.py
index c425d2734..eaf28b057 100644
--- a/tests/integration/test_internal_transaction_api.py
+++ b/tests/integration/test_internal_transaction_api.py
@@ -36,7 +36,7 @@ class BasicTransactionTest(IntegrationTestCase):
         channel_tx = await Transaction.create([], [channel_txo], [self.account], self.account)
 
         stream = Claim()
-        stream.stream.media_type = "video/mp4"
+        stream.stream.source.media_type = "video/mp4"
         stream_txo = Output.pay_claim_name_pubkey_hash(
             l2d('1.0'), 'foo', stream, self.account.ledger.address_to_hash160(address1)
         )
diff --git a/tests/integration/test_resolve_command.py b/tests/integration/test_resolve_command.py
index d4b4d243f..37056579a 100644
--- a/tests/integration/test_resolve_command.py
+++ b/tests/integration/test_resolve_command.py
@@ -1,4 +1,5 @@
 import json
+from binascii import hexlify
 
 from lbrynet.testcase import CommandTestCase
 
@@ -69,13 +70,14 @@ class ResolveCommand(CommandTestCase):
         self.assertEqual(claim['claim']['depth'], json.loads(tx_details)['confirmations'])
 
         # resolve handles invalid data
-        txid = await self.blockchain_claim_name("gibberish", "cafecafe", "0.1")
+        txid = await self.blockchain_claim_name(
+            "gibberish", hexlify(b"{'invalid':'json'}").decode(), "0.1")
         await self.generate(1)
         response = await self.resolve("lbry://gibberish")
         self.assertSetEqual({'lbry://gibberish'}, set(response))
         claim = response['lbry://gibberish']['claim']
         self.assertEqual(claim['name'], 'gibberish')
-        self.assertEqual(claim['hex'], 'cafecafe')
+        self.assertEqual(claim['hex'], hexlify(b"{'invalid':'json'}").decode())
         self.assertFalse(claim['decoded_claim'])
         self.assertEqual(claim['txid'], txid)
         self.assertEqual(claim['effective_amount'], "0.1")