From 4fcfa0b193ade85f95cad3ad8520dae8954f50a9 Mon Sep 17 00:00:00 2001 From: Victor Shyba Date: Mon, 7 Sep 2020 15:51:49 -0300 Subject: [PATCH 01/17] add channel information on resolve protobuf and test for its presence --- lbry/db/queries/resolve.py | 24 +++++++++++++------ lbry/schema/result.py | 7 +++++- .../integration/blockchain/test_blockchain.py | 15 ++++++++++++ 3 files changed, 38 insertions(+), 8 deletions(-) diff --git a/lbry/db/queries/resolve.py b/lbry/db/queries/resolve.py index 5139f18ea..9b4f36c4b 100644 --- a/lbry/db/queries/resolve.py +++ b/lbry/db/queries/resolve.py @@ -1,6 +1,5 @@ import logging import itertools -from operator import itemgetter from typing import List, Dict from lbry.schema.url import URL @@ -15,29 +14,37 @@ from .search import search_claims log = logging.getLogger(__name__) -def _get_referenced_rows(txo_rows: List[dict], censor_channels: List[bytes]): +def _get_referenced_rows(txo_rows: List[Output], censor_channels: List[bytes]): # censor = context().get_resolve_censor() - repost_hashes = set(filter(None, map(itemgetter('reposted_claim_hash'), txo_rows))) + repost_hashes = set(txo.reposted_claim.claim_hash for txo in txo_rows if txo.reposted_claim) channel_hashes = set(itertools.chain( - filter(None, map(itemgetter('channel_hash'), txo_rows)), + (txo.channel.claim_hash for txo in txo_rows if txo.channel), censor_channels )) reposted_txos = [] if repost_hashes: reposted_txos = search_claims(**{'claim.claim_hash__in': repost_hashes}) - channel_hashes |= set(filter(None, map(itemgetter('channel_hash'), reposted_txos))) + if reposted_txos: + reposted_txos = reposted_txos[0] + channel_hashes |= set(txo.channel.claim_hash for txo in reposted_txos if txo.channel) channel_txos = [] if channel_hashes: channel_txos = search_claims(**{'claim.claim_hash__in': channel_hashes}) + channel_txos = channel_txos[0] if channel_txos else [] # channels must come first for client side inflation to work properly return channel_txos + reposted_txos def protobuf_resolve(urls, **kwargs) -> str: - return ResultOutput.to_base64([resolve_url(raw_url) for raw_url in urls], []) + txo_rows = [resolve_url(raw_url) for raw_url in urls] + extra_txo_rows = _get_referenced_rows( + txo_rows, + [txo.censor_hash for txo in txo_rows if isinstance(txo, ResolveCensoredError)] + ) + return ResultOutput.to_base64(txo_rows, extra_txo_rows) def resolve(urls, **kwargs) -> Dict[str, Output]: @@ -86,7 +93,10 @@ def resolve_url(raw_url): # matches = search_claims(censor, **q, limit=1) matches = search_claims(**q, limit=1)[0] if matches: - return matches[0] + stream = matches[0] + if channel: + stream.channel = channel + return stream elif censor.censored: return ResolveCensoredError(raw_url, next(iter(censor.censored))) else: diff --git a/lbry/schema/result.py b/lbry/schema/result.py index 2a8ffcdcc..ebdd404a2 100644 --- a/lbry/schema/result.py +++ b/lbry/schema/result.py @@ -206,5 +206,10 @@ class Outputs: #txo_message.claim.trending_mixed = txo['trending_mixed'] #txo_message.claim.trending_local = txo['trending_local'] #txo_message.claim.trending_global = txo['trending_global'] - #set_reference(txo_message.claim.channel, txo['channel_hash'], extra_txo_rows) + if txo.channel: + reference = txo_message.claim.channel + hash = txo.channel.hash + reference.tx_hash = hash[:32] + reference.nout = struct.unpack(' Date: Wed, 9 Sep 2020 18:36:01 -0300 Subject: [PATCH 02/17] fix not found raising, add a test --- lbry/db/queries/resolve.py | 2 +- tests/integration/blockchain/test_blockchain.py | 3 +++ 2 files 
changed, 4 insertions(+), 1 deletion(-) diff --git a/lbry/db/queries/resolve.py b/lbry/db/queries/resolve.py index 9b4f36c4b..ba45eb5bb 100644 --- a/lbry/db/queries/resolve.py +++ b/lbry/db/queries/resolve.py @@ -41,7 +41,7 @@ def _get_referenced_rows(txo_rows: List[Output], censor_channels: List[bytes]): def protobuf_resolve(urls, **kwargs) -> str: txo_rows = [resolve_url(raw_url) for raw_url in urls] extra_txo_rows = _get_referenced_rows( - txo_rows, + [txo_row for txo_row in txo_rows if isinstance(txo_row, Output)], [txo.censor_hash for txo in txo_rows if isinstance(txo, ResolveCensoredError)] ) return ResultOutput.to_base64(txo_rows, extra_txo_rows) diff --git a/tests/integration/blockchain/test_blockchain.py b/tests/integration/blockchain/test_blockchain.py index 72afdf1f7..2782fe7b7 100644 --- a/tests/integration/blockchain/test_blockchain.py +++ b/tests/integration/blockchain/test_blockchain.py @@ -912,6 +912,9 @@ class TestGeneralBlockchainSync(SyncingBlockchainTestCase): claim = resolutions[0][0] self.assertTrue(claim.is_signed_by(claim.channel, self.chain.ledger)) + resolutions = Outputs.from_base64(await self.db.protobuf_resolve(["@foo#ab/notfound"])) + self.assertEqual(len(resolutions.txs), 0) + class TestClaimtrieSync(SyncingBlockchainTestCase): From c7eb60619f0be7db3d82531b70d410393192ba33 Mon Sep 17 00:00:00 2001 From: Victor Shyba Date: Thu, 10 Sep 2020 17:19:41 -0300 Subject: [PATCH 03/17] separate test for not found message --- tests/integration/blockchain/test_blockchain.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/tests/integration/blockchain/test_blockchain.py b/tests/integration/blockchain/test_blockchain.py index 2782fe7b7..9e74fa642 100644 --- a/tests/integration/blockchain/test_blockchain.py +++ b/tests/integration/blockchain/test_blockchain.py @@ -912,8 +912,13 @@ class TestGeneralBlockchainSync(SyncingBlockchainTestCase): claim = resolutions[0][0] self.assertTrue(claim.is_signed_by(claim.channel, self.chain.ledger)) + async def test_resolve_not_found(self): + await self.get_claim(await self.create_claim(claim_id_startswith='ab', is_channel=True)) + await self.generate(1) resolutions = Outputs.from_base64(await self.db.protobuf_resolve(["@foo#ab/notfound"])) - self.assertEqual(len(resolutions.txs), 0) + self.assertEqual(resolutions.txos[0].error.text, "Could not find claim at \"@foo#ab/notfound\".") + resolutions = Outputs.from_base64(await self.db.protobuf_resolve(["@notfound#ab/notfound"])) + self.assertEqual(resolutions.txos[0].error.text, "Could not find channel in \"@notfound#ab/notfound\".") class TestClaimtrieSync(SyncingBlockchainTestCase): From 6690e63ea4f18d7b0018f289f20e58cc49c0f17c Mon Sep 17 00:00:00 2001 From: Victor Shyba Date: Thu, 10 Sep 2020 18:13:08 -0300 Subject: [PATCH 04/17] remove commented code and unused import --- lbry/db/queries/resolve.py | 6 ------ lbry/schema/result.py | 1 - tests/integration/blockchain/test_blockchain.py | 1 - 3 files changed, 8 deletions(-) diff --git a/lbry/db/queries/resolve.py b/lbry/db/queries/resolve.py index ba45eb5bb..7221bf15e 100644 --- a/lbry/db/queries/resolve.py +++ b/lbry/db/queries/resolve.py @@ -49,12 +49,6 @@ def protobuf_resolve(urls, **kwargs) -> str: def resolve(urls, **kwargs) -> Dict[str, Output]: return {url: resolve_url(url) for url in urls} - #txo_rows = [resolve_url(raw_url) for raw_url in urls] - #extra_txo_rows = _get_referenced_rows( - # [txo for txo in txo_rows if isinstance(txo, dict)], - # [txo.censor_hash for txo in txo_rows if isinstance(txo, 
ResolveCensoredError)] - #) - #return txo_rows, extra_txo_rows def resolve_url(raw_url): diff --git a/lbry/schema/result.py b/lbry/schema/result.py index ebdd404a2..5f4519c72 100644 --- a/lbry/schema/result.py +++ b/lbry/schema/result.py @@ -1,7 +1,6 @@ import base64 import struct from typing import List -from binascii import hexlify from itertools import chain from lbry.error import ResolveCensoredError diff --git a/tests/integration/blockchain/test_blockchain.py b/tests/integration/blockchain/test_blockchain.py index 9e74fa642..f991cc1a5 100644 --- a/tests/integration/blockchain/test_blockchain.py +++ b/tests/integration/blockchain/test_blockchain.py @@ -899,7 +899,6 @@ class TestGeneralBlockchainSync(SyncingBlockchainTestCase): self.assertEqual(stream_cd.claim_id, await self.resolve_to_claim_id("@foo#ab/foo#cd")) async def test_resolve_protobuf_includes_enough_information_for_signature_validation(self): - # important for old sdk chan_ab = await self.get_claim( await self.create_claim(claim_id_startswith='ab', is_channel=True)) await self.create_claim(claim_id_startswith='cd', sign=chan_ab) From fa34ff88bcf463670c954736afbdfb0f79c8fbe3 Mon Sep 17 00:00:00 2001 From: Victor Shyba Date: Thu, 10 Sep 2020 21:52:23 -0300 Subject: [PATCH 05/17] refactor db, make resolve censor right --- lbry/db/queries/resolve.py | 27 ++++++++++++++------------- 1 file changed, 14 insertions(+), 13 deletions(-) diff --git a/lbry/db/queries/resolve.py b/lbry/db/queries/resolve.py index 7221bf15e..7b6eff9e9 100644 --- a/lbry/db/queries/resolve.py +++ b/lbry/db/queries/resolve.py @@ -6,16 +6,22 @@ from lbry.schema.url import URL from lbry.schema.result import Outputs as ResultOutput from lbry.error import ResolveCensoredError from lbry.blockchain.transaction import Output +from . 
import rows_to_txos from ..query_context import context -from .search import search_claims +from .search import select_claims log = logging.getLogger(__name__) +def resolve_claims(**constraints): + censor = context().get_resolve_censor() + rows = context().fetchall(select_claims(**constraints)) + return rows_to_txos(rows), censor + + def _get_referenced_rows(txo_rows: List[Output], censor_channels: List[bytes]): - # censor = context().get_resolve_censor() repost_hashes = set(txo.reposted_claim.claim_hash for txo in txo_rows if txo.reposted_claim) channel_hashes = set(itertools.chain( (txo.channel.claim_hash for txo in txo_rows if txo.channel), @@ -24,14 +30,14 @@ def _get_referenced_rows(txo_rows: List[Output], censor_channels: List[bytes]): reposted_txos = [] if repost_hashes: - reposted_txos = search_claims(**{'claim.claim_hash__in': repost_hashes}) + reposted_txos = resolve_claims(**{'claim.claim_hash__in': repost_hashes}) if reposted_txos: reposted_txos = reposted_txos[0] channel_hashes |= set(txo.channel.claim_hash for txo in reposted_txos if txo.channel) channel_txos = [] if channel_hashes: - channel_txos = search_claims(**{'claim.claim_hash__in': channel_hashes}) + channel_txos = resolve_claims(**{'claim.claim_hash__in': channel_hashes}) channel_txos = channel_txos[0] if channel_txos else [] # channels must come first for client side inflation to work properly @@ -52,8 +58,6 @@ def resolve(urls, **kwargs) -> Dict[str, Output]: def resolve_url(raw_url): - censor = context().get_resolve_censor() - try: url = URL.parse(raw_url) except ValueError as e: @@ -67,13 +71,12 @@ def resolve_url(raw_url): q['is_controlling'] = True else: q['order_by'] = ['^creation_height'] - #matches = search_claims(censor, **q, limit=1) - matches = search_claims(**q, limit=1)[0] + matches, censor = resolve_claims(**q, limit=1) if matches: channel = matches[0] elif censor.censored: return ResolveCensoredError(raw_url, next(iter(censor.censored))) - else: + elif not channel: return LookupError(f'Could not find channel in "{raw_url}".') if url.has_stream: @@ -84,12 +87,10 @@ def resolve_url(raw_url): q['is_signature_valid'] = True elif set(q) == {'name'}: q['is_controlling'] = True - # matches = search_claims(censor, **q, limit=1) - matches = search_claims(**q, limit=1)[0] + matches, censor = resolve_claims(**q, limit=1) if matches: stream = matches[0] - if channel: - stream.channel = channel + stream.channel = channel return stream elif censor.censored: return ResolveCensoredError(raw_url, next(iter(censor.censored))) From 9c1d593e54e06213f6d7cd6289cb07ce20120228 Mon Sep 17 00:00:00 2001 From: Victor Shyba Date: Thu, 10 Sep 2020 22:14:31 -0300 Subject: [PATCH 06/17] fix set_reference --- lbry/schema/result.py | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/lbry/schema/result.py b/lbry/schema/result.py index 5f4519c72..3d197de0b 100644 --- a/lbry/schema/result.py +++ b/lbry/schema/result.py @@ -15,10 +15,10 @@ BLOCKED = ErrorMessage.Code.Name(ErrorMessage.BLOCKED) def set_reference(reference, claim_hash, rows): if claim_hash: for txo in rows: - if claim_hash == txo['claim_hash']: - reference.tx_hash = txo['txo_hash'][:32] - reference.nout = struct.unpack(' Date: Fri, 18 Sep 2020 02:43:28 -0300 Subject: [PATCH 07/17] fix effective_amount --- lbry/db/queries/search.py | 10 ++++++++++ tests/integration/blockchain/test_blockchain.py | 11 +++++++++++ 2 files changed, 21 insertions(+) diff --git a/lbry/db/queries/search.py b/lbry/db/queries/search.py index cfa77c09d..2d0ef2604 100644 
--- a/lbry/db/queries/search.py +++ b/lbry/db/queries/search.py @@ -32,6 +32,15 @@ BASE_SELECT_SUPPORT_COLUMNS = BASE_SELECT_TXO_COLUMNS + [ ] +def compat_layer(**constraints): + # for old sdk, to be removed later + replacements = {"effective_amount": "staked_amount"} + for old_key, new_key in replacements.items(): + if old_key in constraints: + constraints[new_key] = constraints.pop(old_key) + return constraints + + def select_supports(cols: List = None, **constraints) -> Select: if cols is None: cols = BASE_SELECT_SUPPORT_COLUMNS @@ -82,6 +91,7 @@ BASE_SELECT_CLAIM_COLUMNS = BASE_SELECT_TXO_COLUMNS + [ def select_claims(cols: List = None, for_count=False, **constraints) -> Select: + constraints = compat_layer(**constraints) if cols is None: cols = BASE_SELECT_CLAIM_COLUMNS if 'order_by' in constraints: diff --git a/tests/integration/blockchain/test_blockchain.py b/tests/integration/blockchain/test_blockchain.py index f991cc1a5..c08900607 100644 --- a/tests/integration/blockchain/test_blockchain.py +++ b/tests/integration/blockchain/test_blockchain.py @@ -919,6 +919,17 @@ class TestGeneralBlockchainSync(SyncingBlockchainTestCase): resolutions = Outputs.from_base64(await self.db.protobuf_resolve(["@notfound#ab/notfound"])) self.assertEqual(resolutions.txos[0].error.text, "Could not find channel in \"@notfound#ab/notfound\".") + async def test_claim_search_effective_amount(self): + claim = await self.get_claim(await self.create_claim(claim_id_startswith='ab', is_channel=True, amount='0.42')) + await self.generate(1) + results = await self.db.search_claims(staked_amount=42000000) + self.assertEqual(claim.claim_id, results[0].claim_id) + # compat layer + results = await self.db.search_claims(effective_amount=42000000) + self.assertEqual(claim.claim_id, results[0].claim_id) + + + class TestClaimtrieSync(SyncingBlockchainTestCase): From fdac6416a1f2825ea6af504ac9682e849fd7ebec Mon Sep 17 00:00:00 2001 From: Victor Shyba Date: Fri, 18 Sep 2020 12:58:06 -0300 Subject: [PATCH 08/17] tests: find_claim_txo for reliability in some places --- tests/integration/blockchain/test_blockchain.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/tests/integration/blockchain/test_blockchain.py b/tests/integration/blockchain/test_blockchain.py index c08900607..9914be686 100644 --- a/tests/integration/blockchain/test_blockchain.py +++ b/tests/integration/blockchain/test_blockchain.py @@ -401,11 +401,7 @@ class TestMultiBlockFileSyncing(BasicBlockchainTestCase): signed = await self.chain.sign_raw_transaction_with_wallet(funded['hex']) await self.chain.send_raw_transaction(signed['hex']) tx = Transaction(unhexlify(signed['hex'])) - claim = None - for txo in tx.outputs: - if txo.is_claim: - claim = txo - break + claim = self.find_claim_txo(tx) support_tx = Transaction().add_outputs([ Output.pay_support_pubkey_hash(CENT, claim.claim_name, claim.claim_id, address), ]) @@ -416,7 +412,7 @@ class TestMultiBlockFileSyncing(BasicBlockchainTestCase): # supports \w data aren't supported until block 350, fast forward a little await self.chain.generate(60) - claim = tx.outputs[0] + claim = self.find_claim_txo(tx) tx = Transaction().add_outputs([ Output.pay_support_pubkey_hash(CENT, claim.claim_name, claim.claim_id, address), Output.pay_support_data_pubkey_hash( From 25b63c988f81ef1e4f7a96d04e0c4adc35038df4 Mon Sep 17 00:00:00 2001 From: Victor Shyba Date: Fri, 18 Sep 2020 20:16:37 -0300 Subject: [PATCH 09/17] further fixes to effective amount --- lbry/db/constants.py | 2 +- lbry/db/queries/search.py | 
5 ++++- tests/integration/blockchain/test_blockchain.py | 2 +- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/lbry/db/constants.py b/lbry/db/constants.py index 0f901b13f..88ba55ab4 100644 --- a/lbry/db/constants.py +++ b/lbry/db/constants.py @@ -53,7 +53,7 @@ SEARCH_INTEGER_PARAMS = { 'height', 'creation_height', 'activation_height', 'expiration_height', 'timestamp', 'creation_timestamp', 'duration', 'release_time', 'fee_amount', 'tx_position', 'channel_join', 'reposted', - 'amount', 'effective_amount', 'support_amount', + 'amount', 'staked_amount', 'support_amount', 'trending_group', 'trending_mixed', 'trending_local', 'trending_global', } diff --git a/lbry/db/queries/search.py b/lbry/db/queries/search.py index 2d0ef2604..8b16efce1 100644 --- a/lbry/db/queries/search.py +++ b/lbry/db/queries/search.py @@ -38,6 +38,9 @@ def compat_layer(**constraints): for old_key, new_key in replacements.items(): if old_key in constraints: constraints[new_key] = constraints.pop(old_key) + order_by = constraints.get("order_by", []) + if old_key in order_by: + constraints["order_by"] = [order_key if order_key != old_key else new_key for order_key in order_by] return constraints @@ -133,7 +136,7 @@ def select_claims(cols: List = None, for_count=False, **constraints) -> Select: constraints['offset'] = int(constraints.pop('sequence')) - 1 constraints['limit'] = 1 if 'amount_order' in constraints: - constraints['order_by'] = 'effective_amount DESC' + constraints['order_by'] = 'staked_amount DESC' constraints['offset'] = int(constraints.pop('amount_order')) - 1 constraints['limit'] = 1 diff --git a/tests/integration/blockchain/test_blockchain.py b/tests/integration/blockchain/test_blockchain.py index 9914be686..a24c9b3bb 100644 --- a/tests/integration/blockchain/test_blockchain.py +++ b/tests/integration/blockchain/test_blockchain.py @@ -921,7 +921,7 @@ class TestGeneralBlockchainSync(SyncingBlockchainTestCase): results = await self.db.search_claims(staked_amount=42000000) self.assertEqual(claim.claim_id, results[0].claim_id) # compat layer - results = await self.db.search_claims(effective_amount=42000000) + results = await self.db.search_claims(effective_amount=42000000, amount_order=1, order_by=["effective_amount"]) self.assertEqual(claim.claim_id, results[0].claim_id) From 39ba2a8b7f6969d9c53377aee1cacb0cb1277d53 Mon Sep 17 00:00:00 2001 From: Victor Shyba Date: Fri, 18 Sep 2020 20:18:48 -0300 Subject: [PATCH 10/17] log errors for on_rpc --- lbry/service/daemon.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/lbry/service/daemon.py b/lbry/service/daemon.py index 5af54fb52..55b67a5e2 100644 --- a/lbry/service/daemon.py +++ b/lbry/service/daemon.py @@ -137,12 +137,16 @@ class Daemon: data = await request.json() params = data.get('params', {}) method = getattr(self.api, data['method']) - result = await method(**params) - encoded_result = jsonrpc_dumps_pretty(result, service=self.service) - return Response( - text=encoded_result, - content_type='application/json' - ) + try: + result = await method(**params) + encoded_result = jsonrpc_dumps_pretty(result, service=self.service) + return Response( + text=encoded_result, + content_type='application/json' + ) + except Exception as e: + log.exception("RPC error") + raise e async def on_connect(self, request): web_socket = WebSocketManager() From 1a5292aaf4a84a3b7e1c63b095b5205291201328 Mon Sep 17 00:00:00 2001 From: Victor Shyba Date: Wed, 23 Sep 2020 00:20:08 -0300 Subject: [PATCH 11/17] add expiration_height --- 
lbry/db/queries/search.py | 1 + lbry/db/queries/txio.py | 2 +- lbry/schema/result.py | 2 +- tests/integration/blockchain/test_blockchain.py | 9 +++++++++ 4 files changed, 12 insertions(+), 2 deletions(-) diff --git a/lbry/db/queries/search.py b/lbry/db/queries/search.py index 8b16efce1..fc79b5369 100644 --- a/lbry/db/queries/search.py +++ b/lbry/db/queries/search.py @@ -75,6 +75,7 @@ BASE_SELECT_CLAIM_COLUMNS = BASE_SELECT_TXO_COLUMNS + [ Claim.c.activation_height, Claim.c.takeover_height, Claim.c.creation_height, + Claim.c.expiration_height, Claim.c.is_controlling, Claim.c.channel_hash, Claim.c.reposted_count, diff --git a/lbry/db/queries/txio.py b/lbry/db/queries/txio.py index 05f680b1c..a8c8f6ad8 100644 --- a/lbry/db/queries/txio.py +++ b/lbry/db/queries/txio.py @@ -372,7 +372,7 @@ META_ATTRS = ( 'activation_height', 'takeover_height', 'creation_height', 'staked_amount', 'short_url', 'canonical_url', 'staked_support_amount', 'staked_support_count', 'signed_claim_count', 'signed_support_count', 'is_signature_valid', - 'reposted_count', + 'reposted_count', 'expiration_height' ) diff --git a/lbry/schema/result.py b/lbry/schema/result.py index 3d197de0b..48f43a1a9 100644 --- a/lbry/schema/result.py +++ b/lbry/schema/result.py @@ -196,7 +196,7 @@ class Outputs: # txo_message.claim.take_over_height = txo['last_take_over_height'] txo_message.claim.creation_height = txo.meta['creation_height'] txo_message.claim.activation_height = txo.meta['activation_height'] - #txo_message.claim.expiration_height = txo['expiration_height'] + txo_message.claim.expiration_height = txo.meta['expiration_height'] if txo.meta['signed_claim_count'] is not None: txo_message.claim.claims_in_channel = txo.meta['signed_claim_count'] txo_message.claim.effective_amount = txo.meta['staked_amount'] diff --git a/tests/integration/blockchain/test_blockchain.py b/tests/integration/blockchain/test_blockchain.py index a24c9b3bb..f23e7211b 100644 --- a/tests/integration/blockchain/test_blockchain.py +++ b/tests/integration/blockchain/test_blockchain.py @@ -924,6 +924,15 @@ class TestGeneralBlockchainSync(SyncingBlockchainTestCase): results = await self.db.search_claims(effective_amount=42000000, amount_order=1, order_by=["effective_amount"]) self.assertEqual(claim.claim_id, results[0].claim_id) + async def test_meta_fields_are_translated_to_protobuf(self): + chan_ab = await self.get_claim( + await self.create_claim(claim_id_startswith='ab', is_channel=True)) + await self.create_claim(claim_id_startswith='cd', sign=chan_ab) + await self.generate(1) + resolutions = Outputs.from_base64(await self.db.protobuf_resolve(["@foo#ab/foo#cd"])) + claim = resolutions.txos[0].claim + self.assertEqual(claim.effective_amount, 1000000) + self.assertEqual(claim.expiration_height, 602) From 18b25f514649190e52d5c847ef7936fa48e129c8 Mon Sep 17 00:00:00 2001 From: Victor Shyba Date: Wed, 23 Sep 2020 00:36:33 -0300 Subject: [PATCH 12/17] add and test takeover info on resolve --- lbry/schema/result.py | 4 ++-- tests/integration/blockchain/test_blockchain.py | 13 +++++++++++++ 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/lbry/schema/result.py b/lbry/schema/result.py index 48f43a1a9..87a165411 100644 --- a/lbry/schema/result.py +++ b/lbry/schema/result.py @@ -192,8 +192,8 @@ class Outputs: if txo.meta['canonical_url'] is not None: txo_message.claim.canonical_url = txo.meta['canonical_url'] txo_message.claim.is_controlling = bool(txo.meta['takeover_height']) - #if txo['last_take_over_height'] is not None: - # 
txo_message.claim.take_over_height = txo['last_take_over_height'] + if txo_message.claim.is_controlling: + txo_message.claim.take_over_height = txo.meta['takeover_height'] txo_message.claim.creation_height = txo.meta['creation_height'] txo_message.claim.activation_height = txo.meta['activation_height'] txo_message.claim.expiration_height = txo.meta['expiration_height'] diff --git a/tests/integration/blockchain/test_blockchain.py b/tests/integration/blockchain/test_blockchain.py index f23e7211b..c2f9b6ebd 100644 --- a/tests/integration/blockchain/test_blockchain.py +++ b/tests/integration/blockchain/test_blockchain.py @@ -933,6 +933,19 @@ class TestGeneralBlockchainSync(SyncingBlockchainTestCase): claim = resolutions.txos[0].claim self.assertEqual(claim.effective_amount, 1000000) self.assertEqual(claim.expiration_height, 602) + self.assertEqual(claim.take_over_height, 102) + self.assertTrue(claim.is_controlling) + # takeover + await self.create_claim(claim_id_startswith='ad', sign=chan_ab, amount='1.1') + await self.generate(1) + resolutions = Outputs.from_base64(await self.db.protobuf_resolve(["@foo#ab/foo#cd"])) + claim = resolutions.txos[0].claim + self.assertEqual(claim.take_over_height, 0) + self.assertFalse(claim.is_controlling) + resolutions = Outputs.from_base64(await self.db.protobuf_resolve(["@foo#ab/foo#ad"])) + claim = resolutions.txos[0].claim + self.assertEqual(claim.take_over_height, 103) + self.assertTrue(claim.is_controlling) From 0a2c161ace2e751fb408d39a92c7c971d91fcd0d Mon Sep 17 00:00:00 2001 From: Victor Shyba Date: Wed, 23 Sep 2020 01:12:09 -0300 Subject: [PATCH 13/17] make short_url and canonical_url match old SDK --- lbry/blockchain/database.py | 3 ++- tests/integration/blockchain/test_blockchain.py | 13 +++++++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/lbry/blockchain/database.py b/lbry/blockchain/database.py index 66709b82b..4cf73079c 100644 --- a/lbry/blockchain/database.py +++ b/lbry/blockchain/database.py @@ -16,7 +16,8 @@ FILES = [ def make_short_url(r): try: - return f'{normalize_name(r["name"].decode())}#{r["shortestID"] or r["claimID"][::-1].hex()[0]}' + # fixme: we describe it as normalized but the old SDK didnt do that + return f'{r["name"].decode()}#{r["shortestID"] or r["claimID"][::-1].hex()[0]}' except UnicodeDecodeError: # print(f'failed making short url due to name parse error for claim_id: {r["claimID"][::-1].hex()}') return "INVALID NAME" diff --git a/tests/integration/blockchain/test_blockchain.py b/tests/integration/blockchain/test_blockchain.py index c2f9b6ebd..c25e8cc61 100644 --- a/tests/integration/blockchain/test_blockchain.py +++ b/tests/integration/blockchain/test_blockchain.py @@ -947,6 +947,19 @@ class TestGeneralBlockchainSync(SyncingBlockchainTestCase): self.assertEqual(claim.take_over_height, 103) self.assertTrue(claim.is_controlling) + async def test_uris_and_uppercase(self): + # fixme: this is a bug but its how the old SDK expects it (non-normalized URIs) + # to be decided if we are going to ignore it or how its used today + chan_ab = await self.get_claim( + await self.create_claim(claim_id_startswith='ab', is_channel=True, name="@Chá")) + await self.create_claim(claim_id_startswith='cd', sign=chan_ab, name="Hortelã") + await self.generate(1) + + resolutions = Outputs.from_base64(await self.db.protobuf_resolve(["Hortelã"])) + self.assertEqual(1, len(resolutions.txos)) + claim = resolutions.txos[0].claim + self.assertEqual("@Chá#a/Hortelã#c", claim.canonical_url) + self.assertEqual("Hortelã#c", 
claim.short_url) class TestClaimtrieSync(SyncingBlockchainTestCase): From 24386c74f488c1cbf453df9dbb351696e3280c49 Mon Sep 17 00:00:00 2001 From: Victor Shyba Date: Wed, 23 Sep 2020 02:21:30 -0300 Subject: [PATCH 14/17] fix test_nulls --- lbry/blockchain/database.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lbry/blockchain/database.py b/lbry/blockchain/database.py index 4cf73079c..1ce18b51b 100644 --- a/lbry/blockchain/database.py +++ b/lbry/blockchain/database.py @@ -17,7 +17,8 @@ FILES = [ def make_short_url(r): try: # fixme: we describe it as normalized but the old SDK didnt do that - return f'{r["name"].decode()}#{r["shortestID"] or r["claimID"][::-1].hex()[0]}' + name = r["name"].decode().replace("\x00", "") + return f'{name}#{r["shortestID"] or r["claimID"][::-1].hex()[0]}' except UnicodeDecodeError: # print(f'failed making short url due to name parse error for claim_id: {r["claimID"][::-1].hex()}') return "INVALID NAME" From 2d7929f1137b0a9468dc582e5b7f90a573ee2bfd Mon Sep 17 00:00:00 2001 From: Victor Shyba Date: Wed, 23 Sep 2020 14:39:24 -0300 Subject: [PATCH 15/17] fixme->TODO --- lbry/blockchain/database.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lbry/blockchain/database.py b/lbry/blockchain/database.py index 1ce18b51b..baa68eab4 100644 --- a/lbry/blockchain/database.py +++ b/lbry/blockchain/database.py @@ -16,7 +16,7 @@ FILES = [ def make_short_url(r): try: - # fixme: we describe it as normalized but the old SDK didnt do that + # TODO: we describe it as normalized but the old SDK didnt do that name = r["name"].decode().replace("\x00", "") return f'{name}#{r["shortestID"] or r["claimID"][::-1].hex()[0]}' except UnicodeDecodeError: From 2853bb437d3a93b0ce91c7c738d3ba7f0d67d86f Mon Sep 17 00:00:00 2001 From: Lex Berezhny Date: Thu, 24 Sep 2020 12:27:06 -0400 Subject: [PATCH 16/17] make TXs pickleable --- lbry/blockchain/script.py | 5 ++++- lbry/blockchain/util.py | 4 ---- tests/unit/blockchain/test_transaction.py | 22 ++++++++++++++++++++++ 3 files changed, 26 insertions(+), 5 deletions(-) diff --git a/lbry/blockchain/script.py b/lbry/blockchain/script.py index cbeeaf5cc..89944115c 100644 --- a/lbry/blockchain/script.py +++ b/lbry/blockchain/script.py @@ -4,7 +4,10 @@ from binascii import hexlify from collections import namedtuple from .bcd_data_stream import BCDataStream -from .util import subclass_tuple + + +def subclass_tuple(name, base): + return type(name, (base,), {'__slots__': ()}) # bitcoin opcodes diff --git a/lbry/blockchain/util.py b/lbry/blockchain/util.py index e1d1940eb..40458c0ad 100644 --- a/lbry/blockchain/util.py +++ b/lbry/blockchain/util.py @@ -16,10 +16,6 @@ class ReadOnlyList(Sequence[T]): return len(self.lst) -def subclass_tuple(name, base): - return type(name, (base,), {'__slots__': ()}) - - class ArithUint256: # https://github.com/bitcoin/bitcoin/blob/master/src/arith_uint256.cpp diff --git a/tests/unit/blockchain/test_transaction.py b/tests/unit/blockchain/test_transaction.py index 649cc6022..fc9f7b403 100644 --- a/tests/unit/blockchain/test_transaction.py +++ b/tests/unit/blockchain/test_transaction.py @@ -1,4 +1,5 @@ from unittest import TestCase +from pickle import dumps from binascii import hexlify, unhexlify from lbry import Config, Ledger, Transaction @@ -253,3 +254,24 @@ class TestTransactionSerialization(TestCase): txo = t.outputs[0] self.assertEqual(txo.script.template.name, 'pay_script_hash') self.assertEqual(txo.get_address(self.ledger), 'rVBhueRT9E8RPdVcpCdXV5gRiiXVjE6VD9') + + def 
test_tx_with_claim_can_pickle(self): + # used to fail with this error: + # _pickle.PicklingError: Can't pickle : + # attribute lookup PUSH_SINGLE on lbry.blockchain.util failed + raw = unhexlify( + "01000000012433e1b327603843b083344dbae5306ff7927f87ebbc5ae9eb50856c5b53fd1d000000006a4" + "7304402201a91e1023d11c383a11e26bf8f9034087b15d8ada78fa565e0610455ffc8505e0220038a63a6" + "ecb399723d4f1f78a20ddec0a78bf8fb6c75e63e166ef780f3944fbf0121021810150a2e4b088ec51b20c" + "be1b335962b634545860733367824d5dc3eda767dffffffff028096980000000000fdff00b50463617473" + "4cdc080110011a7808011230080410011a084d616361726f6e6922002a003214416c6c207269676874732" + "072657365727665642e38004a0052005a001a42080110011a30add80aaf02559ba09853636a0658c42b72" + "7cb5bb4ba8acedb4b7fe656065a47a31878dbf9912135ddb9e13806cc1479d220a696d6167652f6a70656" + "72a5c080110031a404180cc0fa4d3839ee29cca866baed25fafb43fca1eb3b608ee889d351d3573d042c7" + "b83e2e643db0d8e062a04e6e9ae6b90540a2f95fe28638d0f18af4361a1c2214f73de93f4299fb32c32f9" + "49e02198a8e91101abd6d7576a914be16e4b0f9bd8f6d47d02b3a887049c36d3b84cb88ac0cd2520b0000" + "00001976a914f521178feb733a719964e1da4a9efb09dcc39cfa88ac00000000" + ) + tx = Transaction(raw) + tx.outputs[0].script.values # triggers parsing, needed to reproduce pickle error + dumps(tx) From bba9aec4f248c6d2c1578ced209ab40f11be027f Mon Sep 17 00:00:00 2001 From: Lex Berezhny Date: Thu, 24 Sep 2020 15:19:30 -0400 Subject: [PATCH 17/17] added respoted_count calculation --- lbry/blockchain/sync/claims.py | 28 ++++++++++++++++++- lbry/blockchain/sync/synchronizer.py | 13 +++++++-- lbry/db/queries/txio.py | 25 +++++++++++++++++ lbry/db/query_context.py | 6 +++- lbry/db/tables.py | 17 +++++++++-- lbry/testcase.py | 6 ++-- .../integration/blockchain/test_blockchain.py | 26 +++++++++++++++-- 7 files changed, 108 insertions(+), 13 deletions(-) diff --git a/lbry/blockchain/sync/claims.py b/lbry/blockchain/sync/claims.py index 0594989fd..86ead1cce 100644 --- a/lbry/blockchain/sync/claims.py +++ b/lbry/blockchain/sync/claims.py @@ -8,7 +8,8 @@ from lbry.db.queries.txio import ( minimum_txo_columns, row_to_txo, where_unspent_txos, where_claims_with_changed_supports, count_unspent_txos, where_channels_with_changed_content, - where_abandoned_claims, count_channels_with_changed_content + where_abandoned_claims, count_channels_with_changed_content, + where_claims_with_changed_reposts, ) from lbry.db.query_context import ProgressContext, event_emitter from lbry.db.tables import TX, TXO, Claim, Support, pg_add_claim_and_tag_constraints_and_indexes @@ -56,6 +57,17 @@ def staked_support_count_calc(other): ) +def reposted_claim_count_calc(other): + repost = TXO.alias('repost') + return ( + select(func.coalesce(func.count(repost.c.reposted_claim_hash), 0)) + .where( + (repost.c.reposted_claim_hash == other.c.claim_hash) & + (repost.c.spent_height == 0) + ).scalar_subquery() + ) + + def make_label(action, blocks): if blocks[0] == blocks[-1]: return f"{action} {blocks[0]:>6}" @@ -73,6 +85,7 @@ def select_claims_for_saving( *minimum_txo_columns, TXO.c.claim_hash, staked_support_amount_calc(TXO).label('staked_support_amount'), staked_support_count_calc(TXO).label('staked_support_count'), + reposted_claim_count_calc(TXO).label('reposted_count'), TXO.c.signature, TXO.c.signature_digest, case([( TXO.c.channel_hash.isnot(None), @@ -95,6 +108,7 @@ def row_to_claim_for_saving(row) -> Tuple[Output, dict]: return row_to_txo(row), { 'staked_support_amount': int(row.staked_support_amount), 'staked_support_count': int(row.staked_support_count), + 
'reposted_count': int(row.reposted_count), 'signature': row.signature, 'signature_digest': row.signature_digest, 'channel_public_key': row.channel_public_key @@ -237,6 +251,18 @@ def update_stakes(blocks: Tuple[int, int], claims: int, p: ProgressContext): p.step(result.rowcount) +@event_emitter("blockchain.sync.claims.reposts", "claims") +def update_reposts(blocks: Tuple[int, int], claims: int, p: ProgressContext): + p.start(claims) + sql = ( + Claim.update() + .where(where_claims_with_changed_reposts(blocks)) + .values(reposted_count=reposted_claim_count_calc(Claim)) + ) + result = p.ctx.execute(sql) + p.step(result.rowcount) + + @event_emitter("blockchain.sync.claims.channels", "channels") def update_channel_stats(blocks: Tuple[int, int], initial_sync: int, p: ProgressContext): update_sql = Claim.update().values( diff --git a/lbry/blockchain/sync/synchronizer.py b/lbry/blockchain/sync/synchronizer.py index f55299320..ee55d23c5 100644 --- a/lbry/blockchain/sync/synchronizer.py +++ b/lbry/blockchain/sync/synchronizer.py @@ -217,6 +217,9 @@ class BlockchainSync(Sync): async def count_claims_with_changed_supports(self, blocks) -> int: return await self.db.run(q.count_claims_with_changed_supports, blocks) + async def count_claims_with_changed_reposts(self, blocks) -> int: + return await self.db.run(q.count_claims_with_changed_reposts, blocks) + async def count_channels_with_changed_content(self, blocks) -> int: return await self.db.run(q.count_channels_with_changed_content, blocks) @@ -226,13 +229,13 @@ class BlockchainSync(Sync): ) async def sync_claims(self, blocks) -> bool: - delete_claims = takeovers = claims_with_changed_supports = 0 + delete_claims = takeovers = claims_with_changed_supports = claims_with_changed_reposts = 0 initial_sync = not await self.db.has_claims() with Progress(self.db.message_queue, CLAIMS_INIT_EVENT) as p: if initial_sync: total, batches = await self.distribute_unspent_txos(CLAIM_TYPE_CODES) elif blocks: - p.start(4) + p.start(5) # 1. content claims to be inserted or updated total = await self.count_unspent_txos( CLAIM_TYPE_CODES, blocks, missing_or_stale_in_claims_table=True @@ -247,6 +250,10 @@ class BlockchainSync(Sync): claims_with_changed_supports = await self.count_claims_with_changed_supports(blocks) total += claims_with_changed_supports p.step() + # 4. claims to be updated with new repost totals + claims_with_changed_reposts = await self.count_claims_with_changed_reposts(blocks) + total += claims_with_changed_reposts + p.step() # 5. 
claims to be updated due to name takeovers takeovers = await self.count_takeovers(blocks) total += takeovers @@ -270,6 +277,8 @@ class BlockchainSync(Sync): await self.db.run(claim_phase.update_takeovers, blocks, takeovers) if claims_with_changed_supports: await self.db.run(claim_phase.update_stakes, blocks, claims_with_changed_supports) + if claims_with_changed_reposts: + await self.db.run(claim_phase.update_reposts, blocks, claims_with_changed_reposts) if initial_sync: await self.db.run(claim_phase.claims_constraints_and_indexes) else: diff --git a/lbry/db/queries/txio.py b/lbry/db/queries/txio.py index a8c8f6ad8..e77f834d7 100644 --- a/lbry/db/queries/txio.py +++ b/lbry/db/queries/txio.py @@ -193,6 +193,31 @@ def count_channels_with_changed_content(blocks: Optional[Tuple[int, int]]): return context().fetchone(sql)['total'] +def where_changed_repost_txos(blocks: Optional[Tuple[int, int]]): + return ( + (TXO.c.txo_type == TXO_TYPES['repost']) & ( + between(TXO.c.height, blocks[0], blocks[-1]) | + between(TXO.c.spent_height, blocks[0], blocks[-1]) + ) + ) + + +def where_claims_with_changed_reposts(blocks: Optional[Tuple[int, int]]): + return Claim.c.claim_hash.in_( + select(TXO.c.reposted_claim_hash).where( + where_changed_repost_txos(blocks) + ) + ) + + +def count_claims_with_changed_reposts(blocks: Optional[Tuple[int, int]]): + sql = ( + select(func.count(distinct(TXO.c.reposted_claim_hash)).label('total')) + .where(where_changed_repost_txos(blocks)) + ) + return context().fetchone(sql)['total'] + + def select_transactions(cols, account_ids=None, **constraints): s: Select = select(*cols).select_from(TX) if not {'tx_hash', 'tx_hash__in'}.intersection(constraints): diff --git a/lbry/db/query_context.py b/lbry/db/query_context.py index 5bbd51c5c..cef79243c 100644 --- a/lbry/db/query_context.py +++ b/lbry/db/query_context.py @@ -476,6 +476,7 @@ class BulkLoader: 'channel_hash': None, 'signature': None, 'signature_digest': None, + 'reposted_claim_hash': None, 'public_key': None, 'public_key_hash': None } @@ -488,6 +489,8 @@ class BulkLoader: row['public_key_hash'] = self.ledger.address_to_hash160( self.ledger.public_key_to_address(claim.channel.public_key_bytes) ) + elif claim.is_repost: + row['reposted_claim_hash'] = claim.repost.reference.claim_hash else: row['txo_type'] = TXO_TYPES['stream'] elif txo.is_support: @@ -511,7 +514,7 @@ class BulkLoader: return row def claim_to_rows( - self, txo: Output, staked_support_amount: int, staked_support_count: int, + self, txo: Output, staked_support_amount: int, staked_support_count: int, reposted_count: int, signature: bytes = None, signature_digest: bytes = None, channel_public_key: bytes = None, ) -> Tuple[dict, List]: @@ -540,6 +543,7 @@ class BulkLoader: 'fee_currency': None, # reposts 'reposted_claim_hash': None, + 'reposted_count': reposted_count, # signed claims 'channel_hash': None, 'is_signature_valid': None, diff --git a/lbry/db/tables.py b/lbry/db/tables.py index 0a2d3d169..fdc2c9609 100644 --- a/lbry/db/tables.py +++ b/lbry/db/tables.py @@ -133,6 +133,9 @@ TXO = Table( Column('signature', LargeBinary, nullable=True), Column('signature_digest', LargeBinary, nullable=True), + # reposts + Column('reposted_claim_hash', LargeBinary, nullable=True), + # channels Column('public_key', LargeBinary, nullable=True), Column('public_key_hash', LargeBinary, nullable=True), @@ -158,6 +161,13 @@ pg_add_txo_constraints_and_indexes = [ f"INCLUDE (claim_hash) WHERE txo_type={TXO_TYPES['support']};", f"CREATE INDEX txo_spent_supports_by_height ON txo 
(spent_height DESC) " f"INCLUDE (claim_hash) WHERE txo_type={TXO_TYPES['support']};", + # for finding claims which need repost totals re-calculated in a block range + f"CREATE INDEX txo_added_reposts_by_height ON txo (height DESC) " + f"INCLUDE (reposted_claim_hash) WHERE txo_type={TXO_TYPES['repost']};", + f"CREATE INDEX txo_spent_reposts_by_height ON txo (spent_height DESC) " + f"INCLUDE (reposted_claim_hash) WHERE txo_type={TXO_TYPES['repost']};", + "CREATE INDEX txo_reposted_claim_hash ON txo (reposted_claim_hash)" + "WHERE reposted_claim_hash IS NOT NULL AND spent_height = 0;", "CREATE INDEX txo_height ON txo (height);", ] @@ -209,7 +219,6 @@ Claim = Table( Column('description', Text, nullable=True), Column('claim_type', SmallInteger), - Column('claim_reposted_count', Integer, server_default='0'), Column('staked_support_count', Integer, server_default='0'), Column('staked_support_amount', BigInteger, server_default='0'), @@ -221,8 +230,8 @@ Claim = Table( Column('duration', Integer, nullable=True), # reposts - Column('reposted_claim_hash', LargeBinary, nullable=True), - Column('reposted_count', Integer, server_default='0'), + Column('reposted_claim_hash', LargeBinary, nullable=True), # on claim doing the repost + Column('reposted_count', Integer, server_default='0'), # on claim being reposted # claims which are channels Column('signed_claim_count', Integer, server_default='0'), @@ -255,6 +264,8 @@ pg_add_claim_and_tag_constraints_and_indexes = [ # used to count()/sum() claims signed by channel "CREATE INDEX signed_content ON claim (channel_hash) " "INCLUDE (amount) WHERE is_signature_valid;", + # used to count()/sum() reposted claims + "CREATE INDEX reposted_content ON claim (reposted_claim_hash);", # basic tag indexes "ALTER TABLE tag ADD PRIMARY KEY (claim_hash, tag);", "CREATE INDEX tags ON tag (tag) INCLUDE (claim_hash);", diff --git a/lbry/testcase.py b/lbry/testcase.py index 8b17175f8..64468c557 100644 --- a/lbry/testcase.py +++ b/lbry/testcase.py @@ -792,7 +792,7 @@ class EventGenerator: yield from self.claims_main_start() yield from self.claims_insert(self.claims) if self.initial_sync: - yield from self.generate("blockchain.sync.claims.indexes", ("steps",), 0, None, (9,), (1,)) + yield from self.generate("blockchain.sync.claims.indexes", ("steps",), 0, None, (10,), (1,)) else: yield from self.claims_takeovers(self.takeovers) yield from self.claims_stakes() @@ -920,12 +920,12 @@ class EventGenerator: def spends_steps(self): yield from self.generate( "blockchain.sync.spends.main", ("steps",), 0, None, - (17 if self.initial_sync else 5,), + (20 if self.initial_sync else 5,), (1,) ) def claims_init(self): - yield from self.generate("blockchain.sync.claims.init", ("steps",), 0, None, (4,), (1,)) + yield from self.generate("blockchain.sync.claims.init", ("steps",), 0, None, (5,), (1,)) def claims_main_start(self): total = ( diff --git a/tests/integration/blockchain/test_blockchain.py b/tests/integration/blockchain/test_blockchain.py index c25e8cc61..148c62130 100644 --- a/tests/integration/blockchain/test_blockchain.py +++ b/tests/integration/blockchain/test_blockchain.py @@ -9,7 +9,7 @@ from distutils.dir_util import copy_tree, remove_tree from lbry import Config, Database, RegTestLedger, Transaction, Output, Input from lbry.crypto.base58 import Base58 -from lbry.schema.claim import Stream, Channel +from lbry.schema.claim import Claim, Stream, Channel from lbry.schema.result import Outputs from lbry.schema.support import Support from lbry.error import LbrycrdEventSubscriptionError, 
LbrycrdUnauthorizedError @@ -115,10 +115,14 @@ class SyncingBlockchainTestCase(BasicBlockchainTestCase): async def create_claim( self, title='', amount='0.01', name=None, author='', desc='', - claim_id_startswith='', sign=None, is_channel=False) -> str: + claim_id_startswith='', sign=None, is_channel=False, repost=None) -> str: name = name or ('@foo' if is_channel else 'foo') if not claim_id_startswith and sign is None and not is_channel: - claim = Stream().update(title=title, author=author, description=desc).claim + if repost: + claim = Claim() + claim.repost.reference.claim_id = repost + else: + claim = Stream().update(title=title, author=author, description=desc).claim return await self.chain.claim_name( name, hexlify(claim.to_bytes()).decode(), amount ) @@ -872,6 +876,22 @@ class TestGeneralBlockchainSync(SyncingBlockchainTestCase): self.assertEqual(0, r.meta['signed_claim_count']) # channel2 lost abandoned claim self.assertEqual(0, r.meta['signed_support_count']) + async def test_reposts(self): + self.stream1 = await self.get_claim(await self.create_claim()) + claim_id = self.stream1.claim_id + + # in same block + self.stream2 = await self.get_claim(await self.create_claim(repost=claim_id)) + await self.generate(1) + r, = await self.db.search_claims(claim_id=claim_id) + self.assertEqual(1, r.meta['reposted_count']) + + # in subsequent block + self.stream3 = await self.get_claim(await self.create_claim(repost=claim_id)) + await self.generate(1) + r, = await self.db.search_claims(claim_id=claim_id) + self.assertEqual(2, r.meta['reposted_count']) + async def resolve_to_claim_id(self, url): return (await self.db.resolve([url]))[url].claim_id
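
For reference, the compatibility shim introduced in patches 07 ("fix effective_amount") and 09 ("further fixes to effective amount") can be read in isolation as the small function below. This is a minimal standalone Python sketch, not the wired-up implementation: it assumes plain keyword-dict constraints, mirroring the `compat_layer` added to lbry/db/queries/search.py, and shows how an old-SDK `effective_amount` filter and any matching `order_by` entries are rewritten to the new `staked_amount` column before the claim query is built.

def compat_layer(**constraints):
    # Old-SDK parameter names mapped to their new column names; the real
    # function notes this is "for old sdk, to be removed later".
    replacements = {"effective_amount": "staked_amount"}
    for old_key, new_key in replacements.items():
        # rewrite the filter constraint itself
        if old_key in constraints:
            constraints[new_key] = constraints.pop(old_key)
        # rewrite any order_by entries that still use the old name
        order_by = constraints.get("order_by", [])
        if old_key in order_by:
            constraints["order_by"] = [
                key if key != old_key else new_key for key in order_by
            ]
    return constraints

# Example input modeled on test_claim_search_effective_amount:
print(compat_layer(effective_amount=42000000, order_by=["effective_amount"]))
# {'order_by': ['staked_amount'], 'staked_amount': 42000000}

Keeping the translation at the query boundary is what lets the rest of the schema (constants, protobuf serialization, takeover ordering) use `staked_amount` exclusively while older clients keep sending `effective_amount`, as exercised by the `search_claims(effective_amount=42000000, amount_order=1, order_by=["effective_amount"])` call in the integration test.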