forked from LBRYCommunity/lbry-sdk

fix abandoning signed claims in the same tx as their channel
- fix canonical/short url in es

parent ca57dcfc2f
commit c26a99e65c

4 changed files with 48 additions and 22 deletions
@@ -490,13 +490,15 @@ class CommandTestCase(IntegrationTestCase):
         """ Synchronous version of `out` method. """
         return json.loads(jsonrpc_dumps_pretty(value, ledger=self.ledger))['result']

-    async def confirm_and_render(self, awaitable, confirm) -> Transaction:
+    async def confirm_and_render(self, awaitable, confirm, return_tx=False) -> Transaction:
         tx = await awaitable
         if confirm:
             await self.ledger.wait(tx)
             await self.generate(1)
             await self.ledger.wait(tx, self.blockchain.block_expected)
-        return self.sout(tx)
+        if not return_tx:
+            return self.sout(tx)
+        return tx

     def create_upload_file(self, data, prefix=None, suffix=None):
         file_path = tempfile.mktemp(prefix=prefix or "tmp", suffix=suffix or "", dir=self.daemon.conf.upload_dir)
@@ -507,19 +509,19 @@ class CommandTestCase(IntegrationTestCase):

     async def stream_create(
             self, name='hovercraft', bid='1.0', file_path=None,
-            data=b'hi!', confirm=True, prefix=None, suffix=None, **kwargs):
+            data=b'hi!', confirm=True, prefix=None, suffix=None, return_tx=False, **kwargs):
         if file_path is None and data is not None:
             file_path = self.create_upload_file(data=data, prefix=prefix, suffix=suffix)
         return await self.confirm_and_render(
-            self.daemon.jsonrpc_stream_create(name, bid, file_path=file_path, **kwargs), confirm
+            self.daemon.jsonrpc_stream_create(name, bid, file_path=file_path, **kwargs), confirm, return_tx
         )

     async def stream_update(
-            self, claim_id, data=None, prefix=None, suffix=None, confirm=True, **kwargs):
+            self, claim_id, data=None, prefix=None, suffix=None, confirm=True, return_tx=False, **kwargs):
         if data is not None:
             file_path = self.create_upload_file(data=data, prefix=prefix, suffix=suffix)
             return await self.confirm_and_render(
-                self.daemon.jsonrpc_stream_update(claim_id, file_path=file_path, **kwargs), confirm
+                self.daemon.jsonrpc_stream_update(claim_id, file_path=file_path, **kwargs), confirm, return_tx
             )
         return await self.confirm_and_render(
             self.daemon.jsonrpc_stream_update(claim_id, **kwargs), confirm
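With `return_tx=True`, `confirm_and_render` skips `self.sout(tx)` (which JSON-round-trips the transaction into a plain dict) and hands back the raw `Transaction`, so a test can leave a claim unconfirmed and wait on it itself. The new integration test at the end of this commit uses it roughly like this; the claim name and amounts are the test's own, shown here only as a usage sketch:

```python
# usage sketch taken from the new test: create a claim without confirming,
# keep the raw Transaction, and wait on it via the ledger before mining a block
tx = await self.stream_create('foo3', '0.01', channel_id=channel_id,
                              confirm=False, return_tx=True)
await self.ledger.wait(tx)  # needs the Transaction object, not the sout() dict
await self.generate(1)      # confirm everything queued so far in one block
```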
@@ -614,7 +614,8 @@ class BlockProcessor:
             self.db_op_stack.extend(claim.get_invalidate_signature_ops())

         for staged in list(self.txo_to_claim.values()):
-            if staged.signing_hash == claim_hash and staged.claim_hash not in self.doesnt_have_valid_signature:
+            needs_invalidate = staged.claim_hash not in self.doesnt_have_valid_signature
+            if staged.signing_hash == claim_hash and needs_invalidate:
                 self.db_op_stack.extend(staged.get_invalidate_signature_ops())
                 self.txo_to_claim[self.claim_hash_to_txo[staged.claim_hash]] = staged.invalidate_signature()
                 self.signatures_changed.add(staged.claim_hash)
@@ -1173,8 +1174,18 @@ class BlockProcessor:
                 )

             # Handle abandoned claims
+            abandoned_channels = {}
+            # abandon the channels last to handle abandoned signed claims in the same tx,
+            # see test_abandon_channel_and_claims_in_same_tx
             for abandoned_claim_hash, (tx_num, nout, name) in spent_claims.items():
-                # print(f"\tabandon {abandoned_claim_hash.hex()} {tx_num} {nout}")
+                if name.startswith('@'):
+                    abandoned_channels[abandoned_claim_hash] = (tx_num, nout, name)
+                else:
+                    # print(f"\tabandon {name} {abandoned_claim_hash.hex()} {tx_num} {nout}")
+                    self._abandon_claim(abandoned_claim_hash, tx_num, nout, name)
+
+            for abandoned_claim_hash, (tx_num, nout, name) in abandoned_channels.items():
+                # print(f"\tabandon {name} {abandoned_claim_hash.hex()} {tx_num} {nout}")
                 self._abandon_claim(abandoned_claim_hash, tx_num, nout, name)

             self.db.total_transactions.append(tx_hash)
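This ordering change is the heart of the commit: regular claims are abandoned first and channel claims ('@'-prefixed names) are deferred to a second pass, so claims signed by a channel and spent in the same transaction are handled before the channel itself goes away (the situation the removed FIXME in the next hunk describes). A minimal standalone sketch of the same two-pass pattern, using an illustrative function and callback rather than the hub's real API:

```python
def abandon_spent_claims(spent_claims, abandon_claim):
    """Two-pass abandon: regular claims first, channels ('@'-prefixed names) last.

    spent_claims: {claim_hash: (tx_num, nout, name)}
    abandon_claim: callback invoked once per abandoned claim
    """
    abandoned_channels = {}
    for claim_hash, (tx_num, nout, name) in spent_claims.items():
        if name.startswith('@'):
            # defer channels so their signed claims are processed first
            abandoned_channels[claim_hash] = (tx_num, nout, name)
        else:
            abandon_claim(claim_hash, tx_num, nout, name)
    for claim_hash, (tx_num, nout, name) in abandoned_channels.items():
        abandon_claim(claim_hash, tx_num, nout, name)
```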
@@ -219,9 +219,6 @@ class LevelDB:
                     return f'{name}#{k.partial_claim_id}'
                 break
             print(f"{claim_id} has a collision")
-        # FIXME: there are a handful of claims that appear to have short id collisions but really do not
-        # these claims are actually abandoned, but are not handled correctly because they are abandoned in the
-        # same tx as their channel.
         return f'{name}#{claim_id}'

     def _prepare_resolve_result(self, tx_num: int, position: int, claim_hash: bytes, name: str, root_tx_num: int,
@@ -246,8 +243,10 @@ class LevelDB:
         if channel_hash:
             channel_vals = self.get_claim_txo(channel_hash)
             if channel_vals:
-                channel_name = channel_vals.name
-                canonical_url = f'{channel_name}#{channel_hash.hex()}/{name}#{claim_hash.hex()}'
+                channel_short_url = self.get_short_claim_id_url(
+                    channel_vals.name, channel_hash, channel_vals.root_tx_num, channel_vals.root_position
+                )
+                canonical_url = f'{channel_short_url}/{short_url}'
         return ResolveResult(
             name, claim_hash, tx_num, position, tx_hash, height, claim_amount, short_url=short_url,
             is_controlling=controlling_claim.claim_hash == claim_hash, canonical_url=canonical_url,
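`_prepare_resolve_result` now builds the channel half of the canonical url with `get_short_claim_id_url` (the method touched in the previous hunk) instead of interpolating the full channel and claim hashes, so both halves of the canonical url use short ids. A small illustration of the resulting shape, with made-up short urls:

```python
# illustrative values only; real short urls come from get_short_claim_id_url
channel_short_url = '@abc#2'   # channel name plus shortest unambiguous id prefix
short_url = 'foo#9'            # the claim's own short url, computed earlier in the method
canonical_url = f'{channel_short_url}/{short_url}'
assert canonical_url == '@abc#2/foo#9'
# the removed code produced f'{channel_name}#{channel_hash.hex()}/{name}#{claim_hash.hex()}',
# i.e. a canonical url that always carried the full claim ids
```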
@@ -552,11 +551,6 @@ class LevelDB:
             )
         tags = list(set(claim_tags).union(set(reposted_tags)))
         languages = list(set(claim_languages).union(set(reposted_languages)))
-        canonical_url = f'{claim.name}#{claim.claim_hash.hex()}'
-        if metadata.is_signed:
-            channel = self.get_claim_txo(metadata.signing_channel_hash[::-1])
-            if channel:
-                canonical_url = f'{channel.name}#{metadata.signing_channel_hash[::-1].hex()}/{canonical_url}'
         value = {
             'claim_hash': claim_hash[::-1],
             # 'claim_id': claim_hash.hex(),
@@ -576,10 +570,8 @@ class LevelDB:
             'support_amount': claim.support_amount,
             'is_controlling': claim.is_controlling,
             'last_take_over_height': claim.last_takeover_height,
-            'short_url': f'{claim.name}#{claim.claim_hash.hex()}',  # TODO: fix
-            'canonical_url': canonical_url,
+            'short_url': claim.short_url,
+            'canonical_url': claim.canonical_url,
             'title': None if not metadata.is_stream else metadata.stream.title,
             'author': None if not metadata.is_stream else metadata.stream.author,
             'description': None if not metadata.is_stream else metadata.stream.description,
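With the ad-hoc canonical_url computation removed above, the Elasticsearch document now takes both urls straight from the prepared claim (apparently the ResolveResult built by `_prepare_resolve_result`), instead of rebuilding them from full claim ids next to a `# TODO: fix` short url. A minimal stand-in sketch of that mapping; `PreparedClaim` here is a hypothetical placeholder, not the hub's type:

```python
from typing import NamedTuple

class PreparedClaim(NamedTuple):
    # hypothetical stand-in for the prepared resolve result carrying the urls
    short_url: str
    canonical_url: str

def es_url_fields(claim: PreparedClaim) -> dict:
    # the ES document reuses the urls as-is; nothing is recomputed from full claim ids
    return {'short_url': claim.short_url, 'canonical_url': claim.canonical_url}

print(es_url_fields(PreparedClaim(short_url='foo#9', canonical_url='@abc#2/foo#9')))
```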
@@ -130,6 +130,27 @@ class ResolveCommand(BaseResolveTestCase):
         await self.assertResolvesToClaimId(f'@abc#{colliding_claim_ids[1][:17]}', colliding_claim_ids[1])
         await self.assertResolvesToClaimId(f'@abc#{colliding_claim_ids[1]}', colliding_claim_ids[1])

+    async def test_abandon_channel_and_claims_in_same_tx(self):
+        channel_id = self.get_claim_id(
+            await self.channel_create('@abc', '0.01')
+        )
+        await self.stream_create('foo', '0.01', channel_id=channel_id)
+        await self.channel_update(channel_id, bid='0.001')
+        foo2_id = self.get_claim_id(await self.stream_create('foo2', '0.01', channel_id=channel_id))
+        await self.stream_update(foo2_id, bid='0.0001', channel_id=channel_id, confirm=False)
+        tx = await self.stream_create('foo3', '0.01', channel_id=channel_id, confirm=False, return_tx=True)
+        await self.ledger.wait(tx)
+
+        # db = self.conductor.spv_node.server.bp.db
+        # claims = list(db.all_claims_producer())
+        # print("claims", claims)
+        await self.daemon.jsonrpc_txo_spend(blocking=True)
+        await self.generate(1)
+        await self.assertNoClaimForName('@abc')
+        await self.assertNoClaimForName('foo')
+        await self.assertNoClaimForName('foo2')
+        await self.assertNoClaimForName('foo3')
+
     async def test_resolve_response(self):
         channel_id = self.get_claim_id(
             await self.channel_create('@abc', '0.01')