lbry-sdk/lbry/lbrynet/wallet/resolve.py
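
"""Claim / URI resolution for the wallet component.

Takes the raw resolve responses returned by the wallet server for one or more
lbry:// URIs, verifies claimtrie proofs against the header chain, validates
channel signatures, and normalizes the results into the dictionaries returned
to callers of resolve.
"""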


import logging
import asyncio
from binascii import unhexlify, hexlify
from lbrynet.utils import lru_cache_concurrent
from lbrynet.wallet.account import validate_claim_id
from lbrynet.wallet.dewies import dewies_to_lbc
from lbrynet.error import UnknownNameError, UnknownClaimID, UnknownURI, UnknownOutpoint
from lbrynet.schema.claim import Claim
from google.protobuf.message import DecodeError
from lbrynet.schema.url import URL
from lbrynet.wallet.claim_proofs import verify_proof, InvalidProofError


log = logging.getLogger(__name__)


class Resolver:

    def __init__(self, ledger):
        self.transaction_class = ledger.transaction_class
        self.network = ledger.network
        self.ledger = ledger

    async def resolve(self, page, page_size, *uris):
        uris = set(uris)
        try:
            for uri in uris:
                for part in URL.parse(uri).parts:
                    if part.claim_id:
                        validate_claim_id(part.claim_id)
            claim_trie_root = self.ledger.headers.claim_trie_root
            resolutions = await self.network.get_values_for_uris(self.ledger.headers.hash().decode(), *uris)
            if len(uris) > 1:
                return await self._batch_handle(resolutions, uris, page, page_size, claim_trie_root)
            return await self._handle_resolutions(resolutions, uris, page, page_size, claim_trie_root)
        except ValueError as err:
            return {'error': err.args[0]}
        except Exception as e:
            log.exception(e)
            return {'error': str(e)}
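
    # Hypothetical usage sketch (names illustrative, not part of this module):
    #
    #     resolver = Resolver(ledger)
    #     results = await resolver.resolve(1, 10, 'lbry://@some-channel/some-stream')
    #     # -> {'lbry://@some-channel/some-stream': {'claim': {...}, 'certificate': {...}}}
    #
    # URIs the server cannot resolve come back as per-URI {'error': ...} entries,
    # while parse/validation failures (e.g. a malformed claim id) short-circuit the
    # whole call into a single top-level {'error': ...} dict.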

    async def _batch_handle(self, resolutions, uris, page, page_size, claim_trie_root):
        futs = []
        for uri in uris:
            futs.append(
                asyncio.ensure_future(self._handle_resolutions(resolutions, [uri], page, page_size, claim_trie_root))
            )
        results = await asyncio.gather(*futs)
        return dict(list(map(lambda result: list(result.items())[0], results)))

    @lru_cache_concurrent(256)
    async def _fetch_tx(self, txid):
        return self.transaction_class(unhexlify(await self.network.get_transaction(txid)))

    async def _handle_resolutions(self, resolutions, requested_uris, page, page_size, claim_trie_root):
        results = {}
        for uri in requested_uris:
            resolution = (resolutions or {}).get(uri, {})
            if resolution:
                try:
                    results[uri] = _handle_claim_result(
                        await self._handle_resolve_uri_response(uri, resolution, claim_trie_root, page, page_size),
                        uri
                    )
                except (UnknownNameError, UnknownClaimID, UnknownURI) as err:
                    log.exception(err)
                    results[uri] = {'error': str(err)}
            else:
                results[uri] = {'error': "URI lbry://{} cannot be resolved".format(uri.replace("lbry://", ""))}
        return results

    async def _handle_resolve_uri_response(self, uri, resolution, claim_trie_root, page=0, page_size=10):
        result = {}
        parsed_uri = URL.parse(uri)
        certificate_response = None
        # parse an included certificate
        if 'certificate' in resolution:
            certificate_response = resolution['certificate']['result']
            certificate_resolution_type = resolution['certificate']['resolution_type']
            if certificate_resolution_type == "winning" and certificate_response:
                if 'height' in certificate_response:
                    certificate_response = _verify_proof(parsed_uri.stream.name,
                                                         claim_trie_root,
                                                         certificate_response,
                                                         ledger=self.ledger)
            elif certificate_resolution_type not in ['winning', 'claim_id', 'sequence']:
                raise Exception(f"unknown response type: {certificate_resolution_type}")
            result['certificate'] = await self.parse_and_validate_claim_result(certificate_response)
            result['claims_in_channel'] = len(resolution.get('unverified_claims_in_channel', []))

        # if this was a resolution for a name, parse the result
        if 'claim' in resolution:
            claim_response = resolution['claim']['result']
            claim_resolution_type = resolution['claim']['resolution_type']
            if claim_resolution_type == "winning" and claim_response:
                if 'height' in claim_response:
                    claim_response = _verify_proof(parsed_uri.stream.name,
                                                   claim_trie_root,
                                                   claim_response,
                                                   ledger=self.ledger)
            elif claim_resolution_type not in ["sequence", "winning", "claim_id"]:
                raise Exception(f"unknown response type: {claim_resolution_type}")
            result['claim'] = await self.parse_and_validate_claim_result(claim_response,
                                                                         certificate_response)
        # if this was a resolution for a name in a channel make sure there is only one valid
        # match
        elif 'unverified_claims_for_name' in resolution and 'certificate' in result:
            unverified_claims_for_name = resolution['unverified_claims_for_name']
            channel_info = await self.get_channel_claims_page(unverified_claims_for_name,
                                                              result['certificate'], page=1)
            claims_in_channel, upper_bound = channel_info
            if not claims_in_channel:
                log.error("No valid claims for this name for this channel")
            elif len(claims_in_channel) > 1:
                log.warning("Multiple signed claims for the same name.")
                winner = pick_winner_from_channel_path_collision(claims_in_channel)
                if winner:
                    result['claim'] = winner
                else:
                    log.error("No valid claims for this name for this channel")
            else:
                result['claim'] = claims_in_channel[0]
        # parse and validate claims in a channel iteratively into pages of results
        elif 'unverified_claims_in_channel' in resolution and 'certificate' in result:
            ids_to_check = resolution['unverified_claims_in_channel']
            channel_info = await self.get_channel_claims_page(ids_to_check, result['certificate'],
                                                              page=page, page_size=page_size)
            claims_in_channel, upper_bound = channel_info
            if claims_in_channel:
                result['total_claims'] = upper_bound
                result['claims_in_channel'] = claims_in_channel
        elif 'error' not in result:
            return {'error': 'claim not found', 'success': False, 'uri': str(parsed_uri)}
        # invalid signatures can only return outside a channel
        if result.get('claim', {}).get('has_signature', False):
            if parsed_uri.has_stream and not result['claim']['signature_is_valid']:
                return {'error': 'claim not found', 'success': False, 'uri': str(parsed_uri)}
        return result

    async def parse_and_validate_claim_result(self, claim_result, certificate=None):
        if not claim_result or 'value' not in claim_result:
            return claim_result
        claim_result = _decode_claim_result(claim_result)
        if claim_result.get('height'):
            claim_result['timestamp'] = self.ledger.headers[claim_result['height']]['timestamp']
        if claim_result.get('depth'):
            claim_result['confirmations'] = claim_result.pop('depth')
        if claim_result['value']:
            claim_result['has_signature'] = False
            if claim_result['value'].is_signed:
                claim_result['has_signature'] = True
                claim_tx = await self._fetch_tx(claim_result['txid'])
                if certificate is None:
                    log.info("fetching certificate to check claim signature")
                    channel_id = claim_result['value'].signing_channel_id
                    certificate = (await self.network.get_claims_by_ids([channel_id])).get(channel_id)
                    if not certificate:
                        log.warning('Certificate %s not found', channel_id)
                claim_result['channel_name'] = certificate['name'] if certificate else None
                cert_tx = await self._fetch_tx(certificate['txid']) if certificate else None
                claim_result['signature_is_valid'] = validate_claim_signature_and_get_channel_name(
                    claim_result, certificate, self.ledger, claim_tx=claim_tx, cert_tx=cert_tx
                )
                # fixme: workaround while json encoder isn't used here
                if cert_tx:
                    channel_txo = cert_tx.outputs[certificate['nout']]
                    claim_result['signing_channel'] = {
                        'name': channel_txo.claim_name,
                        'claim_id': channel_txo.claim_id,
                        'value': channel_txo.claim
                    }
                    claim_result['is_channel_signature_valid'] = claim_result['signature_is_valid']
        if 'amount' in claim_result:
            claim_result['amount'] = dewies_to_lbc(claim_result['amount'])
            claim_result['effective_amount'] = dewies_to_lbc(claim_result['effective_amount'])
            claim_result['supports'] = [
                {'txid': txid, 'nout': nout, 'amount': dewies_to_lbc(amount)}
                for (txid, nout, amount) in claim_result['supports']
            ]
        claim_result['height'] = claim_result.get('height', -1) or -1
        claim_result['permanent_url'] = f"lbry://{claim_result['name']}#{claim_result['claim_id']}"
        return claim_result
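
    # Note: amounts arrive from the server in dewies, the smallest LBC unit
    # (1 LBC = 10**8 dewies); dewies_to_lbc converts them into decimal LBC
    # strings for 'amount', 'effective_amount' and each support entry.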

    @staticmethod
    def prepare_claim_queries(start_position, query_size, channel_claim_infos):
        queries = [tuple()]
        names = {}
        # a table of index counts for the sorted claim ids, including ignored claims
        absolute_position_index = {}

        block_sorted_infos = sorted(channel_claim_infos.items(), key=lambda x: int(x[1][1]))
        per_block_infos = {}
        for claim_id, (name, height) in block_sorted_infos:
            claims = per_block_infos.get(height, [])
            claims.append((claim_id, name))
            per_block_infos[height] = sorted(claims, key=lambda x: int(x[0], 16))

        abs_position = 0
        for height in sorted(per_block_infos.keys(), reverse=True):
            for claim_id, name in per_block_infos[height]:
                names[claim_id] = name
                absolute_position_index[claim_id] = abs_position
                if abs_position >= start_position:
                    if len(queries[-1]) >= query_size:
                        queries.append(tuple())
                    queries[-1] += (claim_id,)
                abs_position += 1
        return queries, names, absolute_position_index
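
    # Worked example (hypothetical, abbreviated claim ids): with query_size=2,
    # start_position=0 and
    #     channel_claim_infos = {'aa': ('one', 101), 'bb': ('two', 100), 'cc': ('three', 100)}
    # heights are walked newest-first and ids ordered numerically within a block, so:
    #     queries == [('aa', 'bb'), ('cc',)]
    #     names == {'aa': 'one', 'bb': 'two', 'cc': 'three'}
    #     absolute_position_index == {'aa': 0, 'bb': 1, 'cc': 2}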

    async def iter_channel_claims_pages(self, queries, claim_positions, claim_names, certificate,
                                        page_size=10):
        # lbryum server returns a dict of {claim_id: (name, claim_height)}
        # first, sort the claims by block height (and by claim id int value within a block).
        # map the sorted claims into getclaimsbyids queries of query_size claim ids each
        # send the batched queries to lbryum server and iteratively validate and parse
        # the results, yield a page of results at a time.
        # these results can include those where `signature_is_valid` is False. if they are skipped,
        # page indexing becomes tricky, as the number of results isn't known until after having
        # processed them.
        # TODO: fix ^ in lbrynet.schema

        async def iter_validate_channel_claims():
            formatted_claims = []
            for claim_ids in queries:
                batch_result = await self.network.get_claims_by_ids(claim_ids)
                for claim_id in claim_ids:
                    claim = batch_result[claim_id]
                    if claim['name'] == claim_names[claim_id]:
                        formatted_claim = await self.parse_and_validate_claim_result(claim, certificate)
                        formatted_claim['absolute_channel_position'] = claim_positions[
                            claim['claim_id']]
                        formatted_claims.append(formatted_claim)
                    else:
                        log.warning("ignoring claim with name mismatch %s %s", claim['name'],
                                    claim['claim_id'])
            return formatted_claims

        results = []
        for claim in await iter_validate_channel_claims():
            results.append(claim)
            # if there is a full page of results, yield it
            if len(results) and len(results) % page_size == 0:
                return results[-page_size:]
        return results

    async def get_channel_claims_page(self, channel_claim_infos, certificate, page, page_size=10):
        page = page or 0
        page_size = max(page_size, 1)
        if page_size > 500:
            raise Exception("page size above maximum allowed")
        start_position = (page - 1) * page_size
        queries, names, claim_positions = self.prepare_claim_queries(start_position, page_size,
                                                                     channel_claim_infos)
        upper_bound = len(claim_positions)
        if not page:
            return None, upper_bound
        if start_position > upper_bound:
            raise IndexError("claim %i greater than max %i" % (start_position, upper_bound))
        page_generator = await self.iter_channel_claims_pages(queries, claim_positions, names,
                                                              certificate, page_size=page_size)
        return page_generator, upper_bound
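
    # Pagination note (behavior as implemented above): pages are 1-based, so
    # page=1 with page_size=10 covers absolute channel positions 0-9; passing
    # page=0 (or None) short-circuits and returns (None, upper_bound), i.e. only
    # the total number of claims, without fetching or validating any of them.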


def _verify_proof(name, claim_trie_root, result, ledger):
    """
    Verify proof for name claim
    """
    support_amount = sum([amt for (stxid, snout, amt) in result['supports']])

    def _build_response(name, tx, nOut):
        output = tx.outputs[nOut]
        r = {
            'name': name,
            'value': hexlify(output.script.values['claim']),
            'claim_id': output.claim_id,
            'txid': tx.id,
            'nout': nOut,
            'amount': output.amount,
            'effective_amount': output.amount + support_amount,
            'height': result['height'],
            'confirmations': result['depth'],
            'claim_sequence': result['claim_sequence'],
            'address': output.get_address(ledger),
            'valid_at_height': result['valid_at_height'],
            'timestamp': ledger.headers[result['height']]['timestamp'],
            'supports': result['supports']
        }
        return r

    def _parse_proof_result(name, result):
        if 'txhash' in result['proof'] and 'nOut' in result['proof']:
            if 'transaction' in result:
                tx = ledger.transaction_class(raw=unhexlify(result['transaction']))
                nOut = result['proof']['nOut']
                if result['proof']['txhash'] == tx.id:
                    if 0 <= nOut < len(tx.outputs):
                        if tx.outputs[nOut].claim_name == name:
                            return _build_response(name, tx, nOut)
                        return {'error': 'name in proof did not match requested name'}
                    outputs = len(tx.outputs)
                    return {'error': 'invalid nOut: %d (len(outputs): %d)' % (nOut, outputs)}
return {'error': "computed txid did not match given transaction: %s vs %s" %
2018-07-15 20:49:14 +02:00
(tx.id, result['proof']['txhash'])
}
return {'error': "didn't receive a transaction with the proof"}
return {'error': 'name is not claimed'}
if 'proof' in result:
2019-03-31 22:25:59 +02:00
name = result.get('name', name)
proof_name = result.get('normalized_name', name)
try:
verify_proof(result['proof'], claim_trie_root, proof_name)
except InvalidProofError:
return {'error': "Proof was invalid"}
return _parse_proof_result(name, result)
else:
return {'error': "proof not in result"}


def validate_claim_signature_and_get_channel_name(claim_result, certificate_claim, ledger,
                                                  claim_tx=None, cert_tx=None):
    valid_signature = False
    if cert_tx and certificate_claim and claim_tx and claim_result:
        valid_signature = claim_tx.outputs[claim_result['nout']].is_signed_by(
            cert_tx.outputs[certificate_claim['nout']], ledger
        )
    if not valid_signature:
        log.warning("lbry://%s#%s has an invalid signature",
                    claim_result['name'], claim_result['claim_id'])
    return valid_signature


# TODO: The following came from code handling lbryum results. Now that it's all in one place a refactor should unify it.

def _decode_claim_result(claim):
    if 'decoded_claim' in claim:
        return claim
    if 'value' not in claim:
        log.warning('Got an invalid claim while parsing, please report: %s', claim)
        claim['protobuf'] = None
        claim['value'] = None
        backend_message = ' SDK message: ' + claim.get('error', '')
        claim['error'] = "Failed to parse: missing value." + backend_message
        return claim
    try:
        if not isinstance(claim['value'], Claim):
            claim['value'] = Claim.from_bytes(unhexlify(claim['value']))
        claim['protobuf'] = hexlify(claim['value'].to_bytes())
        claim['decoded_claim'] = True
    except DecodeError:
        claim['decoded_claim'] = False
        claim['protobuf'] = claim['value']
        claim['value'] = None
    return claim


def _handle_claim_result(results, uri):
    if not results:
        raise UnknownURI(uri)
    if 'error' in results:
        if results['error'] in ['name is not claimed', 'claim not found']:
            if 'claim_id' in results:
                raise UnknownClaimID(results['claim_id'])
            if 'name' in results:
                raise UnknownNameError(results['name'])
            if 'uri' in results:
                raise UnknownURI(results['uri'])
            if 'outpoint' in results:
                raise UnknownOutpoint(results['outpoint'])
        raise Exception(results['error'])
    if not {'value', 'claim', 'certificate'}.intersection(results.keys()):
        raise Exception(f'result in unexpected format:{results}')
    return results


def pick_winner_from_channel_path_collision(claims_in_channel):
    # we should be doing this by effective amount so we pick the controlling claim, however changing the resolved
    # claim triggers another issue where 2 claims can't be saved for the same file. This code picks the oldest, so it
    # stays the same. Using effective amount would change the resolved claims for a channel path on takeovers,
    # potentially triggering that.
    winner = {}
    for claim in claims_in_channel:
        if not winner or claim['height'] < winner['height'] or \
                (claim['height'] == winner['height'] and claim['nout'] < winner['nout']):
            winner = claim if claim['signature_is_valid'] else winner
    return winner or None
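
# Example (hypothetical claims): given two validly signed claims at heights 4000
# and 4100, the height-4000 claim wins; at equal heights the claim with the lower
# nout wins. Claims whose signature_is_valid is False are never selected, so a
# list containing no validly signed claim yields None.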