2018-07-12 03:31:50 +02:00
|
|
|
import logging
|
|
|
|
|
2019-03-23 05:07:22 +01:00
|
|
|
from cryptography.exceptions import InvalidSignature
|
2018-07-15 21:23:31 +02:00
|
|
|
from binascii import unhexlify, hexlify
|
2019-03-18 23:15:02 +01:00
|
|
|
from lbrynet.wallet.dewies import dewies_to_lbc
|
2019-01-22 18:55:02 +01:00
|
|
|
from lbrynet.error import UnknownNameError, UnknownClaimID, UnknownURI, UnknownOutpoint
|
2019-03-18 23:15:02 +01:00
|
|
|
from lbrynet.schema.claim import Claim
|
2019-03-20 06:46:23 +01:00
|
|
|
from google.protobuf.message import DecodeError
|
2018-09-17 22:31:44 +02:00
|
|
|
from lbrynet.schema.uri import parse_lbry_uri
|
2019-03-18 23:15:02 +01:00
|
|
|
from lbrynet.wallet.claim_proofs import verify_proof, InvalidProofError
|
2019-03-22 00:46:37 +01:00
|
|
|
from lbrynet.wallet.transaction import Transaction
|
|
|
|
|
2018-07-12 03:31:50 +02:00
|
|
|
log = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
2018-07-22 00:34:59 +02:00
|
|
|
class Resolver:
    """Resolves lbry:// URIs against claimtrie state fetched from a wallet server."""

    def __init__(self, claim_trie_root, height, transaction_class, hash160_to_address, network, ledger):
        # Snapshot of chain state used when verifying claimtrie proofs.
        self.claim_trie_root = claim_trie_root
        self.height = height
        # Hooks into the wallet layer: tx deserialization, address derivation,
        # server RPC access and the ledger used for signature checks.
        self.transaction_class = transaction_class
        self.hash160_to_address = hash160_to_address
        self.network = network
        self.ledger = ledger
|
|
|
2018-10-15 23:16:43 +02:00
|
|
|
async def _handle_resolutions(self, resolutions, requested_uris, page, page_size):
    """Map each requested URI to its parsed resolution result (or an error dict)."""
    resolved = {}
    available = resolutions or {}
    for uri in requested_uris:
        entry = available.get(uri, {})
        if not entry:
            # Server returned nothing for this URI.
            resolved[uri] = {'error': "URI lbry://{} cannot be resolved".format(uri.replace("lbry://", ""))}
            continue
        try:
            response = await self._handle_resolve_uri_response(uri, entry, page, page_size)
            resolved[uri] = _handle_claim_result(response)
        except (UnknownNameError, UnknownClaimID, UnknownURI) as err:
            # Known "not found" conditions are reported per-URI rather than propagated.
            log.exception(err)
            resolved[uri] = {'error': str(err)}
    return resolved
|
2018-07-12 04:07:20 +02:00
|
|
|
|
2018-10-15 23:16:43 +02:00
|
|
|
async def _handle_resolve_uri_response(self, uri, resolution, page=0, page_size=10, raw=False):
    """
    Turn one raw server resolution payload into a result dict.

    Handles an optional channel certificate, a direct claim, a claim-in-channel
    lookup, and paginated listings of a channel's claims. Returns a dict that may
    contain 'certificate', 'claim', 'claims_in_channel', 'total_claims' or an
    'error'/'success'/'uri' triple when nothing resolved.
    """
    result = {}
    claim_trie_root = self.claim_trie_root
    parsed_uri = parse_lbry_uri(uri)
    certificate = None

    # parse an included certificate, if the server sent one
    if 'certificate' in resolution:
        certificate_response = resolution['certificate']['result']
        certificate_resolution_type = resolution['certificate']['resolution_type']
        if certificate_resolution_type == "winning" and certificate_response:
            if 'height' in certificate_response:
                # winning certificates come with a claimtrie proof — verify it
                height = certificate_response['height']
                depth = self.height - height
                certificate_result = _verify_proof(
                    parsed_uri.name, claim_trie_root, certificate_response, height, depth,
                    transaction_class=self.transaction_class,
                    hash160_to_address=self.hash160_to_address
                )
                result['certificate'] = await self.parse_and_validate_claim_result(
                    certificate_result, raw=raw
                )
        elif certificate_resolution_type in ("claim_id", "sequence"):
            # direct lookups carry no proof; just parse/validate the response
            result['certificate'] = await self.parse_and_validate_claim_result(
                certificate_response, raw=raw
            )
        else:
            log.error("unknown response type: %s", certificate_resolution_type)

        if 'certificate' in result:
            certificate = result['certificate']
            if 'unverified_claims_in_channel' in resolution:
                result['claims_in_channel'] = len(resolution['unverified_claims_in_channel'])
            else:
                result['claims_in_channel'] = 0
        else:
            result['error'] = "claim not found"
            result['success'] = False
            result['uri'] = str(parsed_uri)

    # if this was a resolution for a name, parse the result
    if 'claim' in resolution:
        claim_response = resolution['claim']['result']
        claim_resolution_type = resolution['claim']['resolution_type']
        if claim_resolution_type == "winning" and claim_response:
            if 'height' in claim_response:
                height = claim_response['height']
                depth = self.height - height
                claim_result = _verify_proof(
                    parsed_uri.name, claim_trie_root, claim_response, height, depth,
                    transaction_class=self.transaction_class,
                    hash160_to_address=self.hash160_to_address
                )
                result['claim'] = await self.parse_and_validate_claim_result(
                    claim_result, certificate, raw
                )
        elif claim_resolution_type in ("claim_id", "sequence"):
            result['claim'] = await self.parse_and_validate_claim_result(
                claim_response, certificate, raw
            )
        else:
            log.error("unknown response type: %s", claim_resolution_type)

    # if this was a resolution for a name in a channel make sure there is only one
    # valid match
    elif 'unverified_claims_for_name' in resolution and 'certificate' in result:
        unverified_claims_for_name = resolution['unverified_claims_for_name']
        claims_in_channel, upper_bound = await self.get_channel_claims_page(
            unverified_claims_for_name, result['certificate'], page=1
        )
        if not claims_in_channel:
            log.error("No valid claims for this name for this channel")
        elif len(claims_in_channel) > 1:
            log.warning("Multiple signed claims for the same name.")
            winner = pick_winner_from_channel_path_collision(claims_in_channel)
            if winner:
                result['claim'] = winner
            else:
                log.error("No valid claims for this name for this channel")
        else:
            result['claim'] = claims_in_channel[0]

    # parse and validate claims in a channel iteratively into pages of results
    elif 'unverified_claims_in_channel' in resolution and 'certificate' in result:
        ids_to_check = resolution['unverified_claims_in_channel']
        claims_in_channel, upper_bound = await self.get_channel_claims_page(
            ids_to_check, result['certificate'], page=page, page_size=page_size
        )
        if claims_in_channel:
            result['total_claims'] = upper_bound
            result['claims_in_channel'] = claims_in_channel

    elif 'error' not in result:
        return {'error': 'claim not found', 'success': False, 'uri': str(parsed_uri)}

    # invalid signatures can only return outside a channel
    if result.get('claim', {}).get('has_signature', False):
        if parsed_uri.path and not result['claim']['signature_is_valid']:
            return {'error': 'claim not found', 'success': False, 'uri': str(parsed_uri)}
    return result
|
2018-07-12 04:07:20 +02:00
|
|
|
|
2018-10-15 23:16:43 +02:00
|
|
|
async def get_certificate_and_validate_result(self, claim_result):
    """Fetch the signing certificate for a claim (if any) and validate the claim with it."""
    if not claim_result or 'value' not in claim_result:
        # nothing to validate — echo the input back
        return claim_result
    certificate = None
    certificate_id = Claim.from_bytes(unhexlify(claim_result['value'])).signing_channel_id
    if certificate_id:
        fetched = await self.network.get_claims_by_ids(certificate_id)
        certificate = fetched.pop(certificate_id) if fetched else None
    return await self.parse_and_validate_claim_result(claim_result, certificate=certificate)
|
2018-08-28 02:03:08 +02:00
|
|
|
|
2018-11-07 16:05:49 +01:00
|
|
|
async def parse_and_validate_claim_result(self, claim_result, certificate=None, raw=False):
    """
    Decode a claim's hex value and, when it is signed, verify the channel signature.

    Annotates ``claim_result`` in place with 'decoded_claim', 'has_signature',
    'signature_is_valid', 'channel_name' and 'permanent_url'; also normalizes
    a missing 'height' to -1 and formats amount fields.
    """
    if not claim_result or 'value' not in claim_result:
        return claim_result

    claim_result['decoded_claim'] = False
    decoded = None

    if not raw:
        raw_value = claim_result['value']
        try:
            decoded = claim_result['value'] = Claim.from_bytes(unhexlify(raw_value))
            claim_result['decoded_claim'] = True
        except DecodeError:
            # leave the undecoded hex value in place
            pass

    if decoded:
        claim_result['has_signature'] = False
        if decoded.is_signed:
            claim_tx = await self.network.get_transaction(claim_result['txid'])
            if certificate is None:
                log.info("fetching certificate to check claim signature")
                certificate = await self.network.get_claims_by_ids(decoded.signing_channel_id)
                if not certificate:
                    log.warning('Certificate %s not found', decoded.signing_channel_id)
            cert_tx = await self.network.get_transaction(certificate['txid']) if certificate else None
            claim_result['has_signature'] = True
            claim_result['signature_is_valid'] = False
            validated, channel_name = validate_claim_signature_and_get_channel_name(
                claim_result, certificate, self.ledger, claim_tx=claim_tx, cert_tx=cert_tx
            )
            claim_result['channel_name'] = channel_name
            if validated:
                claim_result['signature_is_valid'] = True

    # lbryum servers report unconfirmed claims with a null height
    if 'height' in claim_result and claim_result['height'] is None:
        claim_result['height'] = -1

    if 'amount' in claim_result:
        claim_result = format_amount_value(claim_result)

    claim_result['permanent_url'] = _get_permanent_url(
        claim_result, decoded.signing_channel_id if decoded else None)

    return claim_result
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def prepare_claim_queries(start_position, query_size, channel_claim_infos):
|
|
|
|
queries = [tuple()]
|
|
|
|
names = {}
|
|
|
|
# a table of index counts for the sorted claim ids, including ignored claims
|
|
|
|
absolute_position_index = {}
|
|
|
|
|
2018-07-15 05:02:19 +02:00
|
|
|
block_sorted_infos = sorted(channel_claim_infos.items(), key=lambda x: int(x[1][1]))
|
2018-07-12 04:07:20 +02:00
|
|
|
per_block_infos = {}
|
|
|
|
for claim_id, (name, height) in block_sorted_infos:
|
|
|
|
claims = per_block_infos.get(height, [])
|
|
|
|
claims.append((claim_id, name))
|
|
|
|
per_block_infos[height] = sorted(claims, key=lambda x: int(x[0], 16))
|
|
|
|
|
|
|
|
abs_position = 0
|
|
|
|
|
|
|
|
for height in sorted(per_block_infos.keys(), reverse=True):
|
|
|
|
for claim_id, name in per_block_infos[height]:
|
|
|
|
names[claim_id] = name
|
|
|
|
absolute_position_index[claim_id] = abs_position
|
|
|
|
if abs_position >= start_position:
|
|
|
|
if len(queries[-1]) >= query_size:
|
|
|
|
queries.append(tuple())
|
|
|
|
queries[-1] += (claim_id,)
|
|
|
|
abs_position += 1
|
|
|
|
return queries, names, absolute_position_index
|
|
|
|
|
2018-10-15 23:16:43 +02:00
|
|
|
async def iter_channel_claims_pages(self, queries, claim_positions, claim_names, certificate,
                                    page_size=10):
    """
    Fetch, validate and parse batched channel claims, returning at most one page.

    The lbryum server returns {claim_id: (name, claim_height)}; ``queries`` are the
    pre-batched getclaimsbyids argument tuples. Results can include claims whose
    ``signature_is_valid`` is False; skipping them would make page indexing tricky
    because the result count isn't known until after processing.
    """
    # TODO: fix ^ in lbrynet.schema

    async def validate_batches():
        # run every batched query and parse/validate each returned claim
        parsed = []
        for claim_ids in queries:
            batch_result = await self.network.get_claims_by_ids(*claim_ids)
            for claim_id in claim_ids:
                claim = batch_result[claim_id]
                if claim['name'] != claim_names[claim_id]:
                    log.warning("ignoring claim with name mismatch %s %s", claim['name'],
                                claim['claim_id'])
                    continue
                formatted = await self.parse_and_validate_claim_result(claim, certificate)
                formatted['absolute_channel_position'] = claim_positions[claim['claim_id']]
                parsed.append(formatted)
        return parsed

    results = []
    for claim in await validate_batches():
        results.append(claim)
        # if there is a full page of results, return it
        if len(results) and len(results) % page_size == 0:
            return results[-page_size:]
    return results
|
2018-07-12 04:07:20 +02:00
|
|
|
|
2018-10-15 23:16:43 +02:00
|
|
|
async def get_channel_claims_page(self, channel_claim_infos, certificate, page, page_size=10):
    """
    Return one page of a channel's claims plus the total claim count.

    ``page`` is 1-based; page 0 (or falsy) returns (None, total). Raises if the
    requested page size exceeds 500 or the start offset is past the last claim.
    """
    page = page or 0
    page_size = max(page_size, 1)
    if page_size > 500:
        raise Exception("page size above maximum allowed")
    start_position = (page - 1) * page_size
    queries, names, claim_positions = self.prepare_claim_queries(
        start_position, page_size, channel_claim_infos)
    page_of_claims = await self.iter_channel_claims_pages(
        queries, claim_positions, names, certificate, page_size=page_size)
    upper_bound = len(claim_positions)
    if not page:
        # caller only wanted the total count
        return None, upper_bound
    if start_position > upper_bound:
        raise IndexError("claim %i greater than max %i" % (start_position, upper_bound))
    return page_of_claims, upper_bound
|
2018-07-12 04:07:20 +02:00
|
|
|
|
|
|
|
|
2018-07-12 03:31:50 +02:00
|
|
|
# Format amount to be decimal encoded string
# Format value to be hex encoded string
# TODO: refactor. Came from lbryum, there could be another part of torba doing it
def format_amount_value(obj):
    """Recursively convert integer dewies amounts in ``obj`` into LBC decimal strings."""
    if isinstance(obj, list):
        return [format_amount_value(item) for item in obj]
    if isinstance(obj, dict):
        for key, value in obj.items():
            if key in ('amount', 'effective_amount'):
                # floats are assumed to already be formatted
                if not isinstance(obj[key], float):
                    obj[key] = dewies_to_lbc(obj[key])
            elif key == 'supports' and isinstance(value, list):
                obj[key] = [{'txid': txid, 'nout': nout, 'amount': dewies_to_lbc(amount)}
                            for (txid, nout, amount) in value]
            elif isinstance(value, (list, dict)):
                obj[key] = format_amount_value(value)
    return obj
|
|
|
|
|
|
|
|
|
2018-12-21 20:18:44 +01:00
|
|
|
def _get_permanent_url(claim_result, certificate_id):
|
2019-01-18 03:37:37 +01:00
|
|
|
if certificate_id:
|
|
|
|
return f"{claim_result['channel_name']}#{certificate_id}/{claim_result['name']}"
|
2018-07-12 03:31:50 +02:00
|
|
|
else:
|
2019-01-18 03:37:37 +01:00
|
|
|
return f"{claim_result['name']}#{claim_result['claim_id']}"
|
2018-07-12 03:31:50 +02:00
|
|
|
|
|
|
|
|
2018-07-12 04:07:20 +02:00
|
|
|
def _verify_proof(name, claim_trie_root, result, height, depth, transaction_class, hash160_to_address):
|
2018-07-12 03:31:50 +02:00
|
|
|
"""
|
|
|
|
Verify proof for name claim
|
|
|
|
"""
|
|
|
|
|
|
|
|
def _build_response(name, value, claim_id, txid, n, amount, effective_amount,
|
|
|
|
claim_sequence, claim_address, supports):
|
|
|
|
r = {
|
|
|
|
'name': name,
|
2018-07-15 21:23:31 +02:00
|
|
|
'value': hexlify(value),
|
2018-07-12 03:31:50 +02:00
|
|
|
'claim_id': claim_id,
|
|
|
|
'txid': txid,
|
|
|
|
'nout': n,
|
|
|
|
'amount': amount,
|
|
|
|
'effective_amount': effective_amount,
|
|
|
|
'height': height,
|
|
|
|
'depth': depth,
|
|
|
|
'claim_sequence': claim_sequence,
|
|
|
|
'address': claim_address,
|
|
|
|
'supports': supports
|
|
|
|
}
|
|
|
|
return r
|
|
|
|
|
|
|
|
def _parse_proof_result(name, result):
|
|
|
|
support_amount = sum([amt for (stxid, snout, amt) in result['supports']])
|
|
|
|
supports = result['supports']
|
|
|
|
if 'txhash' in result['proof'] and 'nOut' in result['proof']:
|
|
|
|
if 'transaction' in result:
|
|
|
|
tx = transaction_class(raw=unhexlify(result['transaction']))
|
|
|
|
nOut = result['proof']['nOut']
|
2018-07-15 20:49:14 +02:00
|
|
|
if result['proof']['txhash'] == tx.id:
|
2018-07-12 03:31:50 +02:00
|
|
|
if 0 <= nOut < len(tx.outputs):
|
|
|
|
claim_output = tx.outputs[nOut]
|
|
|
|
effective_amount = claim_output.amount + support_amount
|
2018-07-12 04:07:20 +02:00
|
|
|
claim_address = hash160_to_address(claim_output.script.values['pubkey_hash'])
|
2018-07-12 03:31:50 +02:00
|
|
|
claim_id = result['claim_id']
|
|
|
|
claim_sequence = result['claim_sequence']
|
|
|
|
claim_script = claim_output.script
|
2018-07-17 05:32:37 +02:00
|
|
|
decoded_name = claim_script.values['claim_name'].decode()
|
|
|
|
decoded_value = claim_script.values['claim']
|
2018-07-12 03:31:50 +02:00
|
|
|
if decoded_name == name:
|
|
|
|
return _build_response(name, decoded_value, claim_id,
|
2018-07-15 20:49:14 +02:00
|
|
|
tx.id, nOut, claim_output.amount,
|
2018-07-12 03:31:50 +02:00
|
|
|
effective_amount, claim_sequence,
|
|
|
|
claim_address, supports)
|
|
|
|
return {'error': 'name in proof did not match requested name'}
|
|
|
|
outputs = len(tx['outputs'])
|
|
|
|
return {'error': 'invalid nOut: %d (let(outputs): %d' % (nOut, outputs)}
|
|
|
|
return {'error': "computed txid did not match given transaction: %s vs %s" %
|
2018-07-15 20:49:14 +02:00
|
|
|
(tx.id, result['proof']['txhash'])
|
2018-07-12 03:31:50 +02:00
|
|
|
}
|
|
|
|
return {'error': "didn't receive a transaction with the proof"}
|
|
|
|
return {'error': 'name is not claimed'}
|
|
|
|
|
|
|
|
if 'proof' in result:
|
2019-02-14 20:54:23 +01:00
|
|
|
proof_name = name
|
|
|
|
if 'name' in result:
|
|
|
|
proof_name = result['name']
|
|
|
|
name = result['name']
|
|
|
|
if 'normalized_name' in result:
|
|
|
|
proof_name = result['normalized_name']
|
2018-07-12 03:31:50 +02:00
|
|
|
try:
|
2019-02-14 20:54:23 +01:00
|
|
|
verify_proof(result['proof'], claim_trie_root, proof_name)
|
2018-07-12 03:31:50 +02:00
|
|
|
except InvalidProofError:
|
|
|
|
return {'error': "Proof was invalid"}
|
|
|
|
return _parse_proof_result(name, result)
|
|
|
|
else:
|
|
|
|
return {'error': "proof not in result"}
|
|
|
|
|
|
|
|
|
2019-03-23 05:07:22 +01:00
|
|
|
def validate_claim_signature_and_get_channel_name(claim_result, certificate_claim, ledger,
                                                  claim_tx=None, cert_tx=None):
    """
    Check a claim's channel signature against its certificate.

    Returns (is_signed, channel_name); (False, None) when any required piece is
    missing or the signature is invalid.
    """
    if not (cert_tx and certificate_claim and claim_tx and claim_result):
        return False, None
    claim_transaction = Transaction(unhexlify(claim_tx))
    cert_transaction = Transaction(unhexlify(cert_tx))
    try:
        is_signed = claim_transaction.outputs[claim_result['nout']].is_signed_by(
            cert_transaction.outputs[certificate_claim['nout']], ledger
        )
    except InvalidSignature:
        return False, None
    return is_signed, certificate_claim['name']
|
2018-07-12 03:31:50 +02:00
|
|
|
|
|
|
|
|
|
|
|
# TODO: The following came from code handling lbryum results. Now that it's all in one place a refactor should unify it.
def _decode_claim_result(claim):
    """Decode a claim dict's 'value' into a Claim object, annotating errors in place."""
    if claim.get('has_signature'):
        if not claim['signature_is_valid']:
            log.warning("lbry://%s#%s has an invalid signature",
                        claim['name'], claim['claim_id'])
    if 'value' not in claim:
        # malformed server response — record the failure instead of raising
        log.warning('Got an invalid claim while parsing, please report: %s', claim)
        claim['hex'] = None
        claim['value'] = None
        backend_message = ' SDK message: ' + claim['error'] if 'error' in claim else ''
        claim['error'] = "Failed to parse: missing value." + backend_message
        return claim
    try:
        if not isinstance(claim['value'], Claim):
            claim['value'] = Claim.from_bytes(claim['value'])
        claim['hex'] = hexlify(claim['value'].to_bytes())
    except DecodeError:
        claim['hex'] = claim['value']
        claim['value'] = None
        claim['error'] = "Failed to decode value"
    return claim
|
|
|
|
|
2018-11-30 22:11:23 +01:00
|
|
|
|
2018-07-12 03:31:50 +02:00
|
|
|
def _handle_claim_result(results):
|
|
|
|
if not results:
|
|
|
|
#TODO: cannot determine what name we searched for here
|
|
|
|
# we should fix lbryum commands that return None
|
|
|
|
raise UnknownNameError("")
|
|
|
|
|
|
|
|
if 'error' in results:
|
|
|
|
if results['error'] in ['name is not claimed', 'claim not found']:
|
|
|
|
if 'claim_id' in results:
|
|
|
|
raise UnknownClaimID(results['claim_id'])
|
2019-02-28 18:04:02 +01:00
|
|
|
if 'name' in results:
|
2018-07-12 03:31:50 +02:00
|
|
|
raise UnknownNameError(results['name'])
|
2019-02-28 18:04:02 +01:00
|
|
|
if 'uri' in results:
|
2018-07-12 03:31:50 +02:00
|
|
|
raise UnknownURI(results['uri'])
|
2019-02-28 18:04:02 +01:00
|
|
|
if 'outpoint' in results:
|
2018-07-12 03:31:50 +02:00
|
|
|
raise UnknownOutpoint(results['outpoint'])
|
|
|
|
raise Exception(results['error'])
|
|
|
|
|
|
|
|
# case where return value is {'certificate':{'txid', 'value',...},...}
|
|
|
|
if 'certificate' in results:
|
|
|
|
results['certificate'] = _decode_claim_result(results['certificate'])
|
|
|
|
|
|
|
|
# case where return value is {'claim':{'txid','value',...},...}
|
|
|
|
if 'claim' in results:
|
|
|
|
results['claim'] = _decode_claim_result(results['claim'])
|
|
|
|
|
|
|
|
# case where return value is {'txid','value',...}
|
|
|
|
# returned by queries that are not name resolve related
|
|
|
|
# (getclaimbyoutpoint, getclaimbyid, getclaimsfromtx)
|
|
|
|
elif 'value' in results:
|
|
|
|
results = _decode_claim_result(results)
|
|
|
|
|
|
|
|
# case where there is no 'certificate', 'value', or 'claim' key
|
|
|
|
elif 'certificate' not in results:
|
2018-10-18 12:42:45 +02:00
|
|
|
msg = f'result in unexpected format:{results}'
|
2018-07-12 03:31:50 +02:00
|
|
|
assert False, msg
|
|
|
|
|
|
|
|
return results
|
2018-12-07 07:53:52 +01:00
|
|
|
|
|
|
|
|
|
|
|
def pick_winner_from_channel_path_collision(claims_in_channel):
    """Return the oldest validly-signed claim (lowest height, then lowest nout), or None."""
    # we should be doing this by effective amount so we pick the controlling claim, however
    # changing the resolved claim triggers another issue where 2 claims cant be saved for the
    # same file. This code picks the oldest, so it stays the same. Using effective amount
    # would change the resolved claims for a channel path on takeovers, potentially
    # triggering that.
    best = None
    for candidate in claims_in_channel:
        if not candidate['signature_is_valid']:
            continue
        if best is None:
            best = candidate
            continue
        is_older = candidate['height'] < best['height']
        breaks_tie = candidate['height'] == best['height'] and candidate['nout'] < best['nout']
        if is_older or breaks_tie:
            best = candidate
    return best
|