massive progress

This commit is contained in:
Lex Berezhny 2019-04-29 00:38:58 -04:00
parent 03455310ae
commit 9b8be8682c
20 changed files with 363 additions and 694 deletions

View file

@ -35,7 +35,7 @@ from lbrynet.wallet.transaction import Transaction, Output, Input
from lbrynet.wallet.account import Account as LBCAccount
from lbrynet.wallet.dewies import dewies_to_lbc, lbc_to_dewies
from lbrynet.schema.claim import Claim
from lbrynet.schema.uri import parse_lbry_uri, URIParseError
from lbrynet.schema.url import URL
from lbrynet.extras.daemon.comment_client import jsonrpc_batch, jsonrpc_post, rpc_body
@ -868,15 +868,16 @@ class Daemon(metaclass=JSONRPCServerType):
valid_urls = set()
for u in urls:
try:
parse_lbry_uri(u)
URL.parse(u)
valid_urls.add(u)
except URIParseError:
results[u] = {"error": "%s is not a valid url" % u}
except ValueError:
results[u] = {"error": f"{u} is not a valid url"}
resolved = await self.resolve(*tuple(valid_urls))
resolved = await self.resolve(list(valid_urls))
for resolved_uri in resolved:
results[resolved_uri] = resolved[resolved_uri]
results[resolved_uri] = resolved[resolved_uri] if resolved[resolved_uri] is not None else \
{"error": f"{resolved_uri} did not resolve to a claim"}
return results
@ -1671,7 +1672,7 @@ class Daemon(metaclass=JSONRPCServerType):
Usage:
claim_search [<name> | --name=<name>] [--claim_id=<claim_id>] [--txid=<txid> --nout=<nout>]
[--channel_id=<channel_id>] [--channel_name=<channel_name>] [--is_winning] [--page=<page>]
[--channel_id=<channel_id>] [--channel_name=<channel_name>] [--is_controlling] [--page=<page>]
[--page_size=<page_size>]
Options:
@ -1681,7 +1682,7 @@ class Daemon(metaclass=JSONRPCServerType):
--nout=<nout> : (str) find a claim with this txid:nout
--channel_id=<channel_id> : (str) limit search to specific channel claim id (returns stream claims)
--channel_name=<channel_name> : (str) limit search to specific channel name (returns stream claims)
--is_winning : (bool) limit to winning claims
--is_controlling : (bool) limit to controlling claims for their respective name
--page=<page> : (int) page to return during paginating
--page_size=<page_size> : (int) number of items on page during pagination
@ -1689,10 +1690,10 @@ class Daemon(metaclass=JSONRPCServerType):
"""
page_num, page_size = abs(kwargs.pop('page', 1)), min(abs(kwargs.pop('page_size', 10)), 50)
kwargs.update({'offset': page_size * (page_num-1), 'limit': page_size})
page = await self.ledger.claim_search(**kwargs)
txos, offset, total = await self.ledger.claim_search(**kwargs)
return {
"items": page.txos, "page": page_num, "page_size": page_size,
"total_pages": int((page.total + (page_size-1)) / page_size)
"items": txos, "page": page_num, "page_size": page_size,
"total_pages": int((total + (page_size-1)) / page_size)
}
CHANNEL_DOC = """
@ -3371,16 +3372,16 @@ class Daemon(metaclass=JSONRPCServerType):
raise Exception(
"Stream name cannot be blank."
)
parsed = parse_lbry_uri(name)
if parsed.is_channel:
parsed = URL.parse(name)
if parsed.has_channel:
raise Exception(
"Stream names cannot start with '@' symbol. This is reserved for channels claims."
)
if parsed.name != name:
if not parsed.has_stream or parsed.stream.name != name:
raise Exception(
"Stream name has invalid characters."
)
except (TypeError, URIParseError):
except (TypeError, ValueError):
raise Exception("Invalid stream name.")
@staticmethod
@ -3390,12 +3391,12 @@ class Daemon(metaclass=JSONRPCServerType):
raise Exception(
"Channel name cannot be blank."
)
parsed = parse_lbry_uri(name)
if not parsed.is_channel:
parsed = URL.parse(name)
if not parsed.has_channel:
raise Exception("Channel names must start with '@' symbol.")
if parsed.name != name:
if parsed.channel.name != name:
raise Exception("Channel name has invalid character")
except (TypeError, URIParseError):
except (TypeError, ValueError):
raise Exception("Invalid channel name.")
def get_fee_address(self, kwargs: dict, claim_address: str) -> str:
@ -3471,23 +3472,14 @@ class Daemon(metaclass=JSONRPCServerType):
except ValueError as e:
raise ValueError(f"Invalid value for '{argument}': {e.args[0]}")
async def resolve(self, *uris, **kwargs):
page = kwargs.get('page', 0)
page_size = kwargs.get('page_size', 10)
ledger: MainNetLedger = self.default_account.ledger
results = await ledger.resolve(page, page_size, *uris)
if 'error' not in results:
await self.storage.save_claims_for_resolve([
value for value in results.values() if 'error' not in value
])
async def resolve(self, urls):
results = await self.ledger.resolve(urls)
#if 'error' not in results:
# await self.storage.save_claims_for_resolve([
# value for value in results.values() if isinstance(value, Output)
# ])
return results
async def get_claims_for_name(self, name: str):
    """Fetch all claims for *name* from the wallet server and resolve each one.

    Asks the network for the raw claim list, resolves every claim by its
    canonical ``name#claim_id`` url, then replaces the ``claims`` entry of the
    server response with the resolved claim (or certificate) values.

    :param name: claim name to look up.
    :return: the server response dict with ``claims`` replaced by resolved values.
    """
    response = await self.ledger.network.get_claims_for_name(name)
    # NOTE(review): passes urls positionally (*uris) — assumes the old
    # star-args resolve() signature; confirm against the current resolve().
    resolutions = await self.resolve(*(f"{claim['name']}#{claim['claim_id']}" for claim in response['claims']))
    # Each resolution is either a stream ('claim') or a channel ('certificate').
    response['claims'] = [value.get('claim', value.get('certificate')) for value in resolutions.values()]
    return response
def _old_get_temp_claim_info(self, tx, txo, address, claim_dict, name, bid):
return {
"claim_id": txo.claim_id,

View file

@ -3,7 +3,11 @@ from decimal import Decimal
from binascii import hexlify
from datetime import datetime
from json import JSONEncoder
from ecdsa import BadSignatureError
from cryptography.exceptions import InvalidSignature
from google.protobuf.message import DecodeError
from lbrynet.schema.claim import Claim
from lbrynet.wallet.ledger import MainNetLedger, Account
from lbrynet.wallet.transaction import Transaction, Output
@ -167,10 +171,13 @@ class JSONResponseEncoder(JSONEncoder):
if txo.script.is_claim_involved:
output.update({
'name': txo.claim_name,
'normalized': txo.normalized_name,
'claim_id': txo.claim_id,
'permanent_url': txo.permanent_url,
'meta': txo.meta
})
if txo.script.is_claim_name or txo.script.is_update_claim:
try:
output['value'] = txo.claim
output['value_type'] = txo.claim.claim_type
if self.include_protobuf:
@ -178,20 +185,24 @@ class JSONResponseEncoder(JSONEncoder):
if txo.channel is not None:
output['signing_channel'] = {
'name': txo.channel.claim_name,
'normalized': txo.channel.normalized_name,
'claim_id': txo.channel.claim_id,
'value': txo.channel.claim
'value': txo.channel.claim,
'meta': txo.channel.meta
}
if check_signature and txo.claim.is_signed:
output['is_channel_signature_valid'] = False
try:
output['is_channel_signature_valid'] = txo.is_signed_by(txo.channel, self.ledger)
except BadSignatureError:
except (BadSignatureError, InvalidSignature):
pass
except ValueError:
log.exception(
'txo.id: %s, txo.channel.id:%s, output: %s',
txo.id, txo.channel.id, output
)
except DecodeError:
pass
return output
def encode_input(self, txi):

View file

@ -1,71 +0,0 @@
import base64
import struct
from typing import List
from lbrynet.schema.types.v2.page_pb2 import Page as PageMessage
from lbrynet.wallet.transaction import Transaction, Output
class Page:
    """A page of wallet-server search results.

    Wraps the protobuf ``Page`` message: a list of raw transactions plus the
    subset of their outputs that matched the query, with pagination info
    (``offset``/``total``).  Provides symmetric (de)serialization to the
    base64-wrapped wire format used by the wallet server.
    """

    __slots__ = 'txs', 'txos', 'offset', 'total'

    def __init__(self, txs, txos, offset, total):
        # Transactions backing the matched outputs.
        self.txs: List[Transaction] = txs
        # The matched outputs themselves (each belongs to one tx in `txs`).
        self.txos: List[Output] = txos
        self.offset = offset
        self.total = total

    @classmethod
    def from_base64(cls, data: str) -> 'Page':
        """Decode a base64 payload and deserialize it into a Page."""
        return cls.from_bytes(base64.b64decode(data))

    @classmethod
    def from_bytes(cls, data: bytes) -> 'Page':
        """Deserialize a protobuf-encoded page into Transaction/Output objects.

        Transactions are parsed first and indexed by hash so that each txo
        message can be resolved to the already-parsed Output it refers to.
        Claim metadata (winning status, amounts, signing channel) is attached
        to the Output when present.
        """
        page_message = PageMessage()
        page_message.ParseFromString(data)
        tx_map, txo_list = {}, []
        for tx_message in page_message.txs:
            tx = Transaction(tx_message.raw, height=tx_message.height, position=tx_message.position)
            tx_map[tx.hash] = tx
        for txo_message in page_message.txos:
            # Look up the concrete Output inside its parent transaction.
            output = tx_map[txo_message.tx_hash].outputs[txo_message.nout]
            if txo_message.WhichOneof('meta') == 'claim':
                claim = txo_message.claim
                output.meta = {
                    'is_winning': claim.is_winning,
                    'effective_amount': claim.effective_amount,
                    'trending_amount': claim.trending_amount,
                }
                if claim.HasField('channel'):
                    # Signing channel is itself an output in one of the page's txs.
                    output.channel = tx_map[claim.channel.tx_hash].outputs[claim.channel.nout]
            txo_list.append(output)
        return cls(list(tx_map.values()), txo_list, page_message.offset, page_message.total)

    @classmethod
    def to_base64(cls, tx_rows, txo_rows, offset, total) -> str:
        """Serialize database rows into the base64 wire format."""
        return base64.b64encode(cls.to_bytes(tx_rows, txo_rows, offset, total)).decode()

    @classmethod
    def to_bytes(cls, tx_rows, txo_rows, offset, total) -> bytes:
        """Build the protobuf Page message from raw database rows.

        ``txo_hash`` rows are 36 bytes: 32-byte tx hash followed by a
        little-endian uint32 output index, split apart here.
        """
        page = PageMessage()
        page.total = total
        page.offset = offset
        for tx in tx_rows:
            tx_message = page.txs.add()
            tx_message.raw = tx['raw']
            tx_message.height = tx['height']
            tx_message.position = tx['position']
        for txo in txo_rows:
            txo_message = page.txos.add()
            txo_message.tx_hash = txo['txo_hash'][:32]
            txo_message.nout, = struct.unpack('<I', txo['txo_hash'][32:])
            if 'channel_txo_hash' in txo and txo['channel_txo_hash']:
                txo_message.claim.channel.tx_hash = txo['channel_txo_hash'][:32]
                txo_message.claim.channel.nout, = struct.unpack('<I', txo['channel_txo_hash'][32:])
            if 'is_winning' in txo:  # row describes a claim (not a plain output)
                txo_message.claim.is_winning = bool(txo['is_winning'])
                txo_message.claim.activation_height = txo['activation_height']
                txo_message.claim.effective_amount = txo['effective_amount']
                txo_message.claim.trending_amount = txo['trending_amount']
        return page.SerializeToString()

View file

@ -1,273 +0,0 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: page.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='page.proto',
package='pb',
syntax='proto3',
serialized_options=None,
serialized_pb=_b('\n\npage.proto\x12\x02pb\"]\n\x04Page\x12\x18\n\x04txos\x18\x01 \x03(\x0b\x32\n.pb.Output\x12\x1c\n\x03txs\x18\x02 \x03(\x0b\x32\x0f.pb.Transaction\x12\r\n\x05total\x18\x03 \x01(\r\x12\x0e\n\x06offset\x18\x04 \x01(\r\"O\n\x06Output\x12\x0f\n\x07tx_hash\x18\x01 \x01(\x0c\x12\x0c\n\x04nout\x18\x02 \x01(\r\x12\x1e\n\x05\x63laim\x18\x03 \x01(\x0b\x32\r.pb.ClaimMetaH\x00\x42\x06\n\x04meta\"\x8a\x01\n\tClaimMeta\x12\x1b\n\x07\x63hannel\x18\x01 \x01(\x0b\x32\n.pb.Output\x12\x12\n\nis_winning\x18\x02 \x01(\x08\x12\x19\n\x11\x61\x63tivation_height\x18\x03 \x01(\x04\x12\x18\n\x10\x65\x66\x66\x65\x63tive_amount\x18\x04 \x01(\x04\x12\x17\n\x0ftrending_amount\x18\x05 \x01(\x04\"<\n\x0bTransaction\x12\x0b\n\x03raw\x18\x01 \x01(\x0c\x12\x0e\n\x06height\x18\x02 \x01(\x05\x12\x10\n\x08position\x18\x03 \x01(\rb\x06proto3')
)
_PAGE = _descriptor.Descriptor(
name='Page',
full_name='pb.Page',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='txos', full_name='pb.Page.txos', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='txs', full_name='pb.Page.txs', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total', full_name='pb.Page.total', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='offset', full_name='pb.Page.offset', index=3,
number=4, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=18,
serialized_end=111,
)
_OUTPUT = _descriptor.Descriptor(
name='Output',
full_name='pb.Output',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='tx_hash', full_name='pb.Output.tx_hash', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nout', full_name='pb.Output.nout', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='claim', full_name='pb.Output.claim', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='meta', full_name='pb.Output.meta',
index=0, containing_type=None, fields=[]),
],
serialized_start=113,
serialized_end=192,
)
_CLAIMMETA = _descriptor.Descriptor(
name='ClaimMeta',
full_name='pb.ClaimMeta',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='channel', full_name='pb.ClaimMeta.channel', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='is_winning', full_name='pb.ClaimMeta.is_winning', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='activation_height', full_name='pb.ClaimMeta.activation_height', index=2,
number=3, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='effective_amount', full_name='pb.ClaimMeta.effective_amount', index=3,
number=4, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='trending_amount', full_name='pb.ClaimMeta.trending_amount', index=4,
number=5, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=195,
serialized_end=333,
)
_TRANSACTION = _descriptor.Descriptor(
name='Transaction',
full_name='pb.Transaction',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='raw', full_name='pb.Transaction.raw', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='height', full_name='pb.Transaction.height', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='position', full_name='pb.Transaction.position', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=335,
serialized_end=395,
)
_PAGE.fields_by_name['txos'].message_type = _OUTPUT
_PAGE.fields_by_name['txs'].message_type = _TRANSACTION
_OUTPUT.fields_by_name['claim'].message_type = _CLAIMMETA
_OUTPUT.oneofs_by_name['meta'].fields.append(
_OUTPUT.fields_by_name['claim'])
_OUTPUT.fields_by_name['claim'].containing_oneof = _OUTPUT.oneofs_by_name['meta']
_CLAIMMETA.fields_by_name['channel'].message_type = _OUTPUT
DESCRIPTOR.message_types_by_name['Page'] = _PAGE
DESCRIPTOR.message_types_by_name['Output'] = _OUTPUT
DESCRIPTOR.message_types_by_name['ClaimMeta'] = _CLAIMMETA
DESCRIPTOR.message_types_by_name['Transaction'] = _TRANSACTION
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Page = _reflection.GeneratedProtocolMessageType('Page', (_message.Message,), dict(
DESCRIPTOR = _PAGE,
__module__ = 'page_pb2'
# @@protoc_insertion_point(class_scope:pb.Page)
))
_sym_db.RegisterMessage(Page)
Output = _reflection.GeneratedProtocolMessageType('Output', (_message.Message,), dict(
DESCRIPTOR = _OUTPUT,
__module__ = 'page_pb2'
# @@protoc_insertion_point(class_scope:pb.Output)
))
_sym_db.RegisterMessage(Output)
ClaimMeta = _reflection.GeneratedProtocolMessageType('ClaimMeta', (_message.Message,), dict(
DESCRIPTOR = _CLAIMMETA,
__module__ = 'page_pb2'
# @@protoc_insertion_point(class_scope:pb.ClaimMeta)
))
_sym_db.RegisterMessage(ClaimMeta)
Transaction = _reflection.GeneratedProtocolMessageType('Transaction', (_message.Message,), dict(
DESCRIPTOR = _TRANSACTION,
__module__ = 'page_pb2'
# @@protoc_insertion_point(class_scope:pb.Transaction)
))
_sym_db.RegisterMessage(Transaction)
# @@protoc_insertion_point(module_scope)

View file

@ -1,4 +1,5 @@
import re
import unicodedata
from typing import NamedTuple, Tuple
@ -39,12 +40,30 @@ def _create_url_regex():
URL_REGEX = _create_url_regex()
def normalize_name(name):
    """Return *name* in its canonical comparison form.

    Names are compared after Unicode NFD decomposition and case folding,
    so visually equivalent spellings map to the same normalized string.
    """
    decomposed = unicodedata.normalize('NFD', name)
    return decomposed.casefold()
class PathSegment(NamedTuple):
name: str
claim_id: str = None
sequence: int = None
amount_order: int = None
@property
def normalized(self):
return normalize_name(self.name)
def to_dict(self):
q = {'name': self.name}
if self.claim_id is not None:
q['claim_id'] = self.claim_id
if self.sequence is not None:
q['sequence'] = self.sequence
if self.amount_order is not None:
q['amount_order'] = self.amount_order
return q
def __str__(self):
if self.claim_id is not None:
return f"{self.name}#{self.claim_id}"
@ -68,16 +87,16 @@ class URL(NamedTuple):
return self.stream is not None
@property
def parts(self) -> Tuple:
if self.has_channel:
if self.has_stream:
return self.channel, self.stream
return self.channel,
return self.stream,
def has_stream_in_channel(self):
return self.has_channel and self.has_stream
@property
def first(self):
return self.parts[0]
def parts(self) -> Tuple:
if self.has_stream_in_channel:
return self.channel, self.stream
if self.has_channel:
return self.channel,
return self.stream,
def __str__(self):
return f"lbry://{'/'.join(str(p) for p in self.parts)}"

View file

@ -12,7 +12,7 @@ from lbrynet.utils import cache_concurrent
from lbrynet.stream.descriptor import StreamDescriptor
from lbrynet.stream.managed_stream import ManagedStream
from lbrynet.schema.claim import Claim
from lbrynet.schema.uri import parse_lbry_uri
from lbrynet.schema.url import URL
from lbrynet.extras.daemon.storage import lbc_to_dewies
if typing.TYPE_CHECKING:
from lbrynet.conf import Config
@ -324,11 +324,10 @@ class StreamManager:
try:
# resolve the claim
parsed_uri = parse_lbry_uri(uri)
if parsed_uri.is_channel:
if not URL.parse(uri).has_stream:
raise ResolveError("cannot download a channel claim, specify a /path")
try:
resolved_result = await asyncio.wait_for(self.wallet.ledger.resolve(0, 1, uri), resolve_timeout)
resolved_result = await asyncio.wait_for(self.wallet.ledger.resolve(uri), resolve_timeout)
except asyncio.TimeoutError:
raise ResolveTimeout(uri)
await self.storage.save_claims_for_resolve([

View file

@ -276,5 +276,5 @@ class CommandTestCase(IntegrationTestCase):
async def resolve(self, uri):
return await self.out(self.daemon.jsonrpc_resolve(uri))
async def claim_search(self, *args, **kwargs):
return (await self.out(self.daemon.jsonrpc_claim_search(*args, **kwargs)))['items']
async def claim_search(self, **kwargs):
return (await self.out(self.daemon.jsonrpc_claim_search(**kwargs)))['items']

View file

@ -94,7 +94,7 @@ class Account(BaseAccount):
)
results['migrate-failed'] += 1
continue
claims = await self.ledger.network.get_claims_by_ids(maybe_claim_id)
claims = await self.ledger.network.get_claims_by_ids([maybe_claim_id])
if maybe_claim_id not in claims:
log.warning(
"Failed to migrate claim '%s', server did not return any claim information.",

View file

@ -1,10 +1,10 @@
import asyncio
import logging
from binascii import unhexlify
from typing import Optional
from typing import Tuple, List, Dict
from torba.client.baseledger import BaseLedger
from lbrynet.schema.page import Page
from lbrynet.schema.result import Outputs
from lbrynet.wallet.dewies import dewies_to_lbc
from lbrynet.wallet.resolve import Resolver
from lbrynet.wallet.account import Account
@ -51,21 +51,24 @@ class MainNetLedger(BaseLedger):
self.fee_per_name_char = self.config.get('fee_per_name_char', self.default_fee_per_name_char)
self.resolver = Resolver(self)
def resolve(self, page, page_size, *uris):
return self.resolver.resolve(page, page_size, *uris)
async def _inflate_outputs(self, query):
outputs = Outputs.from_base64(await query)
txs = []
if len(outputs.txs) > 0:
txs = await asyncio.gather(*(self.cache_transaction(*tx) for tx in outputs.txs))
return outputs.inflate(txs), outputs.offset, outputs.total
async def claim_search(self, **kwargs) -> Page:
return Page.from_base64(await self.network.claim_search(**kwargs))
async def resolve(self, urls):
txos = (await self._inflate_outputs(self.network.resolve(urls)))[0]
assert len(urls) == len(txos), "Mismatch between urls requested for resolve and responses received."
return {url: txo for url, txo in zip(urls, txos)}
async def get_claim_by_claim_id(self, claim_id) -> Optional[Output]:
page = await self.claim_search(claim_id=claim_id)
if page.txos:
return page.txos[0]
async def claim_search(self, **kwargs) -> Tuple[List, int, int]:
return await self._inflate_outputs(self.network.claim_search(**kwargs))
async def get_claim_by_outpoint(self, txid, nout) -> Optional[Output]:
page = await self.claim_search(txid=txid, nout=nout)
if page.txos:
return page.txos[0]
async def get_claim_by_claim_id(self, claim_id) -> Dict[str, Output]:
for claim in (await self.claim_search(claim_id=claim_id))[0]:
return claim
async def start(self):
await super().start()

View file

@ -200,12 +200,7 @@ class LbryWalletManager(BaseWalletManager):
_raw = await self.ledger.network.get_transaction(txid)
except CodeMessageError as e:
return {'success': False, 'code': e.code, 'message': e.message}
# this is a workaround for the current protocol. Should be fixed when lbryum support is over and we
# are able to use the modern get_transaction call, which accepts verbose to show height and other fields
height = await self.ledger.network.get_transaction_height(txid)
tx = self.ledger.transaction_class(unhexlify(_raw))
if tx and height > 0:
await self.ledger.maybe_verify_transaction(tx, height + 1) # off by one from server side, yes...
return tx
@staticmethod

View file

@ -6,21 +6,11 @@ class Network(BaseNetwork):
def get_server_height(self):
return self.rpc('blockchain.block.get_server_height', [])
def get_values_for_uris(self, block_hash, *uris):
return self.rpc('blockchain.claimtrie.getvaluesforuris', [block_hash, *uris])
def get_claims_by_ids(self, *claim_ids):
def get_claims_by_ids(self, claim_ids):
return self.rpc('blockchain.claimtrie.getclaimsbyids', claim_ids)
def get_claims_in_tx(self, txid):
return self.rpc('blockchain.claimtrie.getclaimsintx', [txid])
def get_claims_for_name(self, name):
return self.rpc('blockchain.claimtrie.getclaimsforname', [name])
def get_transaction_height(self, txid):
# 1.0 protocol specific workaround. Newer protocol should use get_transaction with verbose True
return self.rpc('blockchain.transaction.get_height', [txid])
def resolve(self, urls):
return self.rpc('blockchain.claimtrie.resolve', urls)
def claim_search(self, **kwargs):
return self.rpc('blockchain.claimtrie.search', kwargs)

View file

@ -10,7 +10,7 @@ from lbrynet.wallet.dewies import dewies_to_lbc
from lbrynet.error import UnknownNameError, UnknownClaimID, UnknownURI, UnknownOutpoint
from lbrynet.schema.claim import Claim
from google.protobuf.message import DecodeError
from lbrynet.schema.uri import parse_lbry_uri, URIParseError
from lbrynet.schema.url import URL
from lbrynet.wallet.claim_proofs import verify_proof, InvalidProofError
log = logging.getLogger(__name__)
@ -27,9 +27,9 @@ class Resolver:
uris = set(uris)
try:
for uri in uris:
parsed_uri = parse_lbry_uri(uri)
if parsed_uri.claim_id:
validate_claim_id(parsed_uri.claim_id)
for part in URL.parse(uri).parts:
if part.claim_id:
validate_claim_id(part.claim_id)
claim_trie_root = self.ledger.headers.claim_trie_root
resolutions = await self.network.get_values_for_uris(self.ledger.headers.hash().decode(), *uris)
if len(uris) > 1:
@ -162,7 +162,7 @@ class Resolver:
if certificate is None:
log.info("fetching certificate to check claim signature")
channel_id = claim_result['value'].signing_channel_id
certificate = (await self.network.get_claims_by_ids(channel_id)).get(channel_id)
certificate = (await self.network.get_claims_by_ids([channel_id])).get(channel_id)
if not certificate:
log.warning('Certificate %s not found', channel_id)
claim_result['channel_name'] = certificate['name'] if certificate else None
@ -238,7 +238,7 @@ class Resolver:
async def iter_validate_channel_claims():
formatted_claims = []
for claim_ids in queries:
batch_result = await self.network.get_claims_by_ids(*claim_ids)
batch_result = await self.network.get_claims_by_ids(claim_ids)
for claim_id in claim_ids:
claim = batch_result[claim_id]
if claim['name'] == claim_names[claim_id]:

View file

@ -18,7 +18,6 @@ class LBRYBlockProcessor(BlockProcessor):
self.sql.begin()
try:
super().advance_blocks(blocks)
self.sql.delete_dereferenced_transactions()
except:
self.logger.exception(f'Error while advancing transaction in new block.')
raise

View file

@ -8,6 +8,7 @@ from torba.server.util import class_logger
from torba.client.basedatabase import query, constraints_to_sql
from google.protobuf.message import DecodeError
from lbrynet.schema.url import URL, normalize_name
from lbrynet.wallet.transaction import Transaction, Output
@ -19,46 +20,36 @@ class SQLDB:
pragma journal_mode=WAL;
"""
CREATE_TX_TABLE = """
create table if not exists tx (
tx_hash bytes primary key,
raw bytes not null,
position integer not null,
height integer not null
);
"""
CREATE_CLAIM_TABLE = """
create table if not exists claim (
claim_hash bytes primary key,
tx_hash bytes not null,
txo_hash bytes not null,
height integer not null,
activation_height integer,
amount integer not null,
effective_amount integer not null default 0,
trending_amount integer not null default 0,
normalized text not null,
claim_name text not null,
channel_hash bytes
is_channel bool not null,
txo_hash bytes not null,
tx_position integer not null,
height integer not null,
amount integer not null,
channel_hash bytes,
activation_height integer,
effective_amount integer not null default 0,
trending_amount integer not null default 0
);
create index if not exists claim_tx_hash_idx on claim (tx_hash);
create index if not exists claim_normalized_idx on claim (normalized);
create index if not exists claim_txo_hash_idx on claim (txo_hash);
create index if not exists claim_activation_height_idx on claim (activation_height);
create index if not exists claim_channel_hash_idx on claim (channel_hash);
create index if not exists claim_claim_name_idx on claim (claim_name);
create index if not exists claim_activation_height_idx on claim (activation_height);
"""
CREATE_SUPPORT_TABLE = """
create table if not exists support (
txo_hash bytes primary key,
tx_hash bytes not null,
claim_hash bytes not null,
position integer not null,
tx_position integer not null,
height integer not null,
amount integer not null,
is_comment bool not null default false
claim_hash bytes not null,
amount integer not null
);
create index if not exists support_tx_hash_idx on support (tx_hash);
create index if not exists support_txo_hash_idx on support (txo_hash);
create index if not exists support_claim_hash_idx on support (claim_hash, height);
"""
@ -75,7 +66,7 @@ class SQLDB:
CREATE_CLAIMTRIE_TABLE = """
create table if not exists claimtrie (
claim_name text primary key,
normalized text primary key,
claim_hash bytes not null,
last_take_over_height integer not null
);
@ -84,7 +75,6 @@ class SQLDB:
CREATE_TABLES_QUERY = (
PRAGMAS +
CREATE_TX_TABLE +
CREATE_CLAIM_TABLE +
CREATE_SUPPORT_TABLE +
CREATE_CLAIMTRIE_TABLE +
@ -140,56 +130,61 @@ class SQLDB:
def commit(self):
self.execute('commit;')
def insert_txs(self, txs: Set[Transaction]):
    """Bulk-insert raw transactions into the ``tx`` table.

    No-op when *txs* is empty.  Binary columns (hash, raw bytes) are wrapped
    in ``sqlite3.Binary`` so sqlite stores them as BLOBs.
    """
    if txs:
        self.db.executemany(
            "INSERT INTO tx (tx_hash, raw, position, height) VALUES (?, ?, ?, ?)",
            [(sqlite3.Binary(tx.hash), sqlite3.Binary(tx.raw), tx.position, tx.height) for tx in txs]
        )
def _upsertable_claims(self, txos: Set[Output]):
claims, tags = [], []
for txo in txos:
tx = txo.tx_ref.tx
try:
assert txo.claim_name
assert txo.normalized_name
except (AssertionError, UnicodeDecodeError):
self.logger.exception(f"Could not decode claim name for {tx.id}:{txo.position}.")
continue
txo_hash = sqlite3.Binary(txo.ref.hash)
claim_record = {
'claim_hash': sqlite3.Binary(txo.claim_hash),
'normalized': txo.normalized_name,
'claim_name': txo.claim_name,
'is_channel': False,
'txo_hash': txo_hash,
'tx_position': tx.position,
'height': tx.height,
'amount': txo.amount,
'channel_hash': None,
}
claims.append(claim_record)
try:
claim = txo.claim
if claim.is_channel:
metadata = claim.channel
else:
metadata = claim.stream
except DecodeError:
self.logger.exception(f"Could not parse claim protobuf for {tx.id}:{txo.position}.")
continue
txo_hash = sqlite3.Binary(txo.ref.hash)
channel_hash = sqlite3.Binary(claim.signing_channel_hash) if claim.signing_channel_hash else None
claims.append({
'claim_hash': sqlite3.Binary(txo.claim_hash),
'tx_hash': sqlite3.Binary(tx.hash),
'txo_hash': txo_hash,
'channel_hash': channel_hash,
'amount': txo.amount,
'claim_name': txo.claim_name,
'height': tx.height
})
for tag in metadata.tags:
claim_record['is_channel'] = claim.is_channel
if claim.signing_channel_hash:
claim_record['channel_hash'] = sqlite3.Binary(claim.signing_channel_hash)
for tag in claim.message.tags:
tags.append((tag, txo_hash, tx.height))
if tags:
self.db.executemany(
"INSERT INTO tag (tag, txo_hash, height) VALUES (?, ?, ?)", tags
)
return claims
def insert_claims(self, txos: Set[Output]):
claims = self._upsertable_claims(txos)
if claims:
self.db.executemany(
"INSERT INTO claim (claim_hash, tx_hash, txo_hash, channel_hash, amount, claim_name, height) "
"VALUES (:claim_hash, :tx_hash, :txo_hash, :channel_hash, :amount, :claim_name, :height) ",
"INSERT INTO claim ("
" claim_hash, normalized, claim_name, is_channel, txo_hash,"
" tx_position, height, amount, channel_hash) "
"VALUES ("
" :claim_hash, :normalized, :claim_name, :is_channel, :txo_hash,"
" :tx_position, :height, :amount, :channel_hash) ",
claims
)
@ -198,8 +193,8 @@ class SQLDB:
if claims:
self.db.executemany(
"UPDATE claim SET "
" tx_hash=:tx_hash, txo_hash=:txo_hash, channel_hash=:channel_hash, "
" amount=:amount, height=:height "
" is_channel=:is_channel, txo_hash=:txo_hash, tx_position=:tx_position,"
" height=:height, amount=:amount, channel_hash=:channel_hash "
"WHERE claim_hash=:claim_hash;",
claims
)
@ -231,14 +226,15 @@ class SQLDB:
for txo in txos:
tx = txo.tx_ref.tx
supports.append((
sqlite3.Binary(txo.ref.hash), sqlite3.Binary(tx.hash),
sqlite3.Binary(txo.claim_hash), tx.position, tx.height,
txo.amount, False
sqlite3.Binary(txo.ref.hash), tx.position, tx.height,
sqlite3.Binary(txo.claim_hash), txo.amount
))
if supports:
self.db.executemany(
"INSERT INTO support (txo_hash, tx_hash, claim_hash, position, height, amount, is_comment) "
"VALUES (?, ?, ?, ?, ?, ?, ?)", supports
"INSERT INTO support ("
" txo_hash, tx_position, height, claim_hash, amount"
") "
"VALUES (?, ?, ?, ?, ?)", supports
)
def delete_other_txos(self, txo_hashes: Set[bytes]):
@ -247,21 +243,13 @@ class SQLDB:
'support', {'txo_hash__in': [sqlite3.Binary(txo_hash) for txo_hash in txo_hashes]}
))
def delete_dereferenced_transactions(self):
    """Delete rows from `tx` that no claim and no support references any more.

    A tx row is kept as long as at least one claim or support row points at
    its tx_hash; the summed sub-selects count those references.
    """
    self.execute("""
        DELETE FROM tx WHERE (
            (SELECT COUNT(*) FROM claim WHERE claim.tx_hash=tx.tx_hash) +
            (SELECT COUNT(*) FROM support WHERE support.tx_hash=tx.tx_hash)
        ) = 0
    """)
def _make_claims_without_competition_become_controlling(self, height):
    """Make every claim that is alone on its normalized name controlling.

    Inserts a claimtrie row (taking over at `height`) for each normalized
    name that has exactly one claim and no existing claimtrie entry.

    Fix: the f-string contained both the old (`claim_name`) and new
    (`normalized`) column variants of the statement concatenated
    (merge/diff residue); only the `normalized`-based statement is kept.
    """
    self.execute(f"""
        INSERT INTO claimtrie (normalized, claim_hash, last_take_over_height)
        SELECT claim.normalized, claim.claim_hash, {height} FROM claim
            LEFT JOIN claimtrie USING (normalized)
        WHERE claimtrie.claim_hash IS NULL
        GROUP BY claim.normalized HAVING COUNT(*) = 1
    """)
self.execute(f"""
UPDATE claim SET activation_height = {height}
@ -295,25 +283,25 @@ class SQLDB:
(
{height} -
(SELECT last_take_over_height FROM claimtrie
WHERE claimtrie.claim_name=claim.claim_name)
WHERE claimtrie.normalized=claim.normalized)
) / 32 AS INT))
WHERE activation_height IS NULL
""")
def get_overtakings(self):
    """Return (normalized, claim_hash) rows where the highest-effective-amount
    claim for a name differs from the claim currently in the claimtrie.

    Fix: the query contained both the old (`claim_name`) and new
    (`normalized`) variants of the SELECT concatenated (merge/diff residue);
    only the `normalized`-based query is kept.
    """
    return self.execute(f"""
        SELECT winner.normalized, winner.claim_hash FROM (
            SELECT normalized, claim_hash, MAX(effective_amount)
            FROM claim GROUP BY normalized
        ) AS winner JOIN claimtrie USING (normalized)
        WHERE claimtrie.claim_hash <> winner.claim_hash
    """)
def _perform_overtake(self, height):
for overtake in self.get_overtakings():
self.execute(
f"UPDATE claim SET activation_height = {height} WHERE claim_name = ?",
(overtake['claim_name'],)
f"UPDATE claim SET activation_height = {height} WHERE normalized = ?",
(overtake['normalized'],)
)
self.execute(
f"UPDATE claimtrie SET claim_hash = ?, last_take_over_height = {height}",
@ -329,34 +317,37 @@ class SQLDB:
self._update_effective_amount(height)
self._perform_overtake(height)
def get_transactions(self, tx_hashes):
    """Fetch all rows from `tx` whose tx_hash is in `tx_hashes`."""
    cursor = self.db.cursor()
    cursor.execute(*query("SELECT * FROM tx", tx_hash__in=tx_hashes))
    return cursor.fetchall()
def get_claims(self, cols, **constraints):
if 'is_winning' in constraints:
if 'is_controlling' in constraints:
constraints['claimtrie.claim_hash__is_not_null'] = ''
del constraints['is_winning']
if 'name' in constraints:
constraints['claim.claim_name__like'] = constraints.pop('name')
del constraints['is_controlling']
if 'claim_id' in constraints:
constraints['claim.claim_hash'] = sqlite3.Binary(
unhexlify(constraints.pop('claim_id'))[::-1]
)
if 'name' in constraints:
constraints['claim.normalized'] = normalize_name(constraints.pop('name'))
if 'channel' in constraints:
url = URL.parse(constraints.pop('channel'))
if url.channel.claim_id:
constraints['channel_id'] = url.channel.claim_id
else:
constraints['channel_name'] = url.channel.name
if 'channel_id' in constraints:
constraints['claim.channel_hash'] = sqlite3.Binary(
unhexlify(constraints.pop('channel_id'))[::-1]
)
constraints['channel_hash'] = unhexlify(constraints.pop('channel_id'))[::-1]
if 'channel_hash' in constraints:
constraints['channel.claim_hash'] = sqlite3.Binary(constraints.pop('channel_hash'))
if 'channel_name' in constraints:
constraints['channel.normalized'] = normalize_name(constraints.pop('channel_name'))
if 'txid' in constraints:
tx_hash = unhexlify(constraints.pop('txid'))[::-1]
if 'nout' in constraints:
nout = constraints.pop('nout')
nout = constraints.pop('nout', 0)
constraints['claim.txo_hash'] = sqlite3.Binary(
tx_hash + struct.pack('<I', nout)
)
else:
constraints['claim.tx_hash'] = sqlite3.Binary(tx_hash)
cur = self.db.cursor()
cur.execute(*query(
f"""
@ -374,31 +365,68 @@ class SQLDB:
count = self.get_claims('count(*)', **constraints)
return count[0][0]
def _search(self, **constraints):
    """Run get_claims() with the standard resolution column set.

    Selected columns: the claim's own txo/height/activation/amount columns,
    the signing channel's equivalents aliased as channel_*, the claimtrie
    join aliased as is_controlling (NULL when not controlling), and a
    claims_in_channel count computed only for channel claims.
    """
    return self.get_claims(
        """
        claimtrie.claim_hash as is_controlling,
        claim.claim_hash, claim.txo_hash, claim.height,
        claim.activation_height, claim.effective_amount, claim.trending_amount,
        channel.txo_hash as channel_txo_hash, channel.height as channel_height,
        channel.activation_height as channel_activation_height,
        channel.effective_amount as channel_effective_amount,
        channel.trending_amount as channel_trending_amount,
        CASE WHEN claim.is_channel=1 THEN (
            SELECT COUNT(*) FROM claim as claim_in_channel
            WHERE claim_in_channel.channel_hash=claim.claim_hash
        ) ELSE 0 END AS claims_in_channel
        """, **constraints
    )
# Keyword arguments accepted by search(); anything else is rejected.
SEARCH_PARAMS = {
    'name', 'claim_id', 'txid', 'nout',
    'channel', 'channel_id', 'channel_name',
    'is_controlling', 'limit', 'offset'
}

def search(self, constraints) -> Tuple[List, int, int]:
    """Search claims matching `constraints`.

    Returns (txo_rows, offset, total): the current page of claim rows,
    the effective offset, and the total match count. Offset is clamped to
    non-negative and limit to at most 50 (default 10).

    Fix: this span carried merge/diff residue — the old SEARCH_PARAMS line
    (with 'is_winning'), the old `claim_search` signature, and the old
    order_by/transaction-fetch body were interleaved with the new code.
    Only the current implementation is kept.
    """
    assert set(constraints).issubset(self.SEARCH_PARAMS), \
        f"Search query contains invalid arguments: {set(constraints).difference(self.SEARCH_PARAMS)}"
    total = self.get_claims_count(**constraints)
    constraints['offset'] = abs(constraints.get('offset', 0))
    constraints['limit'] = min(abs(constraints.get('limit', 10)), 50)
    constraints['order_by'] = ["claim.height DESC", "claim.normalized ASC"]
    txo_rows = self._search(**constraints)
    return txo_rows, constraints['offset'], total
def resolve(self, urls) -> List:
    """Resolve each LBRY URL to its controlling claim row.

    Returns a list parallel to `urls`. Each entry is a claim row dict, or
    an exception instance describing the failure: ValueError for an
    unparseable URL, LookupError when the channel or stream is not found.
    """
    results = []
    for raw_url in urls:
        try:
            url = URL.parse(raw_url)
        except ValueError as parse_error:
            results.append(parse_error)
            continue
        channel = None
        if url.has_channel:
            channel_matches = self._search(is_controlling=True, **url.channel.to_dict())
            if not channel_matches:
                results.append(LookupError(f'Could not find channel in "{raw_url}".'))
                continue
            channel = channel_matches[0]
        if not url.has_stream:
            # Channel-only URL: the resolved channel (or None) is the answer.
            results.append(channel)
            continue
        stream_query = url.stream.to_dict()
        if channel is not None:
            # Restrict the stream lookup to the resolved channel.
            stream_query['channel_hash'] = channel['claim_hash']
        stream_matches = self._search(is_controlling=True, **stream_query)
        if stream_matches:
            results.append(stream_matches[0])
        else:
            results.append(LookupError(f'Could not find stream in "{raw_url}".'))
    return results
def advance_txs(self, height, all_txs):
sql, txs = self, set()
@ -428,7 +456,6 @@ class SQLDB:
sql.abandon_claims(abandon_claim_hashes)
sql.clear_claim_metadata(stale_claim_metadata_txo_hashes)
sql.delete_other_txos(delete_txo_hashes)
sql.insert_txs(txs)
sql.insert_claims(insert_claims)
sql.update_claims(update_claims)
sql.insert_supports(insert_supports)

View file

@ -7,8 +7,8 @@ from torba.server.hash import hash_to_hex_str
from torba.server.session import ElectrumX
from torba.server import util
from lbrynet.schema.page import Page
from lbrynet.schema.uri import parse_lbry_uri, CLAIM_ID_MAX_LENGTH, URIParseError
from lbrynet.schema.result import Outputs
from lbrynet.schema.url import URL
from lbrynet.wallet.server.block_processor import LBRYBlockProcessor
from lbrynet.wallet.server.db import LBRYDB
@ -29,6 +29,7 @@ class LBRYElectrumX(ElectrumX):
handlers = {
'blockchain.transaction.get_height': self.transaction_get_height,
'blockchain.claimtrie.search': self.claimtrie_search,
'blockchain.claimtrie.resolve': self.claimtrie_resolve,
'blockchain.claimtrie.getclaimbyid': self.claimtrie_getclaimbyid,
'blockchain.claimtrie.getclaimsforname': self.claimtrie_getclaimsforname,
'blockchain.claimtrie.getclaimsbyids': self.claimtrie_getclaimsbyids,
@ -44,6 +45,14 @@ class LBRYElectrumX(ElectrumX):
}
self.request_handlers.update(handlers)
async def claimtrie_search(self, **kwargs):
    """Handle 'blockchain.claimtrie.search': validate any claim_id, run the
    SQL claim search, and return the result base64-encoded via Outputs."""
    if 'claim_id' in kwargs:
        self.assert_claim_id(kwargs['claim_id'])
    return Outputs.to_base64(*self.db.sql.search(kwargs))
async def claimtrie_resolve(self, *urls):
    """Handle 'blockchain.claimtrie.resolve': resolve the given URLs via SQL
    and return the rows base64-encoded via Outputs."""
    return Outputs.to_base64(self.db.sql.resolve(urls))
async def get_server_height(self):
    """Return the block processor's current height."""
    return self.bp.height
@ -138,11 +147,6 @@ class LBRYElectrumX(ElectrumX):
return claims
return {}
# NOTE(review): this appears to be a stale duplicate of the claimtrie_search
# handler defined earlier in this class (which uses Outputs.to_base64 and
# sql.search); this copy still references Page.to_base64 and the old
# sql.claim_search API. Verify which definition is registered and remove
# the obsolete one.
async def claimtrie_search(self, **kwargs):
    if 'claim_id' in kwargs:
        self.assert_claim_id(kwargs['claim_id'])
    return Page.to_base64(*self.db.sql.claim_search(kwargs))
async def batched_formatted_claims_from_daemon(self, claim_ids):
claims = await self.daemon.getclaimsbyids(claim_ids)
result = []
@ -252,21 +256,22 @@ class LBRYElectrumX(ElectrumX):
uri = uri
block_hash = block_hash
try:
parsed_uri = parse_lbry_uri(uri)
except URIParseError as err:
return {'error': err.message}
parsed_uri = URL.parse(uri)
except ValueError as err:
return {'error': err.args[0]}
result = {}
if parsed_uri.contains_channel:
if parsed_uri.has_channel:
certificate = None
# TODO: this is also done on the else, refactor
if parsed_uri.claim_id:
if len(parsed_uri.claim_id) < CLAIM_ID_MAX_LENGTH:
certificate_info = self.claimtrie_getpartialmatch(parsed_uri.name, parsed_uri.claim_id)
if parsed_uri.channel.claim_id:
if len(parsed_uri.channel.claim_id) < 40:
certificate_info = self.claimtrie_getpartialmatch(
parsed_uri.channel.name, parsed_uri.channel.claim_id)
else:
certificate_info = await self.claimtrie_getclaimbyid(parsed_uri.claim_id)
if certificate_info and self.claim_matches_name(certificate_info, parsed_uri.name):
certificate_info = await self.claimtrie_getclaimbyid(parsed_uri.channel.claim_id)
if certificate_info and self.claim_matches_name(certificate_info, parsed_uri.channel.name):
certificate = {'resolution_type': CLAIM_ID, 'result': certificate_info}
elif parsed_uri.claim_sequence:
certificate_info = await self.claimtrie_getnthclaimforname(parsed_uri.name, parsed_uri.claim_sequence)
@ -297,7 +302,7 @@ class LBRYElectrumX(ElectrumX):
else:
claim = None
if parsed_uri.claim_id:
if len(parsed_uri.claim_id) < CLAIM_ID_MAX_LENGTH:
if len(parsed_uri.claim_id) < 40:
claim_info = self.claimtrie_getpartialmatch(parsed_uri.name, parsed_uri.claim_id)
else:
claim_info = await self.claimtrie_getclaimbyid(parsed_uri.claim_id)

View file

@ -14,6 +14,7 @@ from ecdsa.util import sigencode_der
from torba.client.basetransaction import BaseTransaction, BaseInput, BaseOutput, ReadOnlyList
from torba.client.hash import hash160, sha256, Base58
from lbrynet.schema.claim import Claim
from lbrynet.schema.url import normalize_name
from lbrynet.wallet.account import Account
from lbrynet.wallet.script import InputScript, OutputScript
@ -74,6 +75,10 @@ class Output(BaseOutput):
return self.script.values['claim_name'].decode()
raise ValueError('No claim_name associated.')
@property
def normalized_name(self) -> str:
    """Claim name passed through normalize_name() from lbrynet.schema.url."""
    return normalize_name(self.claim_name)
@property
def claim(self) -> Claim:
if self.is_claim:

View file

@ -39,9 +39,8 @@ class EpicAdventuresOfChris45(CommandTestCase):
self.assertEqual(result, '8.989893')
# And is the channel resolvable and empty?
response = await self.out(self.daemon.jsonrpc_resolve('lbry://@spam'))
self.assertIn('lbry://@spam', response)
self.assertIn('certificate', response['lbry://@spam'])
response = await self.resolve('lbry://@spam')
self.assertEqual(response['lbry://@spam']['value_type'], 'channel')
# "What goes well with spam?" ponders Chris...
# "A hovercraft with eels!" he exclaims.
@ -64,9 +63,8 @@ class EpicAdventuresOfChris45(CommandTestCase):
# Also checks that his new story can be found on the blockchain before
# giving the link to all his friends.
response = await self.out(self.daemon.jsonrpc_resolve('lbry://@spam/hovercraft'))
self.assertIn('lbry://@spam/hovercraft', response)
self.assertIn('claim', response['lbry://@spam/hovercraft'])
response = await self.resolve('lbry://@spam/hovercraft')
self.assertEqual(response['lbry://@spam/hovercraft']['value_type'], 'stream')
# He goes to tell everyone about it and in the meantime 5 blocks are confirmed.
await self.generate(5)
@ -86,8 +84,11 @@ class EpicAdventuresOfChris45(CommandTestCase):
await self.confirm_tx(abandon['txid'])
# And now checks that the claim doesn't resolve anymore.
response = await self.out(self.daemon.jsonrpc_resolve('lbry://@spam/hovercraft'))
self.assertNotIn('claim', response['lbry://@spam/hovercraft'])
response = await self.resolve('lbry://@spam/hovercraft')
self.assertEqual(
response['lbry://@spam/hovercraft'],
{'error': 'lbry://@spam/hovercraft did not resolve to a claim'}
)
# After abandoning he just waits for his LBCs to be returned to his account
await self.generate(5)
@ -134,10 +135,10 @@ class EpicAdventuresOfChris45(CommandTestCase):
await self.confirm_tx(tx['txid'])
# And check if his support showed up
resolve_result = await self.out(self.daemon.jsonrpc_resolve(uri))
resolve_result = await self.resolve(uri)
# It obviously did! Because, blockchain baby \O/
self.assertEqual(resolve_result[uri]['claim']['amount'], '1.0')
self.assertEqual(resolve_result[uri]['claim']['effective_amount'], '1.2')
self.assertEqual(resolve_result[uri]['amount'], '1.0')
self.assertEqual(resolve_result[uri]['meta']['effective_amount'], '1.2')
await self.generate(5)
# Now he also wanted to support the original creator of the Award Winning Novel
@ -148,9 +149,9 @@ class EpicAdventuresOfChris45(CommandTestCase):
await self.confirm_tx(tx['txid'])
# And again checks if it went to the just right place
resolve_result = await self.out(self.daemon.jsonrpc_resolve(uri))
resolve_result = await self.resolve(uri)
# Which it obviously did. Because....?????
self.assertEqual(resolve_result[uri]['claim']['effective_amount'], '1.5')
self.assertEqual(resolve_result[uri]['meta']['effective_amount'], '1.5')
await self.generate(5)
# Seeing the ravishing success of his novel Chris adds support to his claim too
@ -160,7 +161,7 @@ class EpicAdventuresOfChris45(CommandTestCase):
# And check if his support showed up
resolve_result = await self.out(self.daemon.jsonrpc_resolve(uri))
# It did!
self.assertEqual(resolve_result[uri]['claim']['effective_amount'], '1.9')
self.assertEqual(resolve_result[uri]['meta']['effective_amount'], '1.9')
await self.generate(5)
# Now Ramsey who is a singer by profession, is preparing for his new "gig". He has everything in place for that
@ -184,5 +185,5 @@ class EpicAdventuresOfChris45(CommandTestCase):
await self.confirm_tx(abandon['txid'])
# He them checks that the claim doesn't resolve anymore.
response = await self.out(self.daemon.jsonrpc_resolve(uri))
self.assertNotIn('claim', response[uri])
response = await self.resolve(uri)
self.assertEqual(response[uri], {'error': f'{uri} did not resolve to a claim'})

View file

@ -259,16 +259,16 @@ class StreamCommands(CommandTestCase):
# defaults to using all accounts to lookup channel
await self.stream_create('hovercraft1', '0.1', channel_id=baz_id)
self.assertEqual((await self.claim_search('hovercraft1'))[0]['channel_name'], '@baz')
self.assertEqual((await self.claim_search(name='hovercraft1'))[0]['signing_channel']['name'], '@baz')
# lookup by channel_name in all accounts
await self.stream_create('hovercraft2', '0.1', channel_name='@baz')
self.assertEqual((await self.claim_search('hovercraft2'))[0]['channel_name'], '@baz')
self.assertEqual((await self.claim_search(name='hovercraft2'))[0]['signing_channel']['name'], '@baz')
# uses only the specific accounts which contains the channel
await self.stream_create('hovercraft3', '0.1', channel_id=baz_id, channel_account_id=[account2_id])
self.assertEqual((await self.claim_search('hovercraft3'))[0]['channel_name'], '@baz')
self.assertEqual((await self.claim_search(name='hovercraft3'))[0]['signing_channel']['name'], '@baz')
# lookup by channel_name in specific account
await self.stream_create('hovercraft4', '0.1', channel_name='@baz', channel_account_id=[account2_id])
self.assertEqual((await self.claim_search('hovercraft4'))[0]['channel_name'], '@baz')
self.assertEqual((await self.claim_search(name='hovercraft4'))[0]['signing_channel']['name'], '@baz')
# fails when specifying account which does not contain channel
with self.assertRaisesRegex(ValueError, "Couldn't find channel with channel_id"):
await self.stream_create(
@ -657,7 +657,7 @@ class StreamCommands(CommandTestCase):
self.assertEqual(2, len(self.daemon.jsonrpc_file_list()))
r = await self.resolve('lbry://@abc/foo')
self.assertEqual(
r['lbry://@abc/foo']['claim']['claim_id'],
r['lbry://@abc/foo']['claim_id'],
tx3['outputs'][0]['claim_id']
)
@ -665,9 +665,9 @@ class StreamCommands(CommandTestCase):
tx4 = await self.publish('foo', languages='uk-UA')
self.assertEqual(2, len(self.daemon.jsonrpc_file_list()))
r = await self.resolve('lbry://foo')
claim = r['lbry://foo']['claim']
claim = r['lbry://foo']
self.assertEqual(claim['txid'], tx4['outputs'][0]['txid'])
self.assertNotIn('channel_name', claim)
self.assertNotIn('signing_channel', claim)
self.assertEqual(claim['value']['languages'], ['uk-UA'])
async def test_claim_search(self):
@ -712,7 +712,7 @@ class StreamCommands(CommandTestCase):
self.assertEqual(len(claims), 3)
claims = await self.claim_search(channel_name="@abc", channel_id=channel_id)
self.assertEqual(len(claims), 3)
claims = await self.claim_search(channel_name=f"@abc#{channel_id}")
claims = await self.claim_search(channel=f"@abc#{channel_id}")
self.assertEqual(len(claims), 3)
await self.stream_abandon(claim_id=claims[0]['claim_id'])
@ -758,12 +758,14 @@ class StreamCommands(CommandTestCase):
# Original channel doesnt exists anymore, so the signature is invalid. For invalid signatures, resolution is
# only possible outside a channel
response = await self.resolve('lbry://@abc/on-channel-claim')
self.assertNotIn('claim', response['lbry://@abc/on-channel-claim'])
self.assertEqual(response, {
'lbry://@abc/on-channel-claim': {'error': 'lbry://@abc/on-channel-claim did not resolve to a claim'}
})
response = await self.resolve('lbry://on-channel-claim')
self.assertIs(False, response['lbry://on-channel-claim']['claim']['signature_is_valid'])
self.assertNotIn('is_channel_signature_valid', response['lbry://on-channel-claim'])
direct_uri = 'lbry://on-channel-claim#' + orphan_claim_id
response = await self.resolve(direct_uri)
self.assertIs(False, response[direct_uri]['claim']['signature_is_valid'])
self.assertNotIn('is_channel_signature_valid', response[direct_uri])
await self.stream_abandon(claim_id=orphan_claim_id)
uri = 'lbry://@abc/on-channel-claim'
@ -771,7 +773,7 @@ class StreamCommands(CommandTestCase):
valid_claim = await self.stream_create('on-channel-claim', '0.00000001', channel_id=channel['claim_id'])
# resolves normally
response = await self.resolve(uri)
self.assertTrue(response[uri]['claim']['signature_is_valid'])
self.assertTrue(response[uri]['is_channel_signature_valid'])
# ooops! claimed a valid conflict! (this happens on the wild, mostly by accident or race condition)
await self.stream_create(
@ -780,44 +782,29 @@ class StreamCommands(CommandTestCase):
# it still resolves! but to the older claim
response = await self.resolve(uri)
self.assertTrue(response[uri]['claim']['signature_is_valid'])
self.assertEqual(response[uri]['claim']['txid'], valid_claim['txid'])
claims = (await self.daemon.jsonrpc_claim_search('on-channel-claim'))['items']
self.assertTrue(response[uri]['is_channel_signature_valid'])
self.assertEqual(response[uri]['txid'], valid_claim['txid'])
claims = await self.claim_search(name='on-channel-claim')
self.assertEqual(2, len(claims))
signer_ids = set([claim['value'].signing_channel_id for claim in claims])
self.assertEqual({channel['claim_id']}, signer_ids)
self.assertEqual(
{channel['claim_id']}, {claim['signing_channel']['claim_id'] for claim in claims}
)
async def test_normalization_resolution(self):
# this test assumes that the lbrycrd forks normalization at height == 250 on regtest
one = 'ΣίσυφοςfiÆ'
two = 'ΣΊΣΥΦΟσFIæ'
c1 = await self.stream_create('ΣίσυφοςfiÆ', '0.1')
c2 = await self.stream_create('ΣΊΣΥΦΟσFIæ', '0.2')
_ = await self.stream_create(one, '0.1')
c = await self.stream_create(two, '0.2')
r1 = await self.daemon.jsonrpc_resolve(urls='lbry://ΣίσυφοςfiÆ')
r2 = await self.daemon.jsonrpc_resolve(urls='lbry://ΣΊΣΥΦΟσFIæ')
winner_id = c['outputs'][0]['claim_id']
r1c = list(r1.values())[0]['claim']['claim_id']
r2c = list(r2.values())[0]['claim']['claim_id']
self.assertEqual(c1['outputs'][0]['claim_id'], r1c)
self.assertEqual(c2['outputs'][0]['claim_id'], r2c)
self.assertNotEqual(r1c, r2c)
r1 = await self.resolve(f'lbry://{one}')
r2 = await self.resolve(f'lbry://{two}')
await self.generate(50)
self.assertTrue(self.ledger.headers.height > 250)
r3 = await self.daemon.jsonrpc_resolve(urls='lbry://ΣίσυφοςfiÆ')
r4 = await self.daemon.jsonrpc_resolve(urls='lbry://ΣΊΣΥΦΟσFIæ')
r3c = list(r3.values())[0]['claim']['claim_id']
r4c = list(r4.values())[0]['claim']['claim_id']
r3n = list(r3.values())[0]['claim']['name']
r4n = list(r4.values())[0]['claim']['name']
self.assertEqual(c2['outputs'][0]['claim_id'], r3c)
self.assertEqual(c2['outputs'][0]['claim_id'], r4c)
self.assertEqual(r3c, r4c)
self.assertEqual(r3n, r4n)
self.assertEqual(winner_id, r1[f'lbry://{one}']['claim_id'])
self.assertEqual(winner_id, r2[f'lbry://{two}']['claim_id'])
async def test_resolve_old_claim(self):
channel = await self.daemon.jsonrpc_channel_create('@olds', '1.0')
@ -829,8 +816,8 @@ class StreamCommands(CommandTestCase):
await self.broadcast(tx)
await self.confirm_tx(tx.id)
response = await self.daemon.jsonrpc_resolve(urls='@olds/example')
self.assertTrue(response['@olds/example']['claim']['signature_is_valid'])
response = await self.resolve('@olds/example')
self.assertTrue(response['@olds/example']['is_channel_signature_valid'])
claim.publisherSignature.signature = bytes(reversed(claim.publisherSignature.signature))
tx = await Transaction.claim_create(
@ -840,10 +827,10 @@ class StreamCommands(CommandTestCase):
await self.broadcast(tx)
await self.confirm_tx(tx.id)
response = await self.daemon.jsonrpc_resolve(urls='bad_example')
self.assertIs(False, response['bad_example']['claim']['signature_is_valid'], response)
response = await self.daemon.jsonrpc_resolve(urls='@olds/bad_example')
self.assertEqual('URI lbry://@olds/bad_example cannot be resolved', response['@olds/bad_example']['error'])
response = await self.resolve('bad_example')
self.assertFalse(response['bad_example']['is_channel_signature_valid'])
response = await self.resolve('@olds/bad_example')
self.assertFalse(response['@olds/bad_example']['is_channel_signature_valid'])
def generate_signed_legacy(address: bytes, output: Output):

View file

@ -13,32 +13,26 @@ class ResolveCommand(CommandTestCase):
# resolving a channel @abc
response = await self.resolve('lbry://@abc')
self.assertSetEqual({'lbry://@abc'}, set(response))
self.assertIn('certificate', response['lbry://@abc'])
self.assertNotIn('claim', response['lbry://@abc'])
self.assertEqual(response['lbry://@abc']['certificate']['name'], '@abc')
self.assertEqual(response['lbry://@abc']['claims_in_channel'], 0)
self.assertEqual(response['lbry://@abc']['name'], '@abc')
self.assertEqual(response['lbry://@abc']['value_type'], 'channel')
self.assertEqual(response['lbry://@abc']['meta']['claims_in_channel'], 0)
await self.stream_create('foo', '0.01', channel_id=channel_id)
await self.stream_create('foo2', '0.01', channel_id=channel_id)
# resolving a channel @abc with some claims in it
response = await self.resolve('lbry://@abc')
self.assertSetEqual({'lbry://@abc'}, set(response))
self.assertIn('certificate', response['lbry://@abc'])
self.assertNotIn('claim', response['lbry://@abc'])
self.assertEqual(response['lbry://@abc']['certificate']['name'], '@abc')
self.assertEqual(response['lbry://@abc']['claims_in_channel'], 2)
response['lbry://@abc']['confirmations'] += 2
response['lbry://@abc']['meta']['claims_in_channel'] = 2
self.assertEqual(response, await self.resolve('lbry://@abc'))
# resolving claim foo within channel @abc
response = await self.resolve('lbry://@abc/foo')
self.assertSetEqual({'lbry://@abc/foo'}, set(response))
claim = response['lbry://@abc/foo']
self.assertIn('certificate', claim)
self.assertIn('claim', claim)
self.assertEqual(claim['claim']['name'], 'foo')
self.assertEqual(claim['claim']['channel_name'], '@abc')
self.assertEqual(claim['certificate']['name'], '@abc')
self.assertEqual(claim['claims_in_channel'], 0)
self.assertEqual(claim['name'], 'foo')
self.assertEqual(claim['value_type'], 'stream')
self.assertEqual(claim['signing_channel']['name'], '@abc')
self.assertTrue(claim['is_channel_signature_valid'])
self.assertEqual(
claim['claim']['timestamp'],
self.ledger.headers[claim['claim']['height']]['timestamp']
@ -49,46 +43,32 @@ class ResolveCommand(CommandTestCase):
)
# resolving claim foo by itself
response = await self.resolve('lbry://foo')
self.assertSetEqual({'lbry://foo'}, set(response))
claim = response['lbry://foo']
self.assertIn('certificate', claim)
self.assertIn('claim', claim)
self.assertEqual(claim['claim']['name'], 'foo')
self.assertEqual(claim['claim']['channel_name'], '@abc')
self.assertEqual(claim['certificate']['name'], '@abc')
self.assertEqual(claim['claims_in_channel'], 0)
self.assertEqual(claim, (await self.resolve('lbry://foo'))['lbry://foo'])
# resolving from the given permanent url
new_response = await self.resolve(claim['claim']['permanent_url'])
self.assertEqual(new_response[claim['claim']['permanent_url']], claim)
permanent_url = response['lbry://@abc/foo']['permanent_url']
self.assertEqual(claim, (await self.resolve(permanent_url))[permanent_url])
# resolving multiple at once
response = await self.resolve(['lbry://foo', 'lbry://foo2'])
self.assertSetEqual({'lbry://foo', 'lbry://foo2'}, set(response))
claim = response['lbry://foo2']
self.assertIn('certificate', claim)
self.assertIn('claim', claim)
self.assertEqual(claim['claim']['name'], 'foo2')
self.assertEqual(claim['claim']['channel_name'], '@abc')
self.assertEqual(claim['certificate']['name'], '@abc')
self.assertEqual(claim['claims_in_channel'], 0)
self.assertEqual(claim['name'], 'foo2')
self.assertEqual(claim['value_type'], 'stream')
self.assertEqual(claim['signing_channel']['name'], '@abc')
self.assertTrue(claim['is_channel_signature_valid'])
# resolve has correct confirmations
tx_details = await self.blockchain.get_raw_transaction(claim['claim']['txid'])
self.assertEqual(claim['claim']['confirmations'], json.loads(tx_details)['confirmations'])
tx_details = await self.blockchain.get_raw_transaction(claim['txid'])
self.assertEqual(claim['confirmations'], json.loads(tx_details)['confirmations'])
# resolve handles invalid data
txid = await self.blockchain_claim_name(
"gibberish", hexlify(b"{'invalid':'json'}").decode(), "0.1")
await self.blockchain_claim_name("gibberish", hexlify(b"{'invalid':'json'}").decode(), "0.1")
await self.generate(1)
response = await self.resolve("lbry://gibberish")
self.assertSetEqual({'lbry://gibberish'}, set(response))
claim = response['lbry://gibberish']['claim']
claim = response['lbry://gibberish']
self.assertEqual(claim['name'], 'gibberish')
self.assertEqual(claim['protobuf'], hexlify(b"{'invalid':'json'}").decode())
self.assertFalse(claim['decoded_claim'])
self.assertEqual(claim['txid'], txid)
self.assertEqual(claim['effective_amount'], "0.1")
self.assertNotIn('value', claim)
async def _test_resolve_abc_foo(self):
response = await self.resolve('lbry://@abc/foo')

View file

@ -7,7 +7,7 @@ class TestClaimtrie(CommandTestCase):
return tx['outputs'][0]['claim_id']
async def assertWinningClaim(self, name, tx):
    """Assert that the claim made by `tx` is the controlling claim for `name`.

    Fix: diff residue left both the old `is_winning=True` call and the new
    `is_controlling=True` call in the body; the old call used a removed
    argument and made a redundant RPC, so only the new call is kept.
    """
    other = (await self.out(self.daemon.jsonrpc_claim_search(name=name, is_controlling=True)))['items'][0]
    self.assertEqual(self.get_claim_id(tx), other['claim_id'])
async def test_designed_edge_cases(self):