lbryumx -> lbrynet.extras.wallet.server

Lex Berezhny 2018-11-04 04:42:47 -05:00
parent dc81b6def1
commit 8ca2b75c80
40 changed files with 1210 additions and 33 deletions

View file

@@ -21,7 +21,6 @@ jobs:
- pip install coverage
- pip install git+https://github.com/lbryio/torba.git
- pip install -e .[test]
- pip install git+https://github.com/lbryio/lbryumx.git#latest_rebased
script:
- HOME=/tmp coverage run -p --source=lbrynet -m unittest discover -v tests.unit.wallet
- HOME=/tmp coverage run -p --source=lbrynet -m twisted.trial --reactor=asyncio tests.unit.analytics tests.unit.components tests.unit.core tests.unit.cryptstream tests.unit.database tests.unit.dht tests.unit.lbryfilemanager tests.unit.lbrynet_daemon tests.unit.test_cli tests.unit.test_customLogger
@@ -46,7 +45,6 @@ jobs:
install:
- pip install tox-travis coverage
- pushd .. && git clone https://github.com/lbryio/torba.git && popd
- pushd .. && git clone https://github.com/lbryio/lbryumx.git && cd lbryumx && git checkout latest_rebased && cd .. && popd
script: tox
after_success:
- coverage combine tests/

View file

@@ -17,8 +17,8 @@ from lbrynet.core.PaymentRateManager import OnlyFreePaymentsManager
from lbrynet.core.RateLimiter import RateLimiter
from lbrynet.core.BlobManager import DiskBlobManager
from lbrynet.core.StreamDescriptor import StreamDescriptorIdentifier, EncryptedFileStreamType
from lbrynet.wallet.manager import LbryWalletManager
from lbrynet.wallet.network import Network
from lbrynet.extras.wallet import LbryWalletManager
from lbrynet.extras.wallet import Network
from lbrynet.core.server.BlobRequestHandler import BlobRequestHandlerFactory
from lbrynet.core.server.ServerProtocol import ServerProtocolFactory
from lbrynet.daemon.Component import Component

View file

@@ -46,9 +46,9 @@ from lbrynet.dht.error import TimeoutError
from lbrynet.core.Peer import Peer
from lbrynet.core.SinglePeerDownloader import SinglePeerDownloader
from lbrynet.core.client.StandaloneBlobDownloader import StandaloneBlobDownloader
from lbrynet.wallet.account import Account as LBCAccount
from lbrynet.wallet.manager import LbryWalletManager
from lbrynet.wallet.dewies import dewies_to_lbc, lbc_to_dewies
from lbrynet.extras.wallet import LbryWalletManager
from lbrynet.extras.wallet.account import Account as LBCAccount
from lbrynet.extras.wallet.dewies import dewies_to_lbc, lbc_to_dewies
log = logging.getLogger(__name__)
requires = AuthJSONRPCServer.requires

View file

@@ -13,7 +13,7 @@ from lbrynet.core.StreamDescriptor import download_sd_blob
from lbrynet.file_manager.EncryptedFileDownloader import ManagedEncryptedFileDownloaderFactory
from lbrynet import conf
from torba.client.constants import COIN
from lbrynet.wallet.dewies import dewies_to_lbc
from lbrynet.extras.wallet.dewies import dewies_to_lbc
INITIALIZING_CODE = 'initializing'
DOWNLOAD_METADATA_CODE = 'downloading_metadata'

View file

@@ -4,9 +4,9 @@ from binascii import hexlify
from datetime import datetime
from json import JSONEncoder
from ecdsa import BadSignatureError
from lbrynet.wallet.transaction import Transaction, Output
from lbrynet.wallet.dewies import dewies_to_lbc
from lbrynet.wallet.ledger import MainNetLedger
from lbrynet.extras.wallet import MainNetLedger
from lbrynet.extras.wallet.transaction import Transaction, Output
from lbrynet.extras.wallet.dewies import dewies_to_lbc
log = logging.getLogger(__name__)

View file

@@ -4,6 +4,8 @@ __node_bin__ = ''
__node_url__ = (
'https://github.com/lbryio/lbrycrd/releases/download/v0.12.2.1/lbrycrd-linux.zip'
)
__spvserver__ = 'lbryumx.coin.LBCRegTest'
__spvserver__ = 'lbrynet.extras.wallet.server.coin.LBCRegTest'
from .ledger import MainNetLedger, RegTestLedger
from .manager import LbryWalletManager
from .network import Network
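With these re-exports in place, the rest of the commit switches callers from the old lbrynet.wallet.* and lbryumx paths to the new package. A rough sketch of the resulting import style (assuming the package is installed with its wallet-server extra for the server-side classes):

from lbrynet.extras.wallet import LbryWalletManager, MainNetLedger, Network
from lbrynet.extras.wallet.server.coin import LBCRegTest  # formerly lbryumx.coin.LBCRegTest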

View file

@@ -0,0 +1,174 @@
import hashlib
import struct
import msgpack
from torba.server.hash import hash_to_hex_str
from torba.server.block_processor import BlockProcessor
from lbrynet.schema.proto.claim_pb2 import Claim
from lbrynet.schema.uri import parse_lbry_uri
from lbrynet.schema.decode import smart_decode
from .model import NameClaim, ClaimInfo, ClaimUpdate, ClaimSupport
class LBRYBlockProcessor(BlockProcessor):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if self.env.coin.NET == "regtest":
self.prefetcher.polling_delay = 0.5
self.should_validate_signatures = self.env.boolean('VALIDATE_CLAIM_SIGNATURES', False)
self.logger.info("LbryumX Block Processor - Validating signatures: {}".format(self.should_validate_signatures))
def advance_blocks(self, blocks):
# save height, advance blocks as usual, then hook our claim tx processing
height = self.height + 1
super().advance_blocks(blocks)
pending_undo = []
for index, block in enumerate(blocks):
undo = self.advance_claim_txs(block.transactions, height + index)
pending_undo.append((height+index, undo,))
self.db.write_undo(pending_undo)
def advance_claim_txs(self, txs, height):
# TODO: generate claim undo info!
undo_info = []
add_undo = undo_info.append
update_inputs = set()
for tx, txid in txs:
update_inputs.clear()
if tx.has_claims:
for index, output in enumerate(tx.outputs):
claim = output.claim
if isinstance(claim, NameClaim):
add_undo(self.advance_claim_name_transaction(output, height, txid, index))
elif isinstance(claim, ClaimUpdate):
update_input = self.db.get_update_input(claim, tx.inputs)
if update_input:
update_inputs.add(update_input)
add_undo(self.advance_update_claim(output, height, txid, index))
else:
info = (hash_to_hex_str(txid), hash_to_hex_str(claim.claim_id),)
self.logger.error("REJECTED: {} updating {}".format(*info))
elif isinstance(claim, ClaimSupport):
self.advance_support(claim, txid, index, height, output.value)
for txin in tx.inputs:
if txin not in update_inputs:
abandoned_claim_id = self.db.abandon_spent(txin.prev_hash, txin.prev_idx)
if abandoned_claim_id:
add_undo((abandoned_claim_id, self.db.get_claim_info(abandoned_claim_id)))
return undo_info
def advance_update_claim(self, output, height, txid, nout):
claim_id = output.claim.claim_id
claim_info = self.claim_info_from_output(output, txid, nout, height)
old_claim_info = self.db.get_claim_info(claim_id)
self.db.put_claim_id_for_outpoint(old_claim_info.txid, old_claim_info.nout, None)
if old_claim_info.cert_id:
self.db.remove_claim_from_certificate_claims(old_claim_info.cert_id, claim_id)
if claim_info.cert_id:
self.db.put_claim_id_signed_by_cert_id(claim_info.cert_id, claim_id)
self.db.put_claim_info(claim_id, claim_info)
self.db.put_claim_id_for_outpoint(txid, nout, claim_id)
return claim_id, old_claim_info
def advance_claim_name_transaction(self, output, height, txid, nout):
claim_id = claim_id_hash(txid, nout)
claim_info = self.claim_info_from_output(output, txid, nout, height)
if claim_info.cert_id:
self.db.put_claim_id_signed_by_cert_id(claim_info.cert_id, claim_id)
self.db.put_claim_info(claim_id, claim_info)
self.db.put_claim_for_name(claim_info.name, claim_id)
self.db.put_claim_id_for_outpoint(txid, nout, claim_id)
return claim_id, None
def backup_from_undo_info(self, claim_id, undo_claim_info):
"""
Undo information holds a claim state **before** a transaction changes it
There are 4 possibilities when processing it, of which only 3 are valid ones:
1. the claim is known and the undo info has info, it was an update
2. the claim is known and the undo info doesn't hold any info, it was claimed
3. the claim is unknown and the undo info has info, it was abandoned
4. the claim is unknown and the undo info doesn't hold info, error!
"""
undo_claim_info = ClaimInfo(*undo_claim_info) if undo_claim_info else None
current_claim_info = self.db.get_claim_info(claim_id)
if current_claim_info and undo_claim_info:
# update, remove current claim
self.db.remove_claim_id_for_outpoint(current_claim_info.txid, current_claim_info.nout)
if current_claim_info.cert_id:
self.db.remove_claim_from_certificate_claims(current_claim_info.cert_id, claim_id)
elif current_claim_info and not undo_claim_info:
# claim, abandon it
self.db.abandon_spent(current_claim_info.txid, current_claim_info.nout)
elif not current_claim_info and undo_claim_info:
# abandon, reclaim it (happens below)
pass
else:
# should never happen, unless the database got into an inconsistent state
raise Exception("Unexpected situation occurred on backup, this means the database is inconsistent. "
"Please report. Resetting the data folder (reindex) solves it for now.")
if undo_claim_info:
self.db.put_claim_info(claim_id, undo_claim_info)
if undo_claim_info.cert_id:
cert_id = self._checksig(undo_claim_info.name, undo_claim_info.value, undo_claim_info.address)
self.db.put_claim_id_signed_by_cert_id(cert_id, claim_id)
self.db.put_claim_for_name(undo_claim_info.name, claim_id)
self.db.put_claim_id_for_outpoint(undo_claim_info.txid, undo_claim_info.nout, claim_id)
def backup_txs(self, txs):
self.logger.info("Reorg at height {} with {} transactions.".format(self.height, len(txs)))
undo_info = msgpack.loads(self.db.claim_undo_db.get(struct.pack(">I", self.height)), use_list=False)
for claim_id, undo_claim_info in reversed(undo_info):
self.backup_from_undo_info(claim_id, undo_claim_info)
return super().backup_txs(txs)
def backup_blocks(self, raw_blocks):
self.db.batched_flush_claims()
super().backup_blocks(raw_blocks=raw_blocks)
self.db.batched_flush_claims()
def shutdown(self):
self.db.shutdown()
async def flush(self, flush_utxos):
self.db.batched_flush_claims()
return await super().flush(flush_utxos)
def advance_support(self, claim_support, txid, nout, height, amount):
# TODO: check for more controlling claim rules, like takeover or ordering
pass
def claim_info_from_output(self, output, txid, nout, height):
amount = output.value
address = self.coin.address_from_script(output.pk_script)
name, value, cert_id = output.claim.name, output.claim.value, None
assert txid and address
cert_id = self._checksig(name, value, address)
return ClaimInfo(name, value, txid, nout, amount, address, height, cert_id)
def _checksig(self, name, value, address):
try:
parse_lbry_uri(name.decode()) # skip invalid names
cert_id = Claim.FromString(value).publisherSignature.certificateId[::-1] or None
if not self.should_validate_signatures:
return cert_id
if cert_id:
cert_claim = self.db.get_claim_info(cert_id)
if cert_claim:
certificate = smart_decode(cert_claim.value)
claim_dict = smart_decode(value)
claim_dict.validate_signature(address, certificate)
return cert_id
except Exception as e:
pass
def claim_id_hash(txid, n):
# TODO: This should be in lbryschema
packed = txid + struct.pack('>I', n)
md = hashlib.new('ripemd160')
md.update(hashlib.sha256(packed).digest())
return md.digest()
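claim_id_hash mirrors how claim IDs are derived from an outpoint: RIPEMD-160 over SHA-256 of the raw txid concatenated with the big-endian output index. A minimal usage sketch, assuming the package is installed with its wallet-server extra; the txid is a made-up placeholder, and the result is shown byte-reversed to match the [::-1] display convention used elsewhere in this module:

from binascii import hexlify, unhexlify
from lbrynet.extras.wallet.server.block_processor import claim_id_hash

raw_txid = unhexlify('00' * 32)[::-1]        # placeholder txid, converted to raw little-endian bytes
raw_claim_id = claim_id_hash(raw_txid, 0)    # ripemd160(sha256(txid + big-endian nout))
print(hexlify(raw_claim_id[::-1]).decode())  # display form, byte-reversed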

View file

@@ -0,0 +1,139 @@
import struct
from torba.server.script import ScriptPubKey, _match_ops, OpCodes
from torba.server.util import cachedproperty
from torba.server.hash import hash_to_hex_str, HASHX_LEN
from hashlib import sha256
from torba.server.coins import Coin, CoinError
from .opcodes import decode_claim_script, opcodes as lbry_opcodes
class LBC(Coin):
from .session import LBRYElectrumX
from .block_processor import LBRYBlockProcessor
from .tx import LBRYDeserializer
from .daemon import LBCDaemon
from .db import LBRYDB
DAEMON = LBCDaemon
SESSIONCLS = LBRYElectrumX
BLOCK_PROCESSOR = LBRYBlockProcessor
DB = LBRYDB
DESERIALIZER = LBRYDeserializer
NAME = "LBRY"
SHORTNAME = "LBC"
NET = "mainnet"
BASIC_HEADER_SIZE = 112
CHUNK_SIZE = 96
XPUB_VERBYTES = bytes.fromhex("019C354f")
XPRV_VERBYTES = bytes.fromhex("019C3118")
P2PKH_VERBYTE = bytes.fromhex("55")
P2SH_VERBYTES = bytes.fromhex("7A")
WIF_BYTE = bytes.fromhex("1C")
GENESIS_HASH = ('9c89283ba0f3227f6c03b70216b9f665'
'f0118d5e0fa729cedf4fb34d6a34f463')
TX_COUNT = 2716936
TX_COUNT_HEIGHT = 329554
TX_PER_BLOCK = 1
RPC_PORT = 9245
REORG_LIMIT = 200
PEERS = [
]
@classmethod
def genesis_block(cls, block):
'''Check the Genesis block is the right one for this coin.
Return the block less its unspendable coinbase.
'''
header = cls.block_header(block, 0)
header_hex_hash = hash_to_hex_str(cls.header_hash(header))
if header_hex_hash != cls.GENESIS_HASH:
raise CoinError('genesis block has hash {} expected {}'
.format(header_hex_hash, cls.GENESIS_HASH))
return block
@classmethod
def electrum_header(cls, header, height):
version, = struct.unpack('<I', header[:4])
timestamp, bits, nonce = struct.unpack('<III', header[100:112])
return {
'version': version,
'prev_block_hash': hash_to_hex_str(header[4:36]),
'merkle_root': hash_to_hex_str(header[36:68]),
'claim_trie_root': hash_to_hex_str(header[68:100]),
'timestamp': timestamp,
'bits': bits,
'nonce': nonce,
'block_height': height,
}
@cachedproperty
def address_handlers(cls):
return ScriptPubKey.PayToHandlers(
address=cls.P2PKH_address_from_hash160,
script_hash=cls.P2SH_address_from_hash160,
pubkey=cls.P2PKH_address_from_pubkey,
unspendable=lambda: None,
strange=cls.claim_address_handler,
)
@classmethod
def address_from_script(cls, script):
'''Given a pk_script, return the address it pays to, or None.'''
return ScriptPubKey.pay_to(cls.address_handlers, script)
@classmethod
def claim_address_handler(cls, script):
'''Parse a claim script and return the address it pays to, if any.'''
decoded = decode_claim_script(script)
if not decoded:
return None
ops = []
for op, data, _ in decoded[1]:
if not data:
ops.append(op)
else:
ops.append((op, data,))
match = _match_ops
TO_ADDRESS_OPS = [OpCodes.OP_DUP, OpCodes.OP_HASH160, -1,
OpCodes.OP_EQUALVERIFY, OpCodes.OP_CHECKSIG]
TO_P2SH_OPS = [OpCodes.OP_HASH160, -1, OpCodes.OP_EQUAL]
TO_PUBKEY_OPS = [-1, OpCodes.OP_CHECKSIG]
if match(ops, TO_ADDRESS_OPS):
return cls.P2PKH_address_from_hash160(ops[2][-1])
if match(ops, TO_P2SH_OPS):
return cls.P2SH_address_from_hash160(ops[1][-1])
if match(ops, TO_PUBKEY_OPS):
return cls.P2PKH_address_from_pubkey(ops[0][-1])
if ops and ops[0] == OpCodes.OP_RETURN:
return None
return None
@classmethod
def hashX_from_script(cls, script):
'''
Overrides electrumx hashX from script by extracting addresses from claim scripts.
'''
if script and script[0] == OpCodes.OP_RETURN:
return None
if script[0] in [
lbry_opcodes.OP_CLAIM_NAME,
lbry_opcodes.OP_SUPPORT_CLAIM,
lbry_opcodes.OP_UPDATE_CLAIM
]:
return cls.address_to_hashX(cls.claim_address_handler(script))
else:
return sha256(script).digest()[:HASHX_LEN]
class LBCRegTest(LBC):
NET = "regtest"
GENESIS_HASH = '6e3fcf1299d4ec5d79c3a4c91d624a4acf9e2e173d95a1a0504f677669687556'
XPUB_VERBYTES = bytes.fromhex('043587cf')
XPRV_VERBYTES = bytes.fromhex('04358394')
P2PKH_VERBYTE = bytes.fromhex("6f")
P2SH_VERBYTES = bytes.fromhex("c4")

View file

@@ -0,0 +1,62 @@
from aiorpcx import RPCError
from functools import wraps
from torba.server.daemon import Daemon, DaemonError
def handles_errors(decorated_function):
@wraps(decorated_function)
async def wrapper(*args, **kwargs):
try:
return await decorated_function(*args, **kwargs)
except DaemonError as daemon_error:
error_dict = daemon_error.args[0]
message, code = error_dict['message'], error_dict['code']
raise RPCError(code=code, message=message)
return wrapper
class LBCDaemon(Daemon):
@handles_errors
async def getrawtransaction(self, hex_hash, verbose=False):
return await super().getrawtransaction(hex_hash=hex_hash, verbose=verbose)
@handles_errors
async def getclaimbyid(self, claim_id):
'''Given a claim id, retrieves claim information.'''
return await self._send_single('getclaimbyid', (claim_id,))
@handles_errors
async def getclaimsbyids(self, claim_ids):
'''Given a list of claim ids, batches calls to retrieve claim information.'''
return await self._send_vector('getclaimbyid', ((claim_id,) for claim_id in claim_ids))
@handles_errors
async def getclaimsforname(self, name):
'''Given a name, retrieves all claims matching that name.'''
return await self._send_single('getclaimsforname', (name,))
@handles_errors
async def getclaimsfortx(self, txid):
'''Given a txid, returns the claims made in that transaction.'''
return await self._send_single('getclaimsfortx', (txid,))
@handles_errors
async def getnameproof(self, name, block_hash=None):
'''Given a name and optional block_hash, returns a name proof and winner, if any.'''
return await self._send_single('getnameproof', (name, block_hash,) if block_hash else (name,))
@handles_errors
async def getvalueforname(self, name):
'''Given a name, returns the winning claim value.'''
return await self._send_single('getvalueforname', (name,))
@handles_errors
async def claimname(self, name, hexvalue, amount):
'''Claim a name, used for functional tests only.'''
return await self._send_single('claimname', (name, hexvalue, float(amount)))
@handles_errors
async def generate(self, number_of_blocks):
'''Generates regtest blocks, used for functional tests only.'''
return await self._send_single('generate', (int(number_of_blocks),))
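handles_errors converts the DaemonError payload raised by torba's Daemon into an aiorpcx RPCError, so lbrycrd error codes reach Electrum clients intact. A small sketch of that translation using a stand-in coroutine instead of a real lbrycrd call; the error dict below is made up:

import asyncio
from aiorpcx import RPCError
from torba.server.daemon import DaemonError
from lbrynet.extras.wallet.server.daemon import handles_errors

@handles_errors
async def failing_call():
    raise DaemonError({'message': 'Block not found', 'code': -5})  # stand-in for a failed lbrycrd RPC

try:
    asyncio.get_event_loop().run_until_complete(failing_call())
except RPCError as e:
    print(e.code, e.message)  # -5 Block not found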

View file

@@ -0,0 +1,210 @@
import msgpack
import struct
import time
from torba.server.hash import hash_to_hex_str
from torba.server.db import DB
from .model import ClaimInfo
class LBRYDB(DB):
def __init__(self, *args, **kwargs):
self.claim_cache = {}
self.claims_for_name_cache = {}
self.claims_signed_by_cert_cache = {}
self.outpoint_to_claim_id_cache = {}
self.claims_db = self.names_db = self.signatures_db = self.outpoint_to_claim_id_db = self.claim_undo_db = None
# stores deletes not yet flushed to disk
self.pending_abandons = {}
super().__init__(*args, **kwargs)
def shutdown(self):
self.batched_flush_claims()
self.claims_db.close()
self.names_db.close()
self.signatures_db.close()
self.outpoint_to_claim_id_db.close()
self.claim_undo_db.close()
self.utxo_db.close()
# electrumx ones
self.utxo_db.close()
self.history.close_db()
async def _open_dbs(self, for_sync, compacting):
await super()._open_dbs(for_sync=for_sync, compacting=compacting)
def log_reason(message, is_for_sync):
reason = 'sync' if is_for_sync else 'serving'
self.logger.info('{} for {}'.format(message, reason))
if self.claims_db:
if self.claims_db.for_sync == for_sync:
return
log_reason('closing claim DBs to re-open', for_sync)
self.claims_db.close()
self.names_db.close()
self.signatures_db.close()
self.outpoint_to_claim_id_db.close()
self.claim_undo_db.close()
self.claims_db = self.db_class('claims', for_sync)
self.names_db = self.db_class('names', for_sync)
self.signatures_db = self.db_class('signatures', for_sync)
self.outpoint_to_claim_id_db = self.db_class('outpoint_claim_id', for_sync)
self.claim_undo_db = self.db_class('claim_undo', for_sync)
log_reason('opened claim DBs', self.claims_db.for_sync)
def flush_dbs(self, flush_data, flush_utxos, estimate_txs_remaining):
# flush claims together with utxos as they are parsed together
self.batched_flush_claims()
return super().flush_dbs(flush_data, flush_utxos, estimate_txs_remaining)
def batched_flush_claims(self):
with self.claims_db.write_batch() as claims_batch:
with self.names_db.write_batch() as names_batch:
with self.signatures_db.write_batch() as signed_claims_batch:
with self.outpoint_to_claim_id_db.write_batch() as outpoint_batch:
self.flush_claims(claims_batch, names_batch, signed_claims_batch,
outpoint_batch)
def flush_claims(self, batch, names_batch, signed_claims_batch, outpoint_batch):
flush_start = time.time()
write_claim, write_name, write_cert = batch.put, names_batch.put, signed_claims_batch.put
write_outpoint = outpoint_batch.put
delete_claim, delete_outpoint, delete_name = batch.delete, outpoint_batch.delete, names_batch.delete
delete_cert = signed_claims_batch.delete
for claim_id, outpoints in self.pending_abandons.items():
claim = self.get_claim_info(claim_id)
self.remove_claim_for_name(claim.name, claim_id)
if claim.cert_id:
self.remove_claim_from_certificate_claims(claim.cert_id, claim_id)
self.remove_certificate(claim_id)
self.claim_cache[claim_id] = None
for txid, tx_index in outpoints:
self.put_claim_id_for_outpoint(txid, tx_index, None)
for key, claim in self.claim_cache.items():
if claim:
write_claim(key, claim)
else:
delete_claim(key)
for name, claims in self.claims_for_name_cache.items():
if not claims:
delete_name(name)
else:
write_name(name, msgpack.dumps(claims))
for cert_id, claims in self.claims_signed_by_cert_cache.items():
if not claims:
delete_cert(cert_id)
else:
write_cert(cert_id, msgpack.dumps(claims))
for key, claim_id in self.outpoint_to_claim_id_cache.items():
if claim_id:
write_outpoint(key, claim_id)
else:
delete_outpoint(key)
self.logger.info('flushed at height {:,d} with {:,d} claims, {:,d} outpoints, {:,d} names '
'and {:,d} certificates added while {:,d} were abandoned in {:.1f}s, committing...'
.format(self.db_height,
len(self.claim_cache), len(self.outpoint_to_claim_id_cache),
len(self.claims_for_name_cache),
len(self.claims_signed_by_cert_cache), len(self.pending_abandons),
time.time() - flush_start))
self.claim_cache = {}
self.claims_for_name_cache = {}
self.claims_signed_by_cert_cache = {}
self.outpoint_to_claim_id_cache = {}
self.pending_abandons = {}
def assert_flushed(self, flush_data):
super().assert_flushed(flush_data)
assert not self.claim_cache
assert not self.claims_for_name_cache
assert not self.claims_signed_by_cert_cache
assert not self.outpoint_to_claim_id_cache
assert not self.pending_abandons
def abandon_spent(self, tx_hash, tx_idx):
claim_id = self.get_claim_id_from_outpoint(tx_hash, tx_idx)
if claim_id:
self.logger.info("[!] Abandon: {}".format(hash_to_hex_str(claim_id)))
self.pending_abandons.setdefault(claim_id, []).append((tx_hash, tx_idx,))
return claim_id
def put_claim_id_for_outpoint(self, tx_hash, tx_idx, claim_id):
self.logger.info("[+] Adding outpoint: {}:{} for {}.".format(hash_to_hex_str(tx_hash), tx_idx,
hash_to_hex_str(claim_id) if claim_id else None))
self.outpoint_to_claim_id_cache[tx_hash + struct.pack('>I', tx_idx)] = claim_id
def remove_claim_id_for_outpoint(self, tx_hash, tx_idx):
self.logger.info("[-] Remove outpoint: {}:{}.".format(hash_to_hex_str(tx_hash), tx_idx))
self.outpoint_to_claim_id_cache[tx_hash + struct.pack('>I', tx_idx)] = None
def get_claim_id_from_outpoint(self, tx_hash, tx_idx):
key = tx_hash + struct.pack('>I', tx_idx)
return self.outpoint_to_claim_id_cache.get(key) or self.outpoint_to_claim_id_db.get(key)
def get_claims_for_name(self, name):
if name in self.claims_for_name_cache: return self.claims_for_name_cache[name]
db_claims = self.names_db.get(name)
return msgpack.loads(db_claims) if db_claims else {}
def put_claim_for_name(self, name, claim_id):
self.logger.info("[+] Adding claim {} for name {}.".format(hash_to_hex_str(claim_id), name))
claims = self.get_claims_for_name(name)
claims.setdefault(claim_id, max(claims.values() or [0]) + 1)
self.claims_for_name_cache[name] = claims
def remove_claim_for_name(self, name, claim_id):
self.logger.info("[-] Removing claim from name: {} - {}".format(hash_to_hex_str(claim_id), name))
claims = self.get_claims_for_name(name)
claim_n = claims.pop(claim_id)
for claim_id, number in claims.items():
if number > claim_n:
claims[claim_id] = number - 1
self.claims_for_name_cache[name] = claims
def get_signed_claim_ids_by_cert_id(self, cert_id):
if cert_id in self.claims_signed_by_cert_cache: return self.claims_signed_by_cert_cache[cert_id]
db_claims = self.signatures_db.get(cert_id)
return msgpack.loads(db_claims, use_list=True) if db_claims else []
def put_claim_id_signed_by_cert_id(self, cert_id, claim_id):
self.logger.info("[+] Adding signature: {} - {}".format(hash_to_hex_str(claim_id), hash_to_hex_str(cert_id)))
certs = self.get_signed_claim_ids_by_cert_id(cert_id)
certs.append(claim_id)
self.claims_signed_by_cert_cache[cert_id] = certs
def remove_certificate(self, cert_id):
self.logger.info("[-] Removing certificate: {}".format(hash_to_hex_str(cert_id)))
self.claims_signed_by_cert_cache[cert_id] = []
def remove_claim_from_certificate_claims(self, cert_id, claim_id):
self.logger.info("[-] Removing signature: {} - {}".format(hash_to_hex_str(claim_id), hash_to_hex_str(cert_id)))
certs = self.get_signed_claim_ids_by_cert_id(cert_id)
if claim_id in certs:
certs.remove(claim_id)
self.claims_signed_by_cert_cache[cert_id] = certs
def get_claim_info(self, claim_id):
serialized = self.claim_cache.get(claim_id) or self.claims_db.get(claim_id)
return ClaimInfo.from_serialized(serialized) if serialized else None
def put_claim_info(self, claim_id, claim_info):
self.logger.info("[+] Adding claim info for: {}".format(hash_to_hex_str(claim_id)))
self.claim_cache[claim_id] = claim_info.serialized
def get_update_input(self, claim, inputs):
claim_id = claim.claim_id
claim_info = self.get_claim_info(claim_id)
if not claim_info:
return False
for input in inputs:
if input.prev_hash == claim_info.txid and input.prev_idx == claim_info.nout:
return input
return False
def write_undo(self, pending_undo):
with self.claim_undo_db.write_batch() as writer:
for height, undo_info in pending_undo:
writer.put(struct.pack(">I", height), msgpack.dumps(undo_info))
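The caches above are keyed the same way as the databases they front: outpoint_to_claim_id entries use the raw txid concatenated with the big-endian 4-byte output index, and claim_undo records are keyed by big-endian height. A quick sketch of those key layouts with struct alone (placeholder values):

import struct

tx_hash = bytes(32)                            # raw little-endian txid (placeholder)
outpoint_key = tx_hash + struct.pack('>I', 5)  # 36-byte key into outpoint_to_claim_id_db
undo_key = struct.pack('>I', 400000)           # key into claim_undo_db, one record per height
print(len(outpoint_key), undo_key.hex())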

View file

@@ -0,0 +1,47 @@
from collections import namedtuple
import msgpack
from torba.server.util import cachedproperty
# Classes representing data and their serializers, if any.
class ClaimInfo(namedtuple("NameClaim", "name value txid nout amount address height cert_id")):
'''Claim information as it is stored in the database'''
@classmethod
def from_serialized(cls, serialized):
return cls(*msgpack.loads(serialized))
@property
def serialized(self):
return msgpack.dumps(self)
class NameClaim(namedtuple("NameClaim", "name value")):
pass
class ClaimUpdate(namedtuple("ClaimUpdate", "name claim_id value")):
pass
class ClaimSupport(namedtuple("ClaimSupport", "name claim_id")):
pass
class LBRYTx(namedtuple("Tx", "version inputs outputs locktime")):
'''Transaction that can contain claim, update or support in its outputs.'''
@cachedproperty
def is_coinbase(self):
return self.inputs[0].is_coinbase
@cachedproperty
def has_claims(self):
for output in self.outputs:
if output.claim:
return True
return False
class TxClaimOutput(namedtuple("TxClaimOutput", "value pk_script claim")):
pass
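ClaimInfo's serialized/from_serialized pair simply lets msgpack pack the namedtuple as a flat array, which is the value format the claims database in db.py stores. A roundtrip sketch with made-up field values, assuming msgpack and the package are installed:

from lbrynet.extras.wallet.server.model import ClaimInfo

info = ClaimInfo(name=b'example', value=b'\x00\x01', txid=bytes(32), nout=0,
                 amount=100000000, address=b'placeholder-address', height=5, cert_id=None)
blob = info.serialized                      # msgpack array of the eight fields
assert ClaimInfo.from_serialized(blob) == info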

View file

@@ -0,0 +1,126 @@
import struct
from torba.server.enum import Enumeration
from .model import NameClaim, ClaimSupport, ClaimUpdate
# TODO: Take this to lbryschema (it's also on lbryum and lbryum-server)
opcodes = Enumeration("Opcodes", [
("OP_0", 0), ("OP_PUSHDATA1", 76), "OP_PUSHDATA2", "OP_PUSHDATA4", "OP_1NEGATE", "OP_RESERVED",
"OP_1", "OP_2", "OP_3", "OP_4", "OP_5", "OP_6", "OP_7",
"OP_8", "OP_9", "OP_10", "OP_11", "OP_12", "OP_13", "OP_14", "OP_15", "OP_16",
"OP_NOP", "OP_VER", "OP_IF", "OP_NOTIF", "OP_VERIF", "OP_VERNOTIF", "OP_ELSE", "OP_ENDIF",
"OP_VERIFY",
"OP_RETURN", "OP_TOALTSTACK", "OP_FROMALTSTACK", "OP_2DROP", "OP_2DUP", "OP_3DUP", "OP_2OVER",
"OP_2ROT", "OP_2SWAP",
"OP_IFDUP", "OP_DEPTH", "OP_DROP", "OP_DUP", "OP_NIP", "OP_OVER", "OP_PICK", "OP_ROLL",
"OP_ROT",
"OP_SWAP", "OP_TUCK", "OP_CAT", "OP_SUBSTR", "OP_LEFT", "OP_RIGHT", "OP_SIZE", "OP_INVERT",
"OP_AND",
"OP_OR", "OP_XOR", "OP_EQUAL", "OP_EQUALVERIFY", "OP_RESERVED1", "OP_RESERVED2", "OP_1ADD",
"OP_1SUB", "OP_2MUL",
"OP_2DIV", "OP_NEGATE", "OP_ABS", "OP_NOT", "OP_0NOTEQUAL", "OP_ADD", "OP_SUB", "OP_MUL",
"OP_DIV",
"OP_MOD", "OP_LSHIFT", "OP_RSHIFT", "OP_BOOLAND", "OP_BOOLOR",
"OP_NUMEQUAL", "OP_NUMEQUALVERIFY", "OP_NUMNOTEQUAL", "OP_LESSTHAN",
"OP_GREATERTHAN", "OP_LESSTHANOREQUAL", "OP_GREATERTHANOREQUAL", "OP_MIN", "OP_MAX",
"OP_WITHIN", "OP_RIPEMD160", "OP_SHA1", "OP_SHA256", "OP_HASH160",
"OP_HASH256", "OP_CODESEPARATOR", "OP_CHECKSIG", "OP_CHECKSIGVERIFY", "OP_CHECKMULTISIG",
"OP_CHECKMULTISIGVERIFY", "OP_NOP1", "OP_NOP2", "OP_NOP3", "OP_NOP4", "OP_NOP5",
"OP_CLAIM_NAME",
"OP_SUPPORT_CLAIM", "OP_UPDATE_CLAIM",
("OP_SINGLEBYTE_END", 0xF0),
("OP_DOUBLEBYTE_BEGIN", 0xF000),
"OP_PUBKEY", "OP_PUBKEYHASH",
("OP_INVALIDOPCODE", 0xFFFF),
])
def script_GetOp(bytes):
i = 0
while i < len(bytes):
vch = None
opcode = bytes[i]
i += 1
if opcode <= opcodes.OP_PUSHDATA4:
nSize = opcode
if opcode == opcodes.OP_PUSHDATA1:
nSize = bytes[i]
i += 1
elif opcode == opcodes.OP_PUSHDATA2:
(nSize,) = struct.unpack_from('<H', bytes, i)
i += 2
elif opcode == opcodes.OP_PUSHDATA4:
(nSize,) = struct.unpack_from('<I', bytes, i)
i += 4
if i + nSize > len(bytes):
vch = "_INVALID_" + bytes[i:]
i = len(bytes)
else:
vch = bytes[i:i + nSize]
i += nSize
yield (opcode, vch, i)
def decode_claim_script(bytes_script):
try:
decoded_script = [x for x in script_GetOp(bytes_script)]
except Exception as e:
print(e)
return None
if len(decoded_script) <= 6:
return False
op = 0
claim_type = decoded_script[op][0]
if claim_type == opcodes.OP_UPDATE_CLAIM:
if len(decoded_script) <= 7:
return False
if claim_type not in [
opcodes.OP_CLAIM_NAME,
opcodes.OP_SUPPORT_CLAIM,
opcodes.OP_UPDATE_CLAIM
]:
return False
op += 1
value = None
claim_id = None
claim = None
if not (0 <= decoded_script[op][0] <= opcodes.OP_PUSHDATA4):
return False
name = decoded_script[op][1]
op += 1
if not (0 <= decoded_script[op][0] <= opcodes.OP_PUSHDATA4):
return False
if decoded_script[0][0] in [
opcodes.OP_SUPPORT_CLAIM,
opcodes.OP_UPDATE_CLAIM
]:
claim_id = decoded_script[op][1]
if len(claim_id) != 20:
return False
else:
value = decoded_script[op][1]
op += 1
if decoded_script[0][0] == opcodes.OP_UPDATE_CLAIM:
value = decoded_script[op][1]
op += 1
if decoded_script[op][0] != opcodes.OP_2DROP:
return False
op += 1
if decoded_script[op][0] != opcodes.OP_DROP and decoded_script[0][0] == opcodes.OP_CLAIM_NAME:
return False
elif decoded_script[op][0] != opcodes.OP_2DROP and decoded_script[0][0] == opcodes.OP_UPDATE_CLAIM:
return False
op += 1
if decoded_script[0][0] == opcodes.OP_CLAIM_NAME:
if name is None or value is None:
return False
claim = NameClaim(name, value)
elif decoded_script[0][0] == opcodes.OP_UPDATE_CLAIM:
if name is None or value is None or claim_id is None:
return False
claim = ClaimUpdate(name, claim_id, value)
elif decoded_script[0][0] == opcodes.OP_SUPPORT_CLAIM:
if name is None or claim_id is None:
return False
claim = ClaimSupport(name, claim_id)
return claim, decoded_script[op:]
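decode_claim_script walks the script with script_GetOp and, when the script is a valid claim operation, returns the parsed claim model together with the remaining (ordinary pay-to-address) portion. A sketch that hand-assembles a minimal OP_CLAIM_NAME script; the name, value and hash160 bytes are arbitrary placeholders:

from lbrynet.extras.wallet.server.opcodes import decode_claim_script, opcodes

name, value, hash160 = b'example', b'\x00\x01', bytes(20)
script = (
    bytes([opcodes.OP_CLAIM_NAME, len(name)]) + name +        # OP_CLAIM_NAME <name>
    bytes([len(value)]) + value +                             # <value>
    bytes([opcodes.OP_2DROP, opcodes.OP_DROP]) +              # drop the claim data
    bytes([opcodes.OP_DUP, opcodes.OP_HASH160, len(hash160)]) + hash160 +
    bytes([opcodes.OP_EQUALVERIFY, opcodes.OP_CHECKSIG])      # standard P2PKH tail
)
claim, remaining_ops = decode_claim_script(script)
print(claim)  # NameClaim(name=b'example', value=b'\x00\x01')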

View file

@@ -0,0 +1,392 @@
import math
from binascii import unhexlify, hexlify
from aiorpcx import RPCError
from torba.server.hash import hash_to_hex_str
from torba.server.session import ElectrumX
import torba.server.util as util
from lbrynet.schema.uri import parse_lbry_uri
from lbrynet.schema.error import URIParseError, DecodeError
from .block_processor import LBRYBlockProcessor
from .db import LBRYDB
class LBRYElectrumX(ElectrumX):
PROTOCOL_MIN = (0, 0) # temporary, for supporting 0.10 protocol
max_errors = math.inf # don't disconnect people for errors! let them happen...
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# fixme: this is a rebase hack, we need to go through ChainState instead later
self.daemon = self.session_mgr.daemon
self.bp: LBRYBlockProcessor = self.session_mgr.bp
self.db: LBRYDB = self.bp.db
# fixme: lbryum specific subscribe
self.subscribe_height = False
def set_request_handlers(self, ptuple):
super().set_request_handlers(ptuple)
handlers = {
'blockchain.transaction.get_height': self.transaction_get_height,
'blockchain.claimtrie.getclaimbyid': self.claimtrie_getclaimbyid,
'blockchain.claimtrie.getclaimsforname': self.claimtrie_getclaimsforname,
'blockchain.claimtrie.getclaimsbyids': self.claimtrie_getclaimsbyids,
'blockchain.claimtrie.getvalue': self.claimtrie_getvalue,
'blockchain.claimtrie.getnthclaimforname': self.claimtrie_getnthclaimforname,
'blockchain.claimtrie.getclaimsintx': self.claimtrie_getclaimsintx,
'blockchain.claimtrie.getclaimssignedby': self.claimtrie_getclaimssignedby,
'blockchain.claimtrie.getclaimssignedbynthtoname': self.claimtrie_getclaimssignedbynthtoname,
'blockchain.claimtrie.getvalueforuri': self.claimtrie_getvalueforuri,
'blockchain.claimtrie.getvaluesforuris': self.claimtrie_getvalueforuris,
'blockchain.claimtrie.getclaimssignedbyid': self.claimtrie_getclaimssignedbyid,
'blockchain.block.get_server_height': self.get_server_height,
'blockchain.block.get_block': self.get_block,
}
# fixme: methods we use but shouldn't be using anymore. To be removed when torba goes out
handlers.update({
'blockchain.numblocks.subscribe': self.numblocks_subscribe,
'blockchain.utxo.get_address': self.utxo_get_address,
'blockchain.transaction.broadcast':
self.transaction_broadcast_1_0,
'blockchain.transaction.get': self.transaction_get,
})
self.request_handlers.update(handlers)
async def utxo_get_address(self, tx_hash, index):
# fixme: lbryum
# Used only for electrum client command-line requests. We no
# longer index by address, so need to request the raw
# transaction. So it works for any TXO not just UTXOs.
self.assert_tx_hash(tx_hash)
try:
index = int(index)
if index < 0:
raise ValueError
except ValueError:
raise RPCError("index has to be >= 0 and integer")
raw_tx = await self.daemon_request('getrawtransaction', tx_hash)
if not raw_tx:
return None
raw_tx = util.hex_to_bytes(raw_tx)
tx = self.coin.DESERIALIZER(raw_tx).read_tx()
if index >= len(tx.outputs):
return None
return self.coin.address_from_script(tx.outputs[index].pk_script)
async def transaction_broadcast_1_0(self, raw_tx):
# fixme: lbryum
# An ugly API: current Electrum clients only pass the raw
# transaction in hex and expect error messages to be returned in
# the result field. And the server shouldn't be doing the client's
# user interface job here.
try:
return await self.transaction_broadcast(raw_tx)
except RPCError as e:
return e.message
async def numblocks_subscribe(self):
# fixme workaround for lbryum
'''Subscribe to get height of new blocks.'''
self.subscribe_height = True
return self.bp.height
async def notify(self, height, touched):
# fixme workaround for lbryum
await super().notify(height, touched)
if self.subscribe_height and height != self.notified_height:
self.send_notification('blockchain.numblocks.subscribe', (height,))
async def transaction_get(self, tx_hash, verbose=False):
# fixme: workaround for lbryum sending the height instead of True/False.
# fixme: lbryum_server ignored that and always used False, but this is out of spec
if verbose not in (True, False):
verbose = False
return await self.daemon_request('getrawtransaction', tx_hash, verbose)
async def get_block(self, block_hash):
return await self.daemon.deserialised_block(block_hash)
async def get_server_height(self):
return self.bp.height
async def transaction_get_height(self, tx_hash):
self.assert_tx_hash(tx_hash)
transaction_info = await self.daemon.getrawtransaction(tx_hash, True)
if transaction_info and 'hex' in transaction_info and 'confirmations' in transaction_info:
# an unconfirmed transaction from lbrycrdd will not have a 'confirmations' field
height = self.db.db_height
height = height - transaction_info['confirmations']
return height
elif transaction_info and 'hex' in transaction_info:
return -1
return None
async def claimtrie_getclaimssignedby(self, name):
winning_claim = await self.daemon.getvalueforname(name)
if winning_claim:
return await self.claimtrie_getclaimssignedbyid(winning_claim['claimId'])
async def claimtrie_getclaimssignedbyid(self, certificate_id):
claim_ids = self.get_claim_ids_signed_by(certificate_id)
return await self.batched_formatted_claims_from_daemon(claim_ids)
def get_claim_ids_signed_by(self, certificate_id):
raw_certificate_id = unhexlify(certificate_id)[::-1]
raw_claim_ids = self.db.get_signed_claim_ids_by_cert_id(raw_certificate_id)
return list(map(hash_to_hex_str, raw_claim_ids))
def get_signed_claims_with_name_for_channel(self, channel_id, name):
claim_ids_for_name = list(self.db.get_claims_for_name(name.encode('ISO-8859-1')).keys())
claim_ids_for_name = set(map(hash_to_hex_str, claim_ids_for_name))
channel_claim_ids = set(self.get_claim_ids_signed_by(channel_id))
return claim_ids_for_name.intersection(channel_claim_ids)
async def claimtrie_getclaimssignedbynthtoname(self, name, n):
n = int(n)
for claim_id, sequence in self.db.get_claims_for_name(name.encode('ISO-8859-1')).items():
if n == sequence:
return await self.claimtrie_getclaimssignedbyid(hash_to_hex_str(claim_id))
async def claimtrie_getclaimsintx(self, txid):
# TODO: this needs further discussion.
# Code on lbryum-server is wrong and we need to gather what we clearly expect from this command
claim_ids = [claim['claimId'] for claim in (await self.daemon.getclaimsfortx(txid)) if 'claimId' in claim]
return await self.batched_formatted_claims_from_daemon(claim_ids)
async def claimtrie_getvalue(self, name, block_hash=None):
proof = await self.daemon.getnameproof(name, block_hash)
result = {'proof': proof, 'supports': []}
if proof_has_winning_claim(proof):
tx_hash, nout = proof['txhash'], int(proof['nOut'])
transaction_info = await self.daemon.getrawtransaction(tx_hash, True)
result['transaction'] = transaction_info['hex']
result['height'] = (self.db.db_height - transaction_info['confirmations']) + 1
raw_claim_id = self.db.get_claim_id_from_outpoint(unhexlify(tx_hash)[::-1], nout)
sequence = self.db.get_claims_for_name(name.encode('ISO-8859-1')).get(raw_claim_id)
if sequence:
claim_id = hexlify(raw_claim_id[::-1]).decode()
claim_info = await self.daemon.getclaimbyid(claim_id)
if not claim_info or not claim_info.get('value'):
claim_info = await self.slow_get_claim_by_id_using_name(claim_id)
result['claim_sequence'] = sequence
result['claim_id'] = claim_id
supports = self.format_supports_from_daemon(claim_info.get('supports', [])) # fixme: lbrycrd#124
result['supports'] = supports
else:
self.logger.warning('tx has no claims in db: {} {}'.format(tx_hash, nout))
return result
async def claimtrie_getnthclaimforname(self, name, n):
n = int(n)
for claim_id, sequence in self.db.get_claims_for_name(name.encode('ISO-8859-1')).items():
if n == sequence:
return await self.claimtrie_getclaimbyid(hash_to_hex_str(claim_id))
async def claimtrie_getclaimsforname(self, name):
claims = await self.daemon.getclaimsforname(name)
if claims:
claims['claims'] = [self.format_claim_from_daemon(claim, name) for claim in claims['claims']]
claims['supports_without_claims'] = claims['supports without claims']
del claims['supports without claims']
claims['last_takeover_height'] = claims['nLastTakeoverHeight']
del claims['nLastTakeoverHeight']
return claims
return {}
async def batched_formatted_claims_from_daemon(self, claim_ids):
claims = await self.daemon.getclaimsbyids(claim_ids)
result = []
for claim, claim_id in zip(claims, claim_ids):
if claim and claim.get('value'):
result.append(self.format_claim_from_daemon(claim))
else:
recovered_claim = await self.slow_get_claim_by_id_using_name(claim_id)
if recovered_claim:
result.append(self.format_claim_from_daemon(recovered_claim))
return result
def format_claim_from_daemon(self, claim, name=None):
'''Changes the returned claim data to the format expected by lbrynet and adds missing fields.'''
if not claim: return {}
name = name or claim['name']
claim_id = claim['claimId']
raw_claim_id = unhexlify(claim_id)[::-1]
if not self.db.get_claim_info(raw_claim_id):
#raise RPCError("Lbrycrd has {} but not lbryumx, please submit a bug report.".format(claim_id))
return {}
address = self.db.get_claim_info(raw_claim_id).address.decode()
sequence = self.db.get_claims_for_name(name.encode('ISO-8859-1')).get(raw_claim_id)
if not sequence:
return {}
supports = self.format_supports_from_daemon(claim.get('supports', [])) # fixme: lbrycrd#124
amount = get_from_possible_keys(claim, 'amount', 'nAmount')
height = get_from_possible_keys(claim, 'height', 'nHeight')
effective_amount = get_from_possible_keys(claim, 'effective amount', 'nEffectiveAmount')
valid_at_height = get_from_possible_keys(claim, 'valid at height', 'nValidAtHeight')
return {
"name": name,
"claim_id": claim['claimId'],
"txid": claim['txid'],
"nout": claim['n'],
"amount": amount,
"depth": self.db.db_height - height,
"height": height,
"value": hexlify(claim['value'].encode('ISO-8859-1')).decode(),
"claim_sequence": sequence, # from index
"address": address, # from index
"supports": supports, # fixme: to be included in lbrycrd#124
"effective_amount": effective_amount,
"valid_at_height": valid_at_height # TODO PR lbrycrd to include it
}
def format_supports_from_daemon(self, supports):
return [[support['txid'], support['n'], get_from_possible_keys(support, 'amount', 'nAmount')] for
support in supports]
async def claimtrie_getclaimbyid(self, claim_id):
self.assert_claim_id(claim_id)
claim = await self.daemon.getclaimbyid(claim_id)
if not claim or not claim.get('value'):
claim = await self.slow_get_claim_by_id_using_name(claim_id)
return self.format_claim_from_daemon(claim)
async def claimtrie_getclaimsbyids(self, *claim_ids):
claims = await self.batched_formatted_claims_from_daemon(claim_ids)
return dict(zip(claim_ids, claims))
def assert_tx_hash(self, value):
'''Raise an RPCError if the value is not a valid transaction
hash.'''
try:
if len(util.hex_to_bytes(value)) == 32:
return
except Exception:
pass
raise RPCError('{} should be a transaction hash'.format(value))
def assert_claim_id(self, value):
'''Raise an RPCError if the value is not a valid claim id
hash.'''
try:
if len(util.hex_to_bytes(value)) == 20:
return
except Exception:
pass
raise RPCError('{} should be a claim id hash'.format(value))
async def slow_get_claim_by_id_using_name(self, claim_id):
# TODO: temporary workaround for a lbrycrd bug on indexing. Should be removed when it gets stable
raw_claim_id = unhexlify(claim_id)[::-1]
claim = self.db.get_claim_info(raw_claim_id)
if claim:
name = claim.name.decode('ISO-8859-1')
claims = await self.daemon.getclaimsforname(name)
for claim in claims['claims']:
if claim['claimId'] == claim_id:
claim['name'] = name
self.logger.warning('Recovered a claim missing from lbrycrd index: {} {}'.format(name, claim_id))
return claim
async def claimtrie_getvalueforuri(self, block_hash, uri, known_certificates=None):
# TODO: this thing is huge, refactor
CLAIM_ID = "claim_id"
WINNING = "winning"
SEQUENCE = "sequence"
uri = uri
block_hash = block_hash
try:
parsed_uri = parse_lbry_uri(uri)
except URIParseError as err:
return {'error': err.message}
result = {}
if parsed_uri.is_channel:
certificate = None
# TODO: this is also done on the else, refactor
if parsed_uri.claim_id:
certificate_info = await self.claimtrie_getclaimbyid(parsed_uri.claim_id)
if certificate_info and certificate_info['name'] == parsed_uri.name:
certificate = {'resolution_type': CLAIM_ID, 'result': certificate_info}
elif parsed_uri.claim_sequence:
certificate_info = await self.claimtrie_getnthclaimforname(parsed_uri.name, parsed_uri.claim_sequence)
if certificate_info:
certificate = {'resolution_type': SEQUENCE, 'result': certificate_info}
else:
certificate_info = await self.claimtrie_getvalue(parsed_uri.name, block_hash)
if certificate_info:
certificate = {'resolution_type': WINNING, 'result': certificate_info}
if certificate and 'claim_id' not in certificate['result']:
return result
if certificate and not parsed_uri.path:
result['certificate'] = certificate
channel_id = certificate['result']['claim_id']
claims_in_channel = await self.claimtrie_getclaimssignedbyid(channel_id)
result['unverified_claims_in_channel'] = {claim['claim_id']: (claim['name'], claim['height'])
for claim in claims_in_channel if claim}
elif certificate:
result['certificate'] = certificate
channel_id = certificate['result']['claim_id']
claim_ids_matching_name = self.get_signed_claims_with_name_for_channel(channel_id, parsed_uri.path)
claims = await self.batched_formatted_claims_from_daemon(claim_ids_matching_name)
claims_in_channel = {claim['claim_id']: (claim['name'], claim['height'])
for claim in claims}
result['unverified_claims_for_name'] = claims_in_channel
else:
claim = None
if parsed_uri.claim_id:
claim_info = await self.claimtrie_getclaimbyid(parsed_uri.claim_id)
if claim_info and claim_info['name'] == parsed_uri.name:
claim = {'resolution_type': CLAIM_ID, 'result': claim_info}
elif parsed_uri.claim_sequence:
claim_info = await self.claimtrie_getnthclaimforname(parsed_uri.name, parsed_uri.claim_sequence)
if claim_info:
claim = {'resolution_type': SEQUENCE, 'result': claim_info}
else:
claim_info = await self.claimtrie_getvalue(parsed_uri.name, block_hash)
if claim_info:
claim = {'resolution_type': WINNING, 'result': claim_info}
if (claim and
# is not an unclaimed winning name
(claim['resolution_type'] != WINNING or proof_has_winning_claim(claim['result']['proof']))):
raw_claim_id = unhexlify(claim['result']['claim_id'])[::-1]
raw_certificate_id = self.db.get_claim_info(raw_claim_id).cert_id
if raw_certificate_id:
certificate_id = hash_to_hex_str(raw_certificate_id)
certificate = await self.claimtrie_getclaimbyid(certificate_id)
if certificate:
certificate = {'resolution_type': CLAIM_ID,
'result': certificate}
result['certificate'] = certificate
result['claim'] = claim
return result
async def claimtrie_getvalueforuris(self, block_hash, *uris):
MAX_BATCH_URIS = 500
if len(uris) > MAX_BATCH_URIS:
raise Exception("Exceeds max batch uris of {}".format(MAX_BATCH_URIS))
return {uri: await self.claimtrie_getvalueforuri(block_hash, uri) for uri in uris}
# TODO: get it all concurrently when lbrycrd pending changes goes into a stable release
#async def getvalue(uri):
# value = await self.claimtrie_getvalueforuri(block_hash, uri)
# return uri, value,
#return dict([await asyncio.gather(*tuple(getvalue(uri) for uri in uris))][0])
def proof_has_winning_claim(proof):
return {'txhash', 'nOut'}.issubset(proof.keys())
def get_from_possible_keys(dictionary, *keys):
for key in keys:
if key in dictionary:
return dictionary[key]
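proof_has_winning_claim and get_from_possible_keys are the only module-level helpers here: the first detects whether a name proof actually points at a winning claim, the second papers over the two key spellings lbrycrd has returned. A quick sketch with toy inputs, assuming the wallet-server extra is installed:

from lbrynet.extras.wallet.server.session import proof_has_winning_claim, get_from_possible_keys

print(proof_has_winning_claim({'txhash': 'ff' * 32, 'nOut': 0}))    # True
print(proof_has_winning_claim({}))                                  # False: name has no winning claim
print(get_from_possible_keys({'nAmount': 5}, 'amount', 'nAmount'))  # 5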

View file

@@ -0,0 +1,21 @@
from torba.server.tx import Deserializer
from .opcodes import decode_claim_script
from .model import TxClaimOutput, LBRYTx
class LBRYDeserializer(Deserializer):
def _read_output(self):
value = self._read_le_int64()
script = self._read_varbytes() # pk_script
claim = decode_claim_script(script)
claim = claim[0] if claim else None
return TxClaimOutput(value, script, claim)
def read_tx(self):
return LBRYTx(
self._read_le_int32(), # version
self._read_inputs(), # inputs
self._read_outputs(), # outputs
self._read_le_uint32() # locktime
)

View file

@@ -48,7 +48,14 @@ setup(
 'test': (
 'mock>=2.0,<3.0',
 'faker==0.8.17',
-'torba[server]'
-)
+'pytest',
+'pytest-asyncio',
+'pytest-xprocess',
+'torba[server]',
+),
+'wallet-server': (
+'msgpack',
+'torba[server]',
+),
 }
 )
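With the new wallet-server extra, the server-side dependencies (msgpack plus torba's server components) install alongside the package; for a development checkout that would look roughly like pip install -e .[wallet-server], adding the test extra as well when running the integration tests.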

View file

@@ -16,7 +16,7 @@ lbrynet.schema.BLOCKCHAIN_NAME = 'lbrycrd_regtest'
from lbrynet import conf as lbry_conf
from lbrynet.dht.node import Node
from lbrynet.daemon.Daemon import Daemon
from lbrynet.wallet.manager import LbryWalletManager
from lbrynet.extras.wallet import LbryWalletManager
from lbrynet.daemon.Components import WalletComponent, DHTComponent, HashAnnouncerComponent, \
ExchangeRateManagerComponent
from lbrynet.daemon.Components import REFLECTOR_COMPONENT, PEER_PROTOCOL_SERVER_COMPONENT

View file

@@ -3,9 +3,9 @@ import asyncio
from torba.testcase import IntegrationTestCase
from lbrynet.schema.claim import ClaimDict
from lbrynet.wallet.transaction import Transaction
from lbrynet.wallet.account import generate_certificate
from lbrynet.wallet.dewies import dewies_to_lbc as d2l, lbc_to_dewies as l2d
from lbrynet.extras.wallet.transaction import Transaction
from lbrynet.extras.wallet.account import generate_certificate
from lbrynet.extras.wallet.dewies import dewies_to_lbc as d2l, lbc_to_dewies as l2d
import lbrynet.schema
lbrynet.schema.BLOCKCHAIN_NAME = 'lbrycrd_regtest'

View file

@@ -19,7 +19,7 @@ from lbrynet.daemon.Components import PEER_PROTOCOL_SERVER_COMPONENT, EXCHANGE_R
from lbrynet.daemon.Components import RATE_LIMITER_COMPONENT, HEADERS_COMPONENT, FILE_MANAGER_COMPONENT
from lbrynet.daemon.Daemon import Daemon as LBRYDaemon
from lbrynet.file_manager.EncryptedFileDownloader import ManagedEncryptedFileDownloader
from lbrynet.wallet.manager import LbryWalletManager
from lbrynet.extras.wallet import LbryWalletManager
from torba.client.wallet import Wallet
from lbrynet.core.PaymentRateManager import OnlyFreePaymentsManager

View file

@@ -15,7 +15,7 @@ from lbrynet.database.storage import SQLiteStorage
from lbrynet.dht.peerfinder import DummyPeerFinder
from lbrynet.file_manager.EncryptedFileStatusReport import EncryptedFileStatusReport
from lbrynet.file_manager.EncryptedFileDownloader import ManagedEncryptedFileDownloader
from lbrynet.wallet.manager import LbryWalletManager
from lbrynet.extras.wallet import LbryWalletManager
from tests.mocks import mock_conf_settings

View file

@@ -1,9 +1,9 @@
from torba.testcase import AsyncioTestCase
from torba.client.wallet import Wallet
from lbrynet.wallet.ledger import MainNetLedger, WalletDatabase
from lbrynet.wallet.header import Headers
from lbrynet.wallet.account import Account
from lbrynet.extras.wallet.ledger import MainNetLedger, WalletDatabase
from lbrynet.extras.wallet.header import Headers
from lbrynet.extras.wallet.account import Account
class TestAccount(AsyncioTestCase):

View file

@@ -1,7 +1,7 @@
import unittest
from binascii import hexlify, unhexlify
from lbrynet.wallet.claim_proofs import get_hash_for_outpoint, verify_proof
from lbrynet.extras.wallet.claim_proofs import get_hash_for_outpoint, verify_proof
from lbrynet.schema.hashing import double_sha256

View file

@@ -1,6 +1,6 @@
import unittest
from lbrynet.wallet.dewies import lbc_to_dewies as l2d, dewies_to_lbc as d2l
from lbrynet.extras.wallet.dewies import lbc_to_dewies as l2d, dewies_to_lbc as d2l
class TestDeweyConversion(unittest.TestCase):

View file

@@ -3,7 +3,7 @@ from binascii import unhexlify
from torba.testcase import AsyncioTestCase
from torba.client.util import ArithUint256
from lbrynet.wallet.ledger import Headers
from lbrynet.extras.wallet.ledger import Headers
class TestHeaders(AsyncioTestCase):

View file

@@ -1,9 +1,9 @@
from torba.testcase import AsyncioTestCase
from torba.client.wallet import Wallet
from lbrynet.wallet.account import Account
from lbrynet.wallet.transaction import Transaction, Output, Input
from lbrynet.wallet.ledger import MainNetLedger
from lbrynet.extras.wallet.account import Account
from lbrynet.extras.wallet.transaction import Transaction, Output, Input
from lbrynet.extras.wallet.ledger import MainNetLedger
class LedgerTestCase(AsyncioTestCase):

View file

@@ -1,7 +1,7 @@
import unittest
from binascii import hexlify, unhexlify
from lbrynet.wallet.script import OutputScript
from lbrynet.extras.wallet.script import OutputScript
class TestPayClaimNamePubkeyHash(unittest.TestCase):

View file

@@ -5,8 +5,8 @@ from torba.testcase import AsyncioTestCase
from torba.client.constants import CENT, COIN, NULL_HASH32
from torba.client.wallet import Wallet
from lbrynet.wallet.ledger import MainNetLedger
from lbrynet.wallet.transaction import Transaction, Output, Input
from lbrynet.extras.wallet import MainNetLedger
from lbrynet.extras.wallet.transaction import Transaction, Output, Input
FEE_PER_BYTE = 50

View file

@@ -5,7 +5,6 @@ envlist = py37-integration
deps =
coverage
../torba[server]
../lbryumx
extras = test
changedir = {toxinidir}/tests
setenv =