wip lbry.blockchain

Lex Berezhny 2020-05-01 09:28:51 -04:00
parent c61c9726b0
commit ccd32eae70
29 changed files with 1556 additions and 1758 deletions

View file

@@ -1,2 +0,0 @@
from .sync import BlockchainSync
from .lbrycrd import Lbrycrd

View file

@@ -7,6 +7,9 @@ class BCDataStream:
def __init__(self, data=None, fp=None):
self.data = fp or BytesIO(data)
def tell(self):
return self.data.tell()
def reset(self):
self.data.seek(0)
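
A minimal usage sketch of the two methods added above, assuming the import path this commit moves to (lbry.blockchain.bcd_data_stream):

    from lbry.blockchain.bcd_data_stream import BCDataStream

    stream = BCDataStream(b'\x01\x00\x00\x00')
    assert stream.tell() == 0   # position before any reads
    stream.data.read(4)         # consume the four bytes
    assert stream.tell() == 4
    stream.reset()              # rewind to the start
    assert stream.tell() == 0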

View file

@@ -1,37 +1,59 @@
import struct
from hashlib import sha256
from typing import Set
from binascii import unhexlify
from typing import NamedTuple, List
from chiabip158 import PyBIP158
from lbry.crypto.hash import double_sha256
from lbry.wallet.transaction import Transaction
from lbry.wallet.bcd_data_stream import BCDataStream
from lbry.blockchain.transaction import Transaction
from lbry.blockchain.bcd_data_stream import BCDataStream
ZERO_BLOCK = bytes((0,)*32)
class Block:
def create_block_filter(addresses: Set[str]) -> bytes:
return bytes(PyBIP158([bytearray(a.encode()) for a in addresses]).GetEncoded())
__slots__ = (
'version', 'block_hash', 'prev_block_hash',
'merkle_root', 'claim_trie_root', 'timestamp',
'bits', 'nonce', 'txs'
)
def __init__(self, stream: BCDataStream):
def get_block_filter(block_filter: str) -> PyBIP158:
return PyBIP158(bytearray(unhexlify(block_filter)))
class Block(NamedTuple):
height: int
version: int
file_number: int
block_hash: bytes
prev_block_hash: bytes
merkle_root: bytes
claim_trie_root: bytes
timestamp: int
bits: int
nonce: int
txs: List[Transaction]
@staticmethod
def from_data_stream(stream: BCDataStream, height: int, file_number: int):
header = stream.data.read(112)
version, = struct.unpack('<I', header[:4])
timestamp, bits, nonce = struct.unpack('<III', header[100:112])
self.version = version
self.block_hash = double_sha256(header)
self.prev_block_hash = header[4:36]
self.merkle_root = header[36:68]
self.claim_trie_root = header[68:100][::-1]
self.timestamp = timestamp
self.bits = bits
self.nonce = nonce
tx_count = stream.read_compact_size()
self.txs = [
Transaction(position=i)._deserialize(stream)
for i in range(tx_count)
]
return Block(
height=height,
version=version,
file_number=file_number,
block_hash=double_sha256(header),
prev_block_hash=header[4:36],
merkle_root=header[36:68],
claim_trie_root=header[68:100][::-1],
timestamp=timestamp,
bits=bits,
nonce=nonce,
txs=[Transaction(height=height, position=i).deserialize(stream) for i in range(tx_count)]
)
@property
def is_first_block(self):
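
For reference, the 112-byte header layout that from_data_stream parses above, restated as a standalone sketch (offsets copied from the code; double_sha256 is the existing import in this file):

    import struct
    from lbry.crypto.hash import double_sha256

    def parse_lbry_header(header: bytes) -> dict:
        assert len(header) == 112
        version, = struct.unpack('<I', header[:4])
        timestamp, bits, nonce = struct.unpack('<III', header[100:112])
        return {
            'block_hash': double_sha256(header),      # hash of the full header
            'version': version,                       # bytes 0-3
            'prev_block_hash': header[4:36],          # bytes 4-35
            'merkle_root': header[36:68],             # bytes 36-67
            'claim_trie_root': header[68:100][::-1],  # bytes 68-99, stored reversed
            'timestamp': timestamp,                   # bytes 100-103
            'bits': bits,                             # bytes 104-107
            'nonce': nonce,                           # bytes 108-111
        }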

View file

@@ -1,37 +1,123 @@
import os.path
import asyncio
import sqlite3
from typing import Optional
from concurrent.futures import ThreadPoolExecutor
FILES = [
'block_index',
'claims'
]
class BlockchainDB:
__slots__ = 'file_path', 'db'
def __init__(self, directory: str):
self.file_path = f"file:{os.path.join(directory, 'block_index.sqlite')}?mode=ro"
self.db: Optional[sqlite3.Connection] = None
self.directory = directory
self.connection: Optional[sqlite3.Connection] = None
self.executor: Optional[ThreadPoolExecutor] = None
def open(self):
self.db = sqlite3.connect(self.file_path, uri=True, timeout=60.0 * 5)
self.db.row_factory = sqlite3.Row
async def run_in_executor(self, *args):
return await asyncio.get_event_loop().run_in_executor(self.executor, *args)
def execute(self, *args, **kwargs):
if self.db is None:
self.open()
return list(self.db.execute(*args, **kwargs).fetchall())
def sync_open(self):
self.connection = sqlite3.connect(
os.path.join(self.directory, FILES[0]+'.sqlite'),
timeout=60.0 * 5
)
for file in FILES[1:]:
self.connection.execute(
f"ATTACH DATABASE '{os.path.join(self.directory, file+'.sqlite')}' AS {file}"
)
self.connection.row_factory = sqlite3.Row
def get_block_files(self):
return self.execute(
async def open(self):
assert self.executor is None, "Database is already open."
self.executor = ThreadPoolExecutor(max_workers=1)
return await self.run_in_executor(self.sync_open)
def sync_close(self):
self.connection.close()
self.connection = None
async def close(self):
if self.executor is not None:
if self.connection is not None:
await self.run_in_executor(self.sync_close)
self.executor.shutdown()
self.executor = None
def sync_execute(self, sql: str, *args):
return self.connection.execute(sql, *args)
async def execute(self, sql, *args):
return await self.run_in_executor(self.sync_execute, sql, *args)
def sync_execute_fetchall(self, sql: str, *args):
return list(self.connection.execute(sql, *args).fetchall())
async def execute_fetchall(self, sql: str, *args):
return await self.run_in_executor(self.sync_execute_fetchall, sql, *args)
def sync_get_block_files(self):
return self.sync_execute_fetchall(
"""
SELECT file as file_number, COUNT(hash) as blocks, SUM(txcount) as txs
FROM block_info GROUP BY file ORDER BY file ASC;
"""
)
def get_file_details(self, block_file):
return self.execute(
async def get_block_files(self):
return await self.run_in_executor(self.sync_get_block_files)
def sync_get_file_details(self, block_file):
return self.sync_execute_fetchall(
"""
SELECT datapos as data_offset, height, hash as block_hash, txCount as txs
FROM block_info WHERE file = ? ORDER BY datapos ASC;
""", (block_file,)
)
async def get_file_details(self, block_file):
return await self.run_in_executor(self.sync_get_file_details, block_file)
def sync_get_claimtrie(self):
return self.sync_execute_fetchall(
"""
SELECT
takeover.name AS normalized,
takeover.claimID AS claim_hash,
takeover.height AS last_take_over_height,
originalHeight AS original_height,
updateHeight AS update_height,
validHeight AS valid_height,
activationHeight AS activation_height,
expirationHeight AS expiration_height
FROM takeover JOIN claim USING (claimID)
GROUP BY takeover.name HAVING MAX(height);
"""
)
async def get_claimtrie(self):
return await self.run_in_executor(self.sync_get_claimtrie)
def sync_get_claims(self):
return self.sync_execute_fetchall(
"""
SELECT
claimID AS claim_hash,
txID AS tx_hash,
txN AS position,
amount,
originalHeight AS original_height,
updateHeight AS update_height,
validHeight AS valid_height,
activationHeight AS activation_height,
expirationHeight AS expiration_height
FROM claims.claim
"""
)
async def get_claims(self):
return await self.run_in_executor(self.sync_get_claims)
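
A usage sketch for the class above; the directory path is hypothetical, and open() attaches claims.sqlite alongside block_index.sqlite (per FILES) while routing all sqlite calls through a single-thread executor:

    import asyncio

    async def main():
        db = BlockchainDB('/path/to/lbrycrd/regtest')  # hypothetical path
        await db.open()
        try:
            for row in await db.get_block_files():
                print(row['file_number'], row['blocks'], row['txs'])
        finally:
            await db.close()

    asyncio.run(main())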

View file

@@ -1,6 +1,6 @@
import textwrap
from decimal import Decimal
from .util import coins_to_satoshis, satoshis_to_coins
from lbry.blockchain.util import coins_to_satoshis, satoshis_to_coins
def lbc_to_dewies(lbc: str) -> int:

View file

@@ -1,5 +1,5 @@
from binascii import hexlify, unhexlify
from .constants import NULL_HASH32
from lbry.constants import NULL_HASH32
class TXRef:

View file

@@ -12,7 +12,7 @@ from typing import Optional, Iterator, Tuple, Callable
from binascii import hexlify, unhexlify
from lbry.crypto.hash import sha512, double_sha256, ripemd160
from lbry.wallet.util import ArithUint256
from lbry.blockchain.util import ArithUint256
from .checkpoints import HASHES

View file

@@ -8,15 +8,16 @@ import tempfile
import urllib.request
from typing import Optional
from binascii import hexlify
from concurrent.futures import ThreadPoolExecutor
import aiohttp
import zmq
import zmq.asyncio
from lbry.wallet.stream import StreamController
from lbry.conf import Config
from lbry.event import EventController
from .database import BlockchainDB
from .ledger import Ledger, RegTestLedger
log = logging.getLogger(__name__)
@@ -58,10 +59,10 @@ class Process(asyncio.SubprocessProtocol):
class Lbrycrd:
def __init__(self, path, regtest=False):
self.data_dir = self.actual_data_dir = path
self.regtest = regtest
if regtest:
def __init__(self, ledger: Ledger):
self.ledger = ledger
self.data_dir = self.actual_data_dir = ledger.conf.lbrycrd_dir
if self.is_regtest:
self.actual_data_dir = os.path.join(self.data_dir, 'regtest')
self.blocks_dir = os.path.join(self.actual_data_dir, 'blocks')
self.bin_dir = os.path.join(os.path.dirname(__file__), 'bin')
@@ -74,34 +75,27 @@ class Lbrycrd:
self.rpcport = 9245 + 2 # avoid conflict with default rpc port
self.rpcuser = 'rpcuser'
self.rpcpassword = 'rpcpassword'
self.session: Optional[aiohttp.ClientSession] = None
self.subscribed = False
self.subscription: Optional[asyncio.Task] = None
self.subscription_url = 'tcp://127.0.0.1:29000'
self.default_generate_address = None
self._on_block_controller = StreamController()
self._on_block_controller = EventController()
self.on_block = self._on_block_controller.stream
self.on_block.listen(lambda e: log.info('%s %s', hexlify(e['hash']), e['msg']))
self.db = BlockchainDB(self.actual_data_dir)
self.executor = ThreadPoolExecutor(max_workers=1)
self.session: Optional[aiohttp.ClientSession] = None
@classmethod
def temp_regtest(cls):
return cls(RegTestLedger(Config.with_same_dir(tempfile.mkdtemp())))
def get_block_file_path_from_number(self, block_file_number):
return os.path.join(self.actual_data_dir, 'blocks', f'blk{block_file_number:05}.dat')
async def get_block_files(self):
return await asyncio.get_running_loop().run_in_executor(
self.executor, self.db.get_block_files
)
async def get_file_details(self, block_file):
return await asyncio.get_running_loop().run_in_executor(
self.executor, self.db.get_file_details, block_file
)
@classmethod
def temp_regtest(cls):
return cls(tempfile.mkdtemp(), True)
@property
def is_regtest(self):
return isinstance(self.ledger, RegTestLedger)
@property
def rpc_url(self):
@@ -150,7 +144,7 @@ class Lbrycrd:
return self.exists or await self.download()
def get_start_command(self, *args):
if self.regtest:
if self.is_regtest:
args += ('-regtest',)
return (
self.daemon_bin,
@@ -164,6 +158,14 @@ class Lbrycrd:
*args
)
async def open(self):
self.session = aiohttp.ClientSession()
await self.db.open()
async def close(self):
await self.db.close()
await self.session.close()
async def start(self, *args):
loop = asyncio.get_event_loop()
command = self.get_start_command(*args)
@@ -171,11 +173,11 @@ class Lbrycrd:
self.transport, self.protocol = await loop.subprocess_exec(Process, *command)
await self.protocol.ready.wait()
assert not self.protocol.stopped.is_set()
self.session = aiohttp.ClientSession()
await self.open()
async def stop(self, cleanup=True):
try:
await self.session.close()
await self.close()
self.transport.terminate()
await self.protocol.stopped.wait()
assert self.transport.get_returncode() == 0, "lbrycrd daemon exited with an error"
@@ -201,7 +203,7 @@ class Lbrycrd:
try:
while self.subscribed:
msg = await sock.recv_multipart()
self._on_block_controller.add({
await self._on_block_controller.add({
'hash': msg[1],
'msg': struct.unpack('<I', msg[2])[0]
})
@@ -244,6 +246,15 @@ class Lbrycrd:
async def generate_to_address(self, blocks, address):
return await self.rpc("generatetoaddress", [blocks, address])
async def send_to_address(self, address, amount):
return await self.rpc("sendtoaddress", [address, amount])
async def get_block(self, block_hash):
return await self.rpc("getblock", [block_hash])
async def get_raw_transaction(self, txid):
return await self.rpc("getrawtransaction", [txid])
async def fund_raw_transaction(self, tx):
return await self.rpc("fundrawtransaction", [tx])
@@ -255,3 +266,15 @@ class Lbrycrd:
async def claim_name(self, name, data, amount):
return await self.rpc("claimname", [name, data, amount])
async def update_claim(self, txid, data, amount):
return await self.rpc("updateclaim", [txid, data, amount])
async def abandon_claim(self, txid, address):
return await self.rpc("abandonclaim", [txid, address])
async def support_claim(self, name, claim_id, amount, value="", istip=False):
return await self.rpc("supportclaim", [name, claim_id, amount, value, istip])
async def abandon_support(self, txid, address):
return await self.rpc("abandonsupport", [txid, address])
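
A hedged end-to-end sketch combining the RPC helpers above on a throwaway regtest chain; ensure() and generate() are used by the tests later in this commit, and the argument formats shown here are assumptions:

    import asyncio
    from binascii import hexlify
    from lbry.schema.claim import Stream

    async def demo():
        chain = Lbrycrd.temp_regtest()
        await chain.ensure()             # download lbrycrd if not present
        await chain.start()
        try:
            await chain.generate(110)    # mine past coinbase maturity
            claim = Stream().update(title='example').claim
            txid = await chain.claim_name(
                'foo', hexlify(claim.to_bytes()).decode(), '1.0'
            )
            await chain.generate(1)      # confirm the claim
            print(await chain.get_raw_transaction(txid))
        finally:
            await chain.stop()

    asyncio.run(demo())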

File diff suppressed because it is too large

View file

@@ -294,20 +294,25 @@ class Template:
class Script:
__slots__ = 'source', '_template', '_values', '_template_hint'
__slots__ = 'source', 'offset', '_template', '_values', '_template_hint'
templates: List[Template] = []
NO_SCRIPT = Template('no_script', None) # special case
def __init__(self, source=None, template=None, values=None, template_hint=None):
def __init__(self, source=None, template=None, values=None, template_hint=None, offset=None):
self.source = source
self.offset = offset
self._template = template
self._values = values
self._template_hint = template_hint
if source is None and template and values:
self.generate()
@property
def length(self):
return len(self.source)
@property
def template(self):
if self._template is None:

View file

@@ -1,111 +1,220 @@
import os
import asyncio
import logging
from threading import Thread
from multiprocessing import Queue, Event
from concurrent import futures
import multiprocessing as mp
from contextvars import ContextVar
from typing import Tuple, Optional
from concurrent.futures import Executor, ThreadPoolExecutor, ProcessPoolExecutor
from lbry.wallet.stream import StreamController, EventQueuePublisher
from lbry.db import Database
from sqlalchemy import func, bindparam
from sqlalchemy.future import select
from lbry.event import EventController, BroadcastSubscription
from lbry.service.base import Service, Sync, BlockEvent
from lbry.db import (
queries, TXO_TYPES, Claim, Claimtrie, TX, TXO, TXI, Block as BlockTable,
)
from .lbrycrd import Lbrycrd
from . import worker
from .block import Block, create_block_filter
from .bcd_data_stream import BCDataStream
from .ledger import Ledger
log = logging.getLogger(__name__)
_context: ContextVar[Tuple[Lbrycrd, mp.Queue, mp.Event]] = ContextVar('ctx')
class ProgressMonitorThread(Thread):
def ctx():
return _context.get()
STOP = 'stop'
FORMAT = '{l_bar}{bar}| {n_fmt:>6}/{total_fmt:>7} [{elapsed}<{remaining:>5}, {rate_fmt:>15}]'
def __init__(self, state: dict, queue: Queue, stream_controller: StreamController):
super().__init__()
self.state = state
self.queue = queue
self.stream_controller = stream_controller
self.loop = asyncio.get_event_loop()
def initialize(url: str, ledger: Ledger, progress: mp.Queue, stop: mp.Event, track_metrics=False):
chain = Lbrycrd(ledger)
chain.db.sync_open()
_context.set((chain, progress, stop))
queries.initialize(url=url, ledger=ledger, track_metrics=track_metrics)
def run(self):
asyncio.set_event_loop(self.loop)
while True:
msg = self.queue.get()
if msg == self.STOP:
def process_block_file(block_file_number):
chain, progress, stop = ctx()
block_file_path = chain.get_block_file_path_from_number(block_file_number)
num = 0
progress.put_nowait((block_file_number, 1, num))
best_height = queries.get_best_height()
best_block_processed = -1
collector = queries.RowCollector(queries.ctx())
with open(block_file_path, 'rb') as fp:
stream = BCDataStream(fp=fp)
for num, block_info in enumerate(chain.db.sync_get_file_details(block_file_number), start=1):
if stop.is_set():
return
self.stream_controller.add(msg)
def shutdown(self):
self.queue.put(self.STOP)
self.join()
def __enter__(self):
self.start()
def __exit__(self, exc_type, exc_val, exc_tb):
self.shutdown()
if num % 100 == 0:
progress.put_nowait((block_file_number, 1, num))
fp.seek(block_info['data_offset'])
block = Block.from_data_stream(stream, block_info['height'], block_file_number)
if block.height <= best_height:
continue
best_block_processed = max(block.height, best_block_processed)
collector.add_block(block)
collector.save(lambda remaining, total: progress.put((block_file_number, 2, remaining, total)))
return best_block_processed
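
The progress queue above carries two tuple shapes: (file_number, 1, blocks_done) while blocks are read, and (file_number, 2, remaining, total) while rows are saved. A hypothetical consumer loop (the real consumer lives in the sync class below; the None sentinel is an assumption for this sketch):

    def report(queue):
        while True:
            msg = queue.get()
            if msg is None:                     # hypothetical stop sentinel
                break
            if msg[1] == 1:
                file_number, _, done = msg
                print(f'file {file_number}: {done} blocks read')
            elif msg[1] == 2:
                file_number, _, remaining, total = msg
                print(f'file {file_number}: saved {total - remaining}/{total}')
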
class BlockchainSync:
def process_claimtrie():
execute = queries.ctx().execute
chain, progress, stop = ctx()
def __init__(self, chain: Lbrycrd, db: Database, use_process_pool=False):
execute(Claimtrie.delete())
for record in chain.db.sync_get_claimtrie():
execute(
Claimtrie.insert(), {
'normalized': record['normalized'],
'claim_hash': record['claim_hash'],
'last_take_over_height': record['last_take_over_height'],
}
)
best_height = queries.get_best_height()
for record in chain.db.sync_get_claims():
execute(
Claim.update()
.where(Claim.c.claim_hash == record['claim_hash'])
.values(
activation_height=record['activation_height'],
expiration_height=record['expiration_height']
)
)
support = TXO.alias('support')
effective_amount_update = (
Claim.update()
.where(Claim.c.activation_height <= best_height)
.values(
effective_amount=(
select(func.coalesce(func.sum(support.c.amount), 0) + Claim.c.amount)
.select_from(support).where(
(support.c.claim_hash == Claim.c.claim_hash) &
(support.c.txo_type == TXO_TYPES['support']) &
(support.c.txo_hash.notin_(select(TXI.c.txo_hash)))
).scalar_subquery()
)
)
)
execute(effective_amount_update)
def process_block_and_tx_filters():
execute = queries.ctx().execute
blocks = []
for block in queries.get_blocks_without_filters():
block_filter = create_block_filter(
{r['address'] for r in queries.get_block_tx_addresses(block_hash=block['block_hash'])}
)
blocks.append({'pk': block['block_hash'], 'block_filter': block_filter})
execute(BlockTable.update().where(BlockTable.c.block_hash == bindparam('pk')), blocks)
txs = []
for tx in queries.get_transactions_without_filters():
tx_filter = create_block_filter(
{r['address'] for r in queries.get_block_tx_addresses(tx_hash=tx['tx_hash'])}
)
txs.append({'pk': tx['tx_hash'], 'tx_filter': tx_filter})
execute(TX.update().where(TX.c.tx_hash == bindparam('pk')), txs)
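
The bindparam('pk') pattern above is SQLAlchemy's executemany form of UPDATE: one statement compiled once, then run over a list of parameter dicts. A standalone sketch (table and values invented for illustration, with an explicit values() clause spelled out):

    from sqlalchemy import (
        Column, LargeBinary, MetaData, Table, bindparam, create_engine
    )

    metadata = MetaData()
    block = Table(
        'block', metadata,
        Column('block_hash', LargeBinary, primary_key=True),
        Column('block_filter', LargeBinary),
    )
    engine = create_engine('sqlite:///:memory:')
    metadata.create_all(engine)
    with engine.begin() as conn:
        conn.execute(block.insert(), [{'block_hash': b'aa', 'block_filter': None}])
        conn.execute(
            block.update()
            .where(block.c.block_hash == bindparam('pk'))
            .values(block_filter=bindparam('new_filter')),
            [{'pk': b'aa', 'new_filter': b'\x01'}],
        )
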
class BlockchainSync(Sync):
def __init__(self, service: Service, chain: Lbrycrd, multiprocess=False):
super().__init__(service)
self.chain = chain
self.db = db
self.use_process_pool = use_process_pool
self._on_progress_controller = StreamController()
self.message_queue = mp.Queue()
self.stop_event = mp.Event()
self.on_block_subscription: Optional[BroadcastSubscription] = None
self.advance_loop_task: Optional[asyncio.Task] = None
self.advance_loop_event = asyncio.Event()
self.executor = self._create_executor(multiprocess)
self._on_progress_controller = EventController()
self.on_progress = self._on_progress_controller.stream
def get_worker_pool(self, queue, full_stop) -> futures.Executor:
def _create_executor(self, multiprocess) -> Executor:
args = dict(
initializer=worker.initializer,
initargs=(self.chain.data_dir, self.chain.regtest, self.db.db_path, queue, full_stop)
initializer=initialize,
initargs=(
self.service.db.url, self.chain.ledger,
self.message_queue, self.stop_event
)
)
if not self.use_process_pool:
return futures.ThreadPoolExecutor(max_workers=1, **args)
return futures.ProcessPoolExecutor(max_workers=max(os.cpu_count()-1, 4), **args)
if multiprocess:
return ProcessPoolExecutor(
max_workers=max(os.cpu_count() - 1, 4), **args
)
else:
return ThreadPoolExecutor(
max_workers=1, **args
)
async def start(self):
await self.advance()
self.chain.subscribe()
self.advance_loop_task = asyncio.create_task(self.advance_loop())
self.on_block_subscription = self.chain.on_block.listen(
lambda e: self.advance_loop_event.set()
)
async def stop(self):
self.chain.unsubscribe()
if self.on_block_subscription is not None:
self.on_block_subscription.cancel()
self.stop_event.set()
self.advance_loop_task.cancel()
self.executor.shutdown()
async def load_blocks(self):
jobs = []
queue, full_stop = Queue(), Event()
executor = self.get_worker_pool(queue, full_stop)
files = list(await self.chain.get_block_files_not_synced())
state = {
file.file_number: {
'status': worker.PENDING,
'done_txs': 0,
'total_txs': file.txs,
'done_blocks': 0,
'total_blocks': file.blocks,
} for file in files
}
progress = EventQueuePublisher(queue, self._on_progress_controller)
progress.start()
tasks = []
for file in await self.chain.db.get_block_files():
tasks.append(asyncio.get_running_loop().run_in_executor(
self.executor, process_block_file, file['file_number']
))
done, pending = await asyncio.wait(
tasks, return_when=asyncio.FIRST_EXCEPTION
)
if pending:
self.stop_event.set()
for future in pending:
future.cancel()
return max(f.result() for f in done)
def cancel_all_the_things():
for job in jobs:
job.cancel()
full_stop.set()
for job in jobs:
exception = job.exception()
if exception is not None:
log.exception(exception)
raise exception
async def process_claims(self):
await asyncio.get_event_loop().run_in_executor(
self.executor, queries.process_claims_and_supports
)
try:
async def process_block_and_tx_filters(self):
await asyncio.get_event_loop().run_in_executor(
self.executor, process_block_and_tx_filters
)
for file in files:
jobs.append(executor.submit(worker.process_block_file, file.file_number))
async def process_claimtrie(self):
await asyncio.get_event_loop().run_in_executor(
self.executor, process_claimtrie
)
done, not_done = await asyncio.get_event_loop().run_in_executor(
None, futures.wait, jobs, None, futures.FIRST_EXCEPTION
)
if not_done:
cancel_all_the_things()
async def post_process(self):
await self.process_claims()
if self.service.conf.spv_address_filters:
await self.process_block_and_tx_filters()
await self.process_claimtrie()
except asyncio.CancelledError:
cancel_all_the_things()
raise
async def advance(self):
best_height = await self.load_blocks()
await self.post_process()
await self._on_block_controller.add(BlockEvent(best_height))
finally:
progress.stop()
executor.shutdown()
async def advance_loop(self):
while True:
await self.advance_loop_event.wait()
self.advance_loop_event.clear()
await self.advance()
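
The sync module above initializes each pool worker exactly once: initialize() stashes a (chain, progress, stop) tuple in a ContextVar so jobs like process_block_file can reuse open connections instead of reconnecting per task. A self-contained sketch of that pattern, independent of the lbry classes:

    import multiprocessing as mp
    from contextvars import ContextVar
    from concurrent.futures import ProcessPoolExecutor

    _ctx: ContextVar = ContextVar('ctx')

    def init_worker(progress, stop):
        _ctx.set((progress, stop))        # runs once per worker process

    def job(n: int) -> int:
        progress, stop = _ctx.get()       # per-worker state, no reconnects
        if stop.is_set():
            return -1
        progress.put_nowait((n, 'done'))
        return n * n

    if __name__ == '__main__':
        queue, stop = mp.Queue(), mp.Event()
        with ProcessPoolExecutor(
            max_workers=2, initializer=init_worker, initargs=(queue, stop)
        ) as pool:
            print(list(pool.map(job, range(4))))   # [0, 1, 4, 9]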

View file

@@ -0,0 +1,78 @@
import os
import sqlite3
import asyncio
from typing import List
from .block import Block
from .lbrycrd import Lbrycrd
def sync_create_lbrycrd_databases(dir_path: str):
for file_name, ddl in DDL.items():
connection = sqlite3.connect(os.path.join(dir_path, file_name))
connection.executescript(ddl)
connection.close()
async def create_lbrycrd_databases(dir_path: str):
await asyncio.get_running_loop().run_in_executor(
None, sync_create_lbrycrd_databases, dir_path
)
async def add_block_to_lbrycrd(chain: Lbrycrd, block: Block, takeovers: List[str]):
for tx in block.txs:
for txo in tx.outputs:
if txo.is_claim:
await insert_claim(chain, block, tx, txo)
if txo.id in takeovers:
await insert_takeover(chain, block, tx, txo)
async def insert_claim(chain, block, tx, txo):
await chain.db.execute("""
INSERT OR REPLACE INTO claim (
claimID, name, nodeName, txID, txN, originalHeight, updateHeight, validHeight,
activationHeight, expirationHeight, amount
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, 10000, ?)
""", (
txo.claim_hash, txo.claim_name, txo.claim_name, tx.hash, txo.position,
block.height, block.height, block.height, block.height, txo.amount
)
)
async def insert_takeover(chain, block, tx, txo):
await chain.db.execute(
"INSERT INTO takeover (name) VALUES (?)",
(txo.claim_name,)
)
# These schemas were extracted by opening each of lbrycrd's latest sqlite
# databases and running the '.schema' command.
DDL = {
'claims.sqlite': """
CREATE TABLE node (name BLOB NOT NULL PRIMARY KEY, parent BLOB REFERENCES node(name) DEFERRABLE INITIALLY DEFERRED, hash BLOB);
CREATE TABLE claim (claimID BLOB NOT NULL PRIMARY KEY, name BLOB NOT NULL, nodeName BLOB NOT NULL REFERENCES node(name) DEFERRABLE INITIALLY DEFERRED, txID BLOB NOT NULL, txN INTEGER NOT NULL, originalHeight INTEGER NOT NULL, updateHeight INTEGER NOT NULL, validHeight INTEGER NOT NULL, activationHeight INTEGER NOT NULL, expirationHeight INTEGER NOT NULL, amount INTEGER NOT NULL);
CREATE TABLE support (txID BLOB NOT NULL, txN INTEGER NOT NULL, supportedClaimID BLOB NOT NULL, name BLOB NOT NULL, nodeName BLOB NOT NULL, blockHeight INTEGER NOT NULL, validHeight INTEGER NOT NULL, activationHeight INTEGER NOT NULL, expirationHeight INTEGER NOT NULL, amount INTEGER NOT NULL, PRIMARY KEY(txID, txN));
CREATE TABLE takeover (name BLOB NOT NULL, height INTEGER NOT NULL, claimID BLOB, PRIMARY KEY(name, height DESC));
CREATE INDEX node_hash_len_name ON node (hash, LENGTH(name) DESC);
CREATE INDEX node_parent ON node (parent);
CREATE INDEX takeover_height ON takeover (height);
CREATE INDEX claim_activationHeight ON claim (activationHeight);
CREATE INDEX claim_expirationHeight ON claim (expirationHeight);
CREATE INDEX claim_nodeName ON claim (nodeName);
CREATE INDEX support_supportedClaimID ON support (supportedClaimID);
CREATE INDEX support_activationHeight ON support (activationHeight);
CREATE INDEX support_expirationHeight ON support (expirationHeight);
CREATE INDEX support_nodeName ON support (nodeName);
""",
'block_index.sqlite': """
CREATE TABLE block_file (file INTEGER NOT NULL PRIMARY KEY, blocks INTEGER NOT NULL, size INTEGER NOT NULL, undoSize INTEGER NOT NULL, heightFirst INTEGER NOT NULL, heightLast INTEGER NOT NULL, timeFirst INTEGER NOT NULL, timeLast INTEGER NOT NULL );
CREATE TABLE block_info (hash BLOB NOT NULL PRIMARY KEY, prevHash BLOB NOT NULL, height INTEGER NOT NULL, file INTEGER NOT NULL, dataPos INTEGER NOT NULL, undoPos INTEGER NOT NULL, txCount INTEGER NOT NULL, status INTEGER NOT NULL, version INTEGER NOT NULL, rootTxHash BLOB NOT NULL, rootTrieHash BLOB NOT NULL, time INTEGER NOT NULL, bits INTEGER NOT NULL, nonce INTEGER NOT NULL );
CREATE TABLE tx_to_block (txID BLOB NOT NULL PRIMARY KEY, file INTEGER NOT NULL, blockPos INTEGER NOT NULL, txPos INTEGER NOT NULL);
CREATE TABLE flag (name TEXT NOT NULL PRIMARY KEY, value INTEGER NOT NULL);
CREATE INDEX block_info_height ON block_info (height);
""",
}
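
A quick sanity check of the DDL above: create both databases in a temp directory and list the tables defined in claims.sqlite:

    import sqlite3
    import tempfile

    dir_path = tempfile.mkdtemp()
    sync_create_lbrycrd_databases(dir_path)
    db = sqlite3.connect(f'{dir_path}/claims.sqlite')
    print([r[0] for r in db.execute(
        "SELECT name FROM sqlite_master WHERE type='table'"
    )])  # expected: ['node', 'claim', 'support', 'takeover']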

View file

@@ -1,9 +1,8 @@
import struct
import hashlib
import logging
import typing
from binascii import hexlify, unhexlify
from typing import List, Iterable, Optional, Tuple
from typing import List, Iterable, Optional
import ecdsa
from cryptography.hazmat.backends import default_backend
@@ -13,7 +12,6 @@ from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.hazmat.primitives.asymmetric.utils import Prehashed
from cryptography.exceptions import InvalidSignature
from lbry.error import InsufficientFundsError
from lbry.crypto.hash import hash160, sha256
from lbry.crypto.base58 import Base58
from lbry.schema.url import normalize_name
@@ -21,16 +19,10 @@ from lbry.schema.claim import Claim
from lbry.schema.purchase import Purchase
from .script import InputScript, OutputScript
from .constants import COIN, NULL_HASH32
from .bcd_data_stream import BCDataStream
from .hash import TXRef, TXRefImmutable
from .util import ReadOnlyList
if typing.TYPE_CHECKING:
from lbry.wallet.account import Account
from lbry.wallet.ledger import Ledger
from lbry.wallet.wallet import Wallet
log = logging.getLogger()
@@ -190,20 +182,6 @@ class Input(InputOutput):
stream.write_uint32(self.sequence)
class OutputEffectiveAmountEstimator:
__slots__ = 'txo', 'txi', 'fee', 'effective_amount'
def __init__(self, ledger: 'Ledger', txo: 'Output') -> None:
self.txo = txo
self.txi = Input.spend(txo)
self.fee: int = self.txi.get_fee(ledger)
self.effective_amount: int = txo.amount - self.fee
def __lt__(self, other):
return self.effective_amount < other.effective_amount
class Output(InputOutput):
__slots__ = (
@@ -283,18 +261,15 @@ class Output(InputOutput):
def get_address(self, ledger):
return ledger.hash160_to_address(self.pubkey_hash)
def get_estimator(self, ledger):
return OutputEffectiveAmountEstimator(ledger, self)
@classmethod
def pay_pubkey_hash(cls, amount, pubkey_hash):
return cls(amount, OutputScript.pay_pubkey_hash(pubkey_hash))
@classmethod
def deserialize_from(cls, stream):
def deserialize_from(cls, stream, offset):
return cls(
amount=stream.read_uint64(),
script=OutputScript(stream.read_string())
script=OutputScript(stream.read_string(), offset=offset+9)
)
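
The offset+9 above presumably skips the 8-byte amount plus the single-byte compact-size length prefix that precede the script bytes (the prefix stays one byte for scripts under 253 bytes). A quick check of that arithmetic:

    import struct

    amount = struct.pack('<Q', 100_000_000)   # 8-byte little-endian uint64
    script = bytes.fromhex('76a914') + b'\x00' * 20 + bytes.fromhex('88ac')
    serialized = amount + bytes([len(script)]) + script
    assert serialized[9:] == script           # script starts at byte 9
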
def serialize_to(self, stream, alternate_script=None):
@@ -525,7 +500,7 @@ class Transaction:
self.position = position
self._day = julian_day
if raw is not None:
self._deserialize()
self.deserialize()
@property
def is_broadcast(self):
@@ -685,9 +660,10 @@ class Transaction:
stream.write_uint32(self.signature_hash_type(1)) # signature hash type: SIGHASH_ALL
return stream.get_bytes()
def _deserialize(self, stream=None):
def deserialize(self, stream=None):
if self._raw is not None or stream is not None:
stream = stream or BCDataStream(self._raw)
start = stream.tell()
self.version = stream.read_uint32()
input_count = stream.read_compact_size()
if input_count == 0:
@@ -698,7 +674,7 @@ class Transaction:
])
output_count = stream.read_compact_size()
self._add(self._outputs, [
Output.deserialize_from(stream) for _ in range(output_count)
Output.deserialize_from(stream, stream.tell()-start) for _ in range(output_count)
])
if self.is_segwit_flag:
# drain witness portion of transaction
@@ -710,180 +686,10 @@ class Transaction:
self.locktime = stream.read_uint32()
return self
@classmethod
def ensure_all_have_same_ledger_and_wallet(
cls, funding_accounts: Iterable['Account'],
change_account: 'Account' = None) -> Tuple['Ledger', 'Wallet']:
ledger = wallet = None
for account in funding_accounts:
if ledger is None:
ledger = account.ledger
wallet = account.wallet
if ledger != account.ledger:
raise ValueError(
'All funding accounts used to create a transaction must be on the same ledger.'
)
if wallet != account.wallet:
raise ValueError(
'All funding accounts used to create a transaction must be from the same wallet.'
)
if change_account is not None:
if change_account.ledger != ledger:
raise ValueError('Change account must use same ledger as funding accounts.')
if change_account.wallet != wallet:
raise ValueError('Change account must use same wallet as funding accounts.')
if ledger is None:
raise ValueError('No ledger found.')
if wallet is None:
raise ValueError('No wallet found.')
return ledger, wallet
@classmethod
async def create(cls, inputs: Iterable[Input], outputs: Iterable[Output],
funding_accounts: Iterable['Account'], change_account: 'Account',
sign: bool = True):
""" Find optimal set of inputs when only outputs are provided; add change
outputs if only inputs are provided or if inputs are greater than outputs. """
tx = cls() \
.add_inputs(inputs) \
.add_outputs(outputs)
ledger, _ = cls.ensure_all_have_same_ledger_and_wallet(funding_accounts, change_account)
# value of the outputs plus associated fees
cost = (
tx.get_base_fee(ledger) +
tx.get_total_output_sum(ledger)
)
# value of the inputs less the cost to spend those inputs
payment = tx.get_effective_input_sum(ledger)
try:
for _ in range(5):
if payment < cost:
deficit = cost - payment
spendables = await ledger.get_spendable_utxos(deficit, funding_accounts)
if not spendables:
raise InsufficientFundsError()
payment += sum(s.effective_amount for s in spendables)
tx.add_inputs(s.txi for s in spendables)
cost_of_change = (
tx.get_base_fee(ledger) +
Output.pay_pubkey_hash(COIN, NULL_HASH32).get_fee(ledger)
)
if payment > cost:
change = payment - cost
if change > cost_of_change:
change_address = await change_account.change.get_or_create_usable_address()
change_hash160 = change_account.ledger.address_to_hash160(change_address)
change_amount = change - cost_of_change
change_output = Output.pay_pubkey_hash(change_amount, change_hash160)
change_output.is_internal_transfer = True
tx.add_outputs([Output.pay_pubkey_hash(change_amount, change_hash160)])
if tx._outputs:
break
# this condition and the outer range(5) loop cover an edge case
# whereby a single input is just enough to cover the fee and
# has some change left over, but the change left over is less
# than the cost_of_change: thus the input is completely
# consumed and no output is added, which is an invalid tx.
# to be able to spend this input we must increase the cost
# of the TX and run through the balance algorithm a second time
# adding an extra input and change output, making tx valid.
# we do this 5 times in case the other UTXOs added are also
# less than the fee, after 5 attempts we give up and go home
cost += cost_of_change + 1
if sign:
await tx.sign(funding_accounts)
except Exception as e:
log.exception('Failed to create transaction:')
await ledger.release_tx(tx)
raise e
return tx
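
Worked numbers for the balancing loop above, including the edge case described in the long comment (all fee values are illustrative):

    cost, payment = 1000, 0          # outputs plus fees vs. input value
    payment += 1200                  # one spendable with effective_amount 1200
    cost_of_change = 150             # fee bump for adding a change output

    change = payment - cost          # 200
    assert change > cost_of_change   # so a change output of 200 - 150 = 50 is added

    # Edge case: with an 1100 input, change would be 100 <= cost_of_change,
    # no output could be added, and the loop bumps cost by cost_of_change + 1
    # to pull in another UTXO on the next pass.
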
@staticmethod
def signature_hash_type(hash_type):
return hash_type
async def sign(self, funding_accounts: Iterable['Account']):
ledger, wallet = self.ensure_all_have_same_ledger_and_wallet(funding_accounts)
for i, txi in enumerate(self._inputs):
assert txi.script is not None
assert txi.txo_ref.txo is not None
txo_script = txi.txo_ref.txo.script
if txo_script.is_pay_pubkey_hash:
address = ledger.hash160_to_address(txo_script.values['pubkey_hash'])
private_key = await ledger.get_private_key_for_address(wallet, address)
assert private_key is not None, 'Cannot find private key for signing output.'
tx = self._serialize_for_signature(i)
txi.script.values['signature'] = \
private_key.sign(tx) + bytes((self.signature_hash_type(1),))
txi.script.values['pubkey'] = private_key.public_key.pubkey_bytes
txi.script.generate()
else:
raise NotImplementedError("Don't know how to spend this output.")
self._reset()
@classmethod
def pay(cls, amount: int, address: bytes, funding_accounts: List['Account'], change_account: 'Account'):
ledger, _ = cls.ensure_all_have_same_ledger_and_wallet(funding_accounts, change_account)
output = Output.pay_pubkey_hash(amount, ledger.address_to_hash160(address))
return cls.create([], [output], funding_accounts, change_account)
@classmethod
def claim_create(
cls, name: str, claim: Claim, amount: int, holding_address: str,
funding_accounts: List['Account'], change_account: 'Account', signing_channel: Output = None):
ledger, _ = cls.ensure_all_have_same_ledger_and_wallet(funding_accounts, change_account)
claim_output = Output.pay_claim_name_pubkey_hash(
amount, name, claim, ledger.address_to_hash160(holding_address)
)
if signing_channel is not None:
claim_output.sign(signing_channel, b'placeholder txid:nout')
return cls.create([], [claim_output], funding_accounts, change_account, sign=False)
@classmethod
def claim_update(
cls, previous_claim: Output, claim: Claim, amount: int, holding_address: str,
funding_accounts: List['Account'], change_account: 'Account', signing_channel: Output = None):
ledger, _ = cls.ensure_all_have_same_ledger_and_wallet(funding_accounts, change_account)
updated_claim = Output.pay_update_claim_pubkey_hash(
amount, previous_claim.claim_name, previous_claim.claim_id,
claim, ledger.address_to_hash160(holding_address)
)
if signing_channel is not None:
updated_claim.sign(signing_channel, b'placeholder txid:nout')
else:
updated_claim.clear_signature()
return cls.create(
[Input.spend(previous_claim)], [updated_claim], funding_accounts, change_account, sign=False
)
@classmethod
def support(cls, claim_name: str, claim_id: str, amount: int, holding_address: str,
funding_accounts: List['Account'], change_account: 'Account'):
ledger, _ = cls.ensure_all_have_same_ledger_and_wallet(funding_accounts, change_account)
support_output = Output.pay_support_pubkey_hash(
amount, claim_name, claim_id, ledger.address_to_hash160(holding_address)
)
return cls.create([], [support_output], funding_accounts, change_account)
@classmethod
def purchase(cls, claim_id: str, amount: int, merchant_address: bytes,
funding_accounts: List['Account'], change_account: 'Account'):
ledger, _ = cls.ensure_all_have_same_ledger_and_wallet(funding_accounts, change_account)
payment = Output.pay_pubkey_hash(amount, ledger.address_to_hash160(merchant_address))
data = Output.add_purchase_data(Purchase(claim_id))
return cls.create([], [payment, data], funding_accounts, change_account)
@property
def my_inputs(self):
for txi in self.inputs:

View file

@@ -1,6 +1,6 @@
import re
from typing import TypeVar, Sequence, Optional
from .constants import COIN
from lbry.constants import COIN
def coins_to_satoshis(coins):
@@ -40,18 +40,6 @@ def subclass_tuple(name, base):
return type(name, (base,), {'__slots__': ()})
class cachedproperty:
def __init__(self, f):
self.f = f
def __get__(self, obj, objtype):
obj = obj or objtype
value = self.f(obj)
setattr(obj, self.f.__name__, value)
return value
class ArithUint256:
# https://github.com/bitcoin/bitcoin/blob/master/src/arith_uint256.cpp

View file

@@ -1,106 +0,0 @@
from typing import Optional
from contextvars import ContextVar
from multiprocessing import Queue, Event
from dataclasses import dataclass
from itertools import islice
from lbry.wallet.bcd_data_stream import BCDataStream
from lbry.db import Database
from .lbrycrd import Lbrycrd
from .block import Block
PENDING = 'pending'
RUNNING = 'running'
STOPPED = 'stopped'
def chunk(rows, step):
it, total = iter(rows), len(rows)
for _ in range(0, total, step):
yield min(step, total), islice(it, step)
total -= step
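
A quick demo of the chunk() helper above (assuming the definition from this soon-to-be-deleted module): it yields (size, islice) pairs over successive slices, with a short final slice:

    rows = list(range(7))
    for size, part in chunk(rows, 3):
        print(size, list(part))   # 3 [0, 1, 2] then 3 [3, 4, 5] then 1 [6]
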
@dataclass
class WorkerContext:
lbrycrd: Lbrycrd
db: Database
progress: Queue
stop: Event
context: ContextVar[Optional[WorkerContext]] = ContextVar('context')
def initializer(data_dir: str, regtest: bool, db_path: str, progress: Queue, stop: Event):
context.set(WorkerContext(
lbrycrd=Lbrycrd(data_dir, regtest),
db=Database(db_path).sync_open(),
progress=progress,
stop=stop
))
def process_block_file(block_file_number):
ctx: WorkerContext = context.get()
lbrycrd, db, progress, stop = ctx.lbrycrd, ctx.db, ctx.progress, ctx.stop
block_file_path = lbrycrd.get_block_file_path_from_number(block_file_number)
num = 0
progress.put_nowait((block_file_number, 1, num))
with open(block_file_path, 'rb') as fp:
stream = BCDataStream(fp=fp)
blocks, txs, claims, supports, spends = [], [], [], [], []
for num, block_info in enumerate(lbrycrd.db.get_file_details(block_file_number), start=1):
if stop.is_set():
return
if num % 100 == 0:
progress.put_nowait((block_file_number, 1, num))
fp.seek(block_info['data_offset'])
block = Block(stream)
for tx in block.txs:
txs.append((block.block_hash, tx.position, tx.hash))
for txi in tx.inputs:
if not txi.is_coinbase:
spends.append((block.block_hash, tx.hash, txi.txo_ref.hash))
for output in tx.outputs:
try:
if output.is_support:
supports.append((
block.block_hash, tx.hash, output.ref.hash, output.claim_hash, output.amount
))
elif output.script.is_claim_name:
claims.append((
block.block_hash, tx.hash, tx.position, output.ref.hash, output.claim_hash,
output.claim_name, 1, output.amount, None, None
))
elif output.script.is_update_claim:
claims.append((
block.block_hash, tx.hash, tx.position, output.ref.hash, output.claim_hash,
output.claim_name, 2, output.amount, None, None
))
except Exception:
pass
blocks.append(
(block.block_hash, block.prev_block_hash, block_file_number, 0 if block.is_first_block else None)
)
progress.put((block_file_number, 1, num))
queries = (
("insert into block values (?, ?, ?, ?)", blocks),
("insert into tx values (?, ?, ?)", txs),
("insert into txi values (?, ?, ?)", spends),
("insert into support values (?, ?, ?, ?, ?)", supports),
("insert into claim_history values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", claims),
)
total_txs = len(txs)
done_txs = 0
step = int(sum(len(q[1]) for q in queries)/total_txs)
progress.put((block_file_number, 2, done_txs))
for sql, rows in queries:
for chunk_size, chunk_rows in chunk(rows, 10000):
db.sync_executemany(sql, chunk_rows)
done_txs += int(chunk_size/step)
progress.put((block_file_number, 2, done_txs))
progress.put((block_file_number, 2, total_txs))

View file

@@ -1,35 +1,40 @@
import os
import time
import asyncio
import logging
from unittest import skip
from binascii import unhexlify, hexlify
import shutil
import tempfile
from binascii import hexlify, unhexlify
from random import choice
from lbry.testcase import AsyncioTestCase
from lbry.crypto.base58 import Base58
from lbry.blockchain import Lbrycrd, BlockchainSync
from lbry.conf import Config
from lbry.db import Database
from lbry.blockchain.block import Block
from lbry.crypto.base58 import Base58
from lbry.schema.claim import Stream
from lbry.wallet.transaction import Transaction, Output
from lbry.wallet.constants import CENT
from lbry.wallet.bcd_data_stream import BCDataStream
from lbry.blockchain.lbrycrd import Lbrycrd
from lbry.blockchain.dewies import dewies_to_lbc, lbc_to_dewies
from lbry.blockchain.transaction import Transaction, Output
from lbry.constants import CENT
from lbry.blockchain.ledger import RegTestLedger
from lbry.testcase import AsyncioTestCase
#logging.getLogger('lbry.blockchain').setLevel(logging.DEBUG)
log = logging.getLogger(__name__)
from lbry.service.full_node import FullNode
from lbry.service.light_client import LightClient
from lbry.service.daemon import Daemon
from lbry.service.api import Client
@skip
class TestBlockchain(AsyncioTestCase):
class BlockchainTestCase(AsyncioTestCase):
async def asyncSetUp(self):
await super().asyncSetUp()
#self.chain = Lbrycrd.temp_regtest()
self.chain = Lbrycrd('/tmp/tmp0429f0ku/', True)#.temp_regtest()
self.chain = Lbrycrd.temp_regtest()
self.ledger = self.chain.ledger
await self.chain.ensure()
await self.chain.start('-maxblockfilesize=8', '-rpcworkqueue=128')
self.addCleanup(self.chain.stop, False)
self.addCleanup(self.chain.stop)
class TestEvents(BlockchainTestCase):
async def test_block_event(self):
msgs = []
@@ -50,29 +55,45 @@ class TestBlockchain(AsyncioTestCase):
res = await self.chain.generate(3)
await self.chain.on_block.where(lambda e: e['msg'] == 9)
self.assertEqual(3, len(res))
self.assertEqual([0, 1, 2, 3, 4, 7, 8, 9], msgs)
self.assertEqual([0, 1, 2, 3, 4, 7, 8, 9], msgs) # 5, 6 "missed"
async def test_sync(self):
if False:
names = ['one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight', 'nine', 'ten']
await self.chain.generate(101)
address = Base58.decode(await self.chain.get_new_address())
for _ in range(190):
tx = Transaction().add_outputs([
Output.pay_claim_name_pubkey_hash(
CENT, f'{choice(names)}{i}',
Stream().update(
title='a claim title',
description='Lorem ipsum '*400,
tags=['crypto', 'health', 'space'],
).claim,
address)
for i in range(1, 20)
])
funded = await self.chain.fund_raw_transaction(hexlify(tx.raw).decode())
signed = await self.chain.sign_raw_transaction_with_wallet(funded['hex'])
await self.chain.send_raw_transaction(signed['hex'])
await self.chain.generate(1)
class TestBlockchainSync(BlockchainTestCase):
async def asyncSetUp(self):
await super().asyncSetUp()
self.service = FullNode(
self.chain.ledger, f'sqlite:///{self.chain.data_dir}/lbry.db', self.chain
)
self.service.conf.spv_address_filters = False
self.sync = self.service.sync
self.db = self.service.db
await self.db.open()
self.addCleanup(self.db.close)
async def test_multi_block_file_sync(self):
names = ['one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight', 'nine', 'ten']
await self.chain.generate(101)
address = Base58.decode(await self.chain.get_new_address())
start = time.perf_counter()
for _ in range(190):
tx = Transaction().add_outputs([
Output.pay_claim_name_pubkey_hash(
CENT, f'{choice(names)}{i}',
Stream().update(
title='a claim title',
description='Lorem ipsum '*400,
tags=['crypto', 'health', 'space'],
).claim,
address)
for i in range(1, 20)
])
funded = await self.chain.fund_raw_transaction(hexlify(tx.raw).decode())
signed = await self.chain.sign_raw_transaction_with_wallet(funded['hex'])
await self.chain.send_raw_transaction(signed['hex'])
await self.chain.generate(1)
print(f'generating {190*20} transactions took {time.perf_counter()-start}s')
self.assertEqual(
[(0, 191, 280), (1, 89, 178), (2, 12, 24)],
@@ -81,9 +102,410 @@ class TestBlockchain(AsyncioTestCase):
)
self.assertEqual(191, len(await self.chain.get_file_details(0)))
db = Database(os.path.join(self.chain.actual_data_dir, 'lbry.db'))
self.addCleanup(db.close)
await db.open()
await self.sync.advance()
sync = BlockchainSync(self.chain, use_process_pool=False)
await sync.load_blocks()
class FullNodeTestCase(BlockchainTestCase):
async def asyncSetUp(self):
await super().asyncSetUp()
self.current_height = 0
await self.generate(101, wait=False)
self.service = FullNode(self.ledger, f'sqlite:///{self.chain.data_dir}/lbry.db')
self.service.conf.spv_address_filters = False
self.sync = self.service.sync
self.db = self.service.db
self.daemon = Daemon(self.service)
self.api = self.daemon.api
self.addCleanup(self.daemon.stop)
await self.daemon.start()
if False: #os.environ.get('TEST_LBRY_API', 'light_client') == 'light_client':
light_dir = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, light_dir, True)
ledger = RegTestLedger(Config(
data_dir=light_dir,
wallet_dir=light_dir,
api='localhost:5389',
))
self.light_client = self.service = LightClient(
ledger, f'sqlite:///{light_dir}/light_client.db'
)
self.light_api = Daemon(self.service)
await self.light_api.start()
self.addCleanup(self.light_api.stop)
#else:
# self.service = self.full_node
#self.client = Client(self.service, self.ledger.conf.api_connection_url)
async def generate(self, blocks, wait=True):
block_hashes = await self.chain.generate(blocks)
self.current_height += blocks
if wait:
await self.service.sync.on_block.where(
lambda b: self.current_height == b.height
)
return block_hashes
class TestFullNode(FullNodeTestCase):
async def test_foo(self):
await self.generate(10)
wallet = self.service.wallet_manager.default_wallet #create_wallet('test_wallet')
account = wallet.accounts[0]
addresses = await account.ensure_address_gap()
await self.chain.send_to_address(addresses[0], '5.0')
await self.generate(1)
self.assertEqual(await account.get_balance(), lbc_to_dewies('5.0'))
#self.assertEqual((await self.client.account_balance())['total'], '5.0')
tx = await wallet.create_channel('@foo', lbc_to_dewies('1.0'), account, [account], addresses[0])
await self.service.broadcast(tx)
await self.generate(1)
channels = await wallet.get_channels()
print(channels)
class TestClaimtrieSync(FullNodeTestCase):
async def asyncSetUp(self):
await super().asyncSetUp()
self.last_block_hash = None
self.address = await self.chain.get_new_address()
def find_claim_txo(self, tx):
for txo in tx.outputs:
if txo.is_claim:
return txo
async def get_transaction(self, txid):
raw = await self.chain.get_raw_transaction(txid)
return Transaction(unhexlify(raw))
async def claim_name(self, title, amount):
claim = Stream().update(title=title).claim
return await self.chain.claim_name(
'foo', hexlify(claim.to_bytes()).decode(), amount
)
async def claim_update(self, tx, amount):
claim = self.find_claim_txo(tx).claim
return await self.chain.update_claim(
tx.outputs[0].tx_ref.id, hexlify(claim.to_bytes()).decode(), amount
)
async def claim_abandon(self, tx):
return await self.chain.abandon_claim(tx.id, self.address)
async def support_claim(self, tx, amount):
txo = self.find_claim_txo(tx)
response = await self.chain.support_claim(
txo.claim_name, txo.claim_id, amount
)
return response['txId']
async def advance(self, new_height, ops):
blocks = (new_height-self.current_height)-1
if blocks > 0:
await self.generate(blocks)
txs = []
for op in ops:
if len(op) == 3:
op_type, value, amount = op
else:
(op_type, value), amount = op, None
if op_type == 'claim':
txid = await self.claim_name(value, amount)
elif op_type == 'update':
txid = await self.claim_update(value, amount)
elif op_type == 'abandon':
txid = await self.claim_abandon(value)
elif op_type == 'support':
txid = await self.support_claim(value, amount)
else:
raise ValueError(f'"{op_type}" is an unknown operation')
txs.append(await self.get_transaction(txid))
self.last_block_hash, = await self.generate(1)
self.current_height = new_height
return txs
async def get_last_block(self):
return await self.chain.get_block(self.last_block_hash)
async def get_controlling(self):
sql = f"""
select
tx.height, tx.raw, txo.position, effective_amount, activation_height
from claimtrie
join claim using (claim_hash)
join txo using (txo_hash)
join tx using (tx_hash)
where
txo.txo_type in (1, 2) and
expiration_height > {self.current_height}
"""
for claim in await self.db.execute_fetchall(sql):
tx = Transaction(claim['raw'], height=claim['height'])
txo = tx.outputs[claim['position']]
return (
txo.claim.stream.title, dewies_to_lbc(txo.amount),
dewies_to_lbc(claim['effective_amount']), claim['activation_height']
)
async def get_active(self):
controlling = await self.get_controlling()
active = []
sql = f"""
select tx.height, tx.raw, txo.position, effective_amount, activation_height
from txo
join tx using (tx_hash)
join claim using (claim_hash)
where
txo.txo_type in (1, 2) and
activation_height <= {self.current_height} and
expiration_height > {self.current_height}
"""
for claim in await self.db.execute_fetchall(sql):
tx = Transaction(claim['raw'], height=claim['height'])
txo = tx.outputs[claim['position']]
if controlling and controlling[0] == txo.claim.stream.title:
continue
active.append((
txo.claim.stream.title, dewies_to_lbc(txo.amount),
dewies_to_lbc(claim['effective_amount']), claim['activation_height']
))
return active
async def get_accepted(self):
accepted = []
sql = f"""
select tx.height, tx.raw, txo.position, effective_amount, activation_height
from txo
join tx using (tx_hash)
join claim using (claim_hash)
where
txo.txo_type in (1, 2) and
activation_height > {self.current_height} and
expiration_height > {self.current_height}
"""
for claim in await self.db.execute_fetchall(sql):
tx = Transaction(claim['raw'], height=claim['height'])
txo = tx.outputs[claim['position']]
accepted.append((
txo.claim.stream.title, dewies_to_lbc(txo.amount),
dewies_to_lbc(claim['effective_amount']), claim['activation_height']
))
return accepted
async def state(self, controlling=None, active=None, accepted=None):
self.assertEqual(controlling, await self.get_controlling())
self.assertEqual(active or [], await self.get_active())
self.assertEqual(accepted or [], await self.get_accepted())
async def test_example_from_spec(self):
# https://spec.lbry.com/#claim-activation-example
advance, state = self.advance, self.state
stream, = await advance(113, [('claim', 'Claim A', '10.0')])
await state(
controlling=('Claim A', '10.0', '10.0', 113),
active=[],
accepted=[]
)
await advance(501, [('claim', 'Claim B', '20.0')])
await state(
controlling=('Claim A', '10.0', '10.0', 113),
active=[],
accepted=[('Claim B', '20.0', '0.0', 513)]
)
await advance(510, [('support', stream, '14')])
await state(
controlling=('Claim A', '10.0', '24.0', 113),
active=[],
accepted=[('Claim B', '20.0', '0.0', 513)]
)
await advance(512, [('claim', 'Claim C', '50.0')])
await state(
controlling=('Claim A', '10.0', '24.0', 113),
active=[],
accepted=[
('Claim B', '20.0', '0.0', 513),
('Claim C', '50.0', '0.0', 524)]
)
await advance(513, [])
await state(
controlling=('Claim A', '10.0', '24.0', 113),
active=[('Claim B', '20.0', '20.0', 513)],
accepted=[('Claim C', '50.0', '0.0', 524)]
)
await advance(520, [('claim', 'Claim D', '60.0')])
await state(
controlling=('Claim A', '10.0', '24.0', 113),
active=[('Claim B', '20.0', '20.0', 513)],
accepted=[
('Claim C', '50.0', '0.0', 524),
('Claim D', '60.0', '0.0', 532)]
)
await advance(524, [])
await state(
controlling=('Claim D', '60.0', '60.0', 524),
active=[
('Claim A', '10.0', '24.0', 113),
('Claim B', '20.0', '20.0', 513),
('Claim C', '50.0', '50.0', 524)],
accepted=[]
)
# beyond example
await advance(525, [('update', stream, '70.0')])
await state(
controlling=('Claim A', '70.0', '84.0', 525),
active=[
('Claim B', '20.0', '20.0', 513),
('Claim C', '50.0', '50.0', 524),
('Claim D', '60.0', '60.0', 524),
],
accepted=[]
)
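
The activation heights asserted in this test follow the spec's takeover-delay rule: a non-controlling claim activates after min((height - last_takeover_height) // 32, 4032) blocks. Checking the numbers used above:

    def activation(height, last_takeover, max_delay=4032):
        return height + min((height - last_takeover) // 32, max_delay)

    assert activation(501, 113) == 513   # Claim B
    assert activation(512, 113) == 524   # Claim C
    assert activation(520, 113) == 532   # Claim D
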
async def test_competing_claims_subsequent_blocks_height_wins(self):
advance, state = self.advance, self.state
await advance(113, [('claim', 'Claim A', '1.0')])
await state(
controlling=('Claim A', '1.0', '1.0', 113),
active=[],
accepted=[]
)
await advance(114, [('claim', 'Claim B', '1.0')])
await state(
controlling=('Claim A', '1.0', '1.0', 113),
active=[('Claim B', '1.0', '1.0', 114)],
accepted=[]
)
await advance(115, [('claim', 'Claim C', '1.0')])
await state(
controlling=('Claim A', '1.0', '1.0', 113),
active=[
('Claim B', '1.0', '1.0', 114),
('Claim C', '1.0', '1.0', 115)],
accepted=[]
)
async def test_competing_claims_in_single_block_position_wins(self):
claim_a, claim_b = await self.advance(113, [
('claim', 'Claim A', '1.0'),
('claim', 'Claim B', '1.0')
])
block = await self.get_last_block()
# order of txs in a block is non-deterministic,
# figure out what order we ended up with
if block['tx'][1] == claim_a.id:
winner, other = 'Claim A', 'Claim B'
else:
winner, other = 'Claim B', 'Claim A'
await self.state(
controlling=(winner, '1.0', '1.0', 113),
active=[(other, '1.0', '1.0', 113)],
accepted=[]
)
async def test_competing_claims_in_single_block_effective_amount_wins(self):
await self.advance(113, [
('claim', 'Claim A', '1.0'),
('claim', 'Claim B', '2.0')
])
await self.state(
controlling=('Claim B', '2.0', '2.0', 113),
active=[('Claim A', '1.0', '1.0', 113)],
accepted=[]
)
async def test_winning_claim_deleted(self):
claim1, claim2 = await self.advance(113, [
('claim', 'Claim A', '1.0'),
('claim', 'Claim B', '2.0')
])
await self.state(
controlling=('Claim B', '2.0', '2.0', 113),
active=[('Claim A', '1.0', '1.0', 113)],
accepted=[]
)
await self.advance(114, [('abandon', claim2)])
await self.state(
controlling=('Claim A', '1.0', '1.0', 113),
active=[],
accepted=[]
)
async def test_winning_claim_deleted_and_new_claim_becomes_winner(self):
claim1, claim2 = await self.advance(113, [
('claim', 'Claim A', '1.0'),
('claim', 'Claim B', '2.0')
])
await self.state(
controlling=('Claim B', '2.0', '2.0', 113),
active=[('Claim A', '1.0', '1.0', 113)],
accepted=[]
)
await self.advance(115, [
('abandon', claim2),
('claim', 'Claim C', '3.0')
])
await self.state(
controlling=('Claim C', '3.0', '3.0', 115),
active=[('Claim A', '1.0', '1.0', 113)],
accepted=[]
)
async def test_winning_claim_expires_and_another_takes_over(self):
await self.advance(110, [('claim', 'Claim A', '2.0')])
await self.advance(120, [('claim', 'Claim B', '1.0')])
await self.state(
controlling=('Claim A', '2.0', '2.0', 110),
active=[('Claim B', '1.0', '1.0', 120)],
accepted=[]
)
await self.advance(610, [])
await self.state(
controlling=('Claim B', '1.0', '1.0', 120),
active=[],
accepted=[]
)
await self.advance(620, [])
await self.state(
controlling=None,
active=[],
accepted=[]
)
async def test_create_and_multiple_updates_in_same_block(self):
await self.chain.generate(10)
txid = await self.claim_name('Claim A', '1.0')
txid = await self.claim_update(await self.get_transaction(txid), '2.0')
await self.claim_update(await self.get_transaction(txid), '3.0')
await self.chain.generate(1)
await self.sync.advance()
self.current_height += 11
await self.state(
controlling=('Claim A', '3.0', '3.0', 112),
active=[],
accepted=[]
)
async def test_create_and_abandon_in_same_block(self):
await self.chain.generate(10)
txid = await self.claim_name('Claim A', '1.0')
await self.claim_abandon(await self.get_transaction(txid))
await self.chain.generate(1)
await self.sync.advance()
self.current_height += 11
await self.state(
controlling=None,
active=[],
accepted=[]
)

View file

@@ -9,8 +9,8 @@ from lbry.error import InsufficientFundsError
from lbry.extras.daemon.daemon import DEFAULT_PAGE_SIZE
from lbry.testcase import CommandTestCase
from lbry.wallet.transaction import Transaction
from lbry.wallet.util import satoshis_to_coins as lbc
from lbry.blockchain.transaction import Transaction
from lbry.blockchain.util import satoshis_to_coins as lbc
log = logging.getLogger(__name__)
@@ -142,7 +142,6 @@ class ClaimSearchCommand(ClaimTestCase):
await self.assertFindsClaims([signed2], channel_ids=[channel_id2, self.channel_id],
valid_channel_signature=True, invalid_channel_signature=False)
# invalid signature still returns channel_id
self.ledger._tx_cache.clear()
invalid_claims = await self.claim_search(invalid_channel_signature=True, has_channel_signature=True)
self.assertEqual(3, len(invalid_claims))
self.assertTrue(all([not c['is_channel_signature_valid'] for c in invalid_claims]))
@@ -234,7 +233,7 @@ class ClaimSearchCommand(ClaimTestCase):
await self.assertFindsClaims([claim4, claim3, claim2], all_tags=['abc'], any_tags=['def', 'ghi'])
async def test_order_by(self):
height = self.ledger.network.remote_height
height = self.ledger.sync.network.remote_height
claims = [await self.stream_create(f'claim{i}') for i in range(5)]
await self.assertFindsClaims(claims, order_by=["^height"])
@@ -820,7 +819,7 @@ class ChannelCommands(CommandTestCase):
async def test_create_channel_names(self):
# claim new name
await self.channel_create('@foo')
self.assertItemCount(await self.daemon.jsonrpc_channel_list(), 1)
self.assertItemCount(await self.api.channel_list(), 1)
await self.assertBalance(self.account, '8.991893')
# fail to claim duplicate
@@ -832,12 +831,12 @@ class ChannelCommands(CommandTestCase):
await self.channel_create('foo')
# nothing's changed after failed attempts
self.assertItemCount(await self.daemon.jsonrpc_channel_list(), 1)
self.assertItemCount(await self.api.channel_list(), 1)
await self.assertBalance(self.account, '8.991893')
# succeed overriding duplicate restriction
await self.channel_create('@foo', allow_duplicate_name=True)
self.assertItemCount(await self.daemon.jsonrpc_channel_list(), 2)
self.assertItemCount(await self.api.channel_list(), 2)
await self.assertBalance(self.account, '7.983786')
async def test_channel_bids(self):

View file

@@ -4,7 +4,7 @@ from unittest.mock import Mock
from binascii import unhexlify
import lbry
from lbry.wallet.network import Network
from lbry.service.network import Network
from lbry.wallet.orchstr8.node import SPVNode
from lbry.wallet.rpc import RPCSession
from lbry.testcase import IntegrationTestCase, AsyncioTestCase

View file

@@ -1,8 +1,16 @@
import os
import asyncio
import logging
from lbry.testcase import IntegrationTestCase, WalletNode
import aiohttp
from sqlalchemy import text
from lbry.testcase import IntegrationTestCase, WalletNode, CommandTestCase
from lbry.constants import CENT
from lbry.wallet import WalletManager, RegTestLedger, Transaction, Output
from lbry.blockchain import Lbrycrd
from lbry.db import Database, TXI
from lbry.blockchain import Synchronizer
class SyncTests(IntegrationTestCase):

View file

@@ -80,7 +80,7 @@ class BasicTransactionTests(IntegrationTestCase):
async def test_sending_and_receiving(self):
account1, account2 = self.account, self.wallet.generate_account(self.ledger)
await self.ledger.subscribe_account(account2)
await self.ledger.sync.subscribe_account(account2)
await self.assertBalance(account1, '0.0')
await self.assertBalance(account2, '0.0')
@@ -151,8 +151,8 @@ class BasicTransactionTests(IntegrationTestCase):
for batch in range(0, len(sends), 10):
txids = await asyncio.gather(*sends[batch:batch + 10])
await asyncio.wait([self.on_transaction_id(txid) for txid in txids])
remote_status = await self.ledger.network.subscribe_address(address)
self.assertTrue(await self.ledger.update_history(address, remote_status))
remote_status = await self.ledger.sync.network.subscribe_address(address)
self.assertTrue(await self.ledger.sync.update_history(address, remote_status))
# 20 unconfirmed txs, 10 from blockchain, 10 from local to local
utxos = await self.account.get_utxos()
txs = []
@@ -165,11 +165,11 @@ class BasicTransactionTests(IntegrationTestCase):
await self.broadcast(tx)
txs.append(tx)
await asyncio.wait([self.on_transaction_address(tx, address) for tx in txs], timeout=1)
remote_status = await self.ledger.network.subscribe_address(address)
self.assertTrue(await self.ledger.update_history(address, remote_status))
remote_status = await self.ledger.sync.network.subscribe_address(address)
self.assertTrue(await self.ledger.sync.update_history(address, remote_status))
# server history grows unordered
txid = await self.blockchain.send_to_address(address, 1)
await self.on_transaction_id(txid)
self.assertTrue(await self.ledger.update_history(address, remote_status))
self.assertEqual(21, len((await self.ledger.get_local_status_and_history(address))[1]))
self.assertTrue(await self.ledger.sync.update_history(address, remote_status))
self.assertEqual(21, len((await self.ledger.sync.get_local_status_and_history(address))[1]))
self.assertEqual(0, len(self.ledger._known_addresses_out_of_sync))

View file

@@ -3,7 +3,7 @@ import asyncio
import lbry
import lbry.wallet
from lbry.error import ServerPaymentFeeAboveMaxAllowedError
from lbry.wallet.network import ClientSession
from lbry.service.network import ClientSession
from lbry.testcase import IntegrationTestCase, CommandTestCase
from lbry.wallet.orchstr8.node import SPVNode

View file

@@ -1,9 +1,9 @@
import unittest
from unittest import TestCase
from lbry.wallet.bcd_data_stream import BCDataStream
from lbry.blockchain.bcd_data_stream import BCDataStream
class TestBCDataStream(unittest.TestCase):
class TestBCDataStream(TestCase):
def test_write_read(self):
s = BCDataStream()

View file

@@ -1,7 +1,7 @@
import unittest
from binascii import hexlify, unhexlify
from lbry.wallet.claim_proofs import get_hash_for_outpoint, verify_proof
from lbry.blockchain.claim_proofs import get_hash_for_outpoint, verify_proof
from lbry.crypto.hash import double_sha256

View file

@@ -1,6 +1,6 @@
import unittest
from lbry.wallet.dewies import lbc_to_dewies as l2d, dewies_to_lbc as d2l
from lbry.blockchain.dewies import lbc_to_dewies as l2d, dewies_to_lbc as d2l
class TestDeweyConversion(unittest.TestCase):

View file

@@ -3,9 +3,9 @@ import asyncio
import tempfile
from binascii import unhexlify
from lbry.wallet.util import ArithUint256
from lbry.testcase import AsyncioTestCase
from lbry.wallet.ledger import Headers as _Headers
from lbry.blockchain.util import ArithUint256
from lbry.blockchain.ledger import Headers as _Headers
class Headers(_Headers):
@@ -168,9 +168,9 @@ class TestHeaders(AsyncioTestCase):
await headers.open()
self.assertEqual(
cm.output, [
'WARNING:lbry.wallet.header:Reader file size doesnt match header size. '
'WARNING:lbry.blockchain.header:Reader file size doesnt match header size. '
'Repairing, might take a while.',
'WARNING:lbry.wallet.header:Header file corrupted at height 9, truncating '
'WARNING:lbry.blockchain.header:Header file corrupted at height 9, truncating '
'it.'
]
)

View file

@@ -1,8 +1,8 @@
import unittest
from binascii import hexlify, unhexlify
from lbry.wallet.bcd_data_stream import BCDataStream
from lbry.wallet.script import (
from lbry.blockchain.bcd_data_stream import BCDataStream
from lbry.blockchain.script import (
InputScript, OutputScript, Template, ParseError, tokenize, push_data,
PUSH_SINGLE, PUSH_INTEGER, PUSH_MANY, OP_HASH160, OP_EQUAL
)

View file

@@ -0,0 +1,523 @@
import tempfile
import ecdsa
import hashlib
from binascii import hexlify
from typing import List, Tuple
from functools import partial
from lbry.testcase import AsyncioTestCase, get_output
from lbry.conf import Config
from lbry.db import RowCollector
from lbry.schema.claim import Claim
from lbry.schema.result import Censor
from lbry.blockchain.block import Block
from lbry.constants import COIN
from lbry.blockchain.transaction import Transaction, Input, Output
from lbry.service.full_node import FullNode
from lbry.blockchain.ledger import Ledger
from lbry.blockchain.lbrycrd import Lbrycrd
from lbry.blockchain.testing import create_lbrycrd_databases, add_block_to_lbrycrd
def get_input(fuzz=1):
return Input.spend(get_output(COIN, fuzz.to_bytes(32, 'little')))
def get_tx(fuzz=1):
return Transaction().add_inputs([get_input(fuzz)])
def search(**constraints) -> List:
return reader.search_claims(Censor(), **constraints)
def censored_search(**constraints) -> Tuple[List, Censor]:
rows, _, _, _, censor = reader.search(constraints)
return rows, censor
class TestSQLDB(AsyncioTestCase):
async def asyncSetUp(self):
await super().asyncSetUp()
self.chain = Lbrycrd(Ledger(Config.with_same_dir(tempfile.mkdtemp())))
self.addCleanup(self.chain.cleanup)
await create_lbrycrd_databases(self.chain.actual_data_dir)
await self.chain.open()
self.addCleanup(self.chain.close)
self.service = FullNode(
self.chain.ledger, f'sqlite:///{self.chain.data_dir}/lbry.db', self.chain
)
self.service.conf.spv_address_filters = False
self.db = self.service.db
self.addCleanup(self.db.close)
await self.db.open()
self._txos = {}
async def advance(self, height, txs, takeovers=None):
block = Block(
height=height, version=1, file_number=0,
block_hash=f'beef{height}'.encode(), prev_block_hash=f'beef{height-1}'.encode(),
merkle_root=b'beef', claim_trie_root=b'beef',
timestamp=99, bits=1, nonce=1, txs=txs
)
await add_block_to_lbrycrd(self.chain, block, takeovers or [])
await RowCollector(self.db).add_block(block).save()
await self.service.sync.post_process()
return [tx.outputs[0] for tx in txs]
def _make_tx(self, output, txi=None, **kwargs):
tx = get_tx(**kwargs).add_outputs([output])
if txi is not None:
tx.add_inputs([txi])
self._txos[output.ref.hash] = output
return tx
def _set_channel_key(self, channel, key):
private_key = ecdsa.SigningKey.from_string(key*32, curve=ecdsa.SECP256k1, hashfunc=hashlib.sha256)
channel.private_key = private_key
channel.claim.channel.public_key_bytes = private_key.get_verifying_key().to_der()
channel.script.generate()
def get_channel(self, title, amount, name='@foo', key=b'a', **kwargs):
claim = Claim()
claim.channel.title = title
channel = Output.pay_claim_name_pubkey_hash(amount, name, claim, b'abc')
self._set_channel_key(channel, key)
return self._make_tx(channel, **kwargs)
def get_channel_update(self, channel, amount, key=b'a'):
self._set_channel_key(channel, key)
return self._make_tx(
Output.pay_update_claim_pubkey_hash(
amount, channel.claim_name, channel.claim_id, channel.claim, b'abc'
),
Input.spend(channel)
)
def get_stream(self, title, amount, name='foo', channel=None, **kwargs):
claim = Claim()
claim.stream.update(title=title, **kwargs)
result = self._make_tx(Output.pay_claim_name_pubkey_hash(amount, name, claim, b'abc'))
if channel:
result.outputs[0].sign(channel)
result._reset()
return result
def get_stream_update(self, tx, amount, channel=None):
stream = Transaction(tx[0].raw).outputs[0]
result = self._make_tx(
Output.pay_update_claim_pubkey_hash(
amount, stream.claim_name, stream.claim_id, stream.claim, b'abc'
),
Input.spend(stream)
)
if channel:
result.outputs[0].sign(channel)
result._reset()
return result
def get_repost(self, claim_id, amount, channel):
claim = Claim()
claim.repost.reference.claim_id = claim_id
result = self._make_tx(Output.pay_claim_name_pubkey_hash(amount, 'repost', claim, b'abc'))
result.outputs[0].sign(channel)
result._reset()
return result
def get_abandon(self, tx):
claim = Transaction(tx[0].raw).outputs[0]
return self._make_tx(
Output.pay_pubkey_hash(claim.amount, b'abc'),
Input.spend(claim)
)
def get_support(self, tx, amount):
claim = Transaction(tx[0].raw).outputs[0]
return self._make_tx(
Output.pay_support_pubkey_hash(
amount, claim.claim_name, claim.claim_id, b'abc'
)
)
class TestClaimtrie(TestSQLDB):
def setUp(self):
super().setUp()
self._input_counter = 1
def _get_x_with_claim_id_prefix(self, getter, prefix, cached_iteration=None, **kwargs):
iterations = cached_iteration+1 if cached_iteration else 100
for i in range(cached_iteration or 1, iterations):
stream = getter(f'claim #{i}', COIN, fuzz=self._input_counter, **kwargs)
if stream.outputs[0].claim_id.startswith(prefix):
cached_iteration is None and print(f'Found "{prefix}" in {i} iterations.')
self._input_counter += 1
return stream
if cached_iteration:
raise ValueError(f'Failed to find "{prefix}" at cached iteration, run with None to find iteration.')
raise ValueError(f'Failed to find "{prefix}" in {iterations} iterations, try different values.')
def get_channel_with_claim_id_prefix(self, prefix, cached_iteration=None, **kwargs):
return self._get_x_with_claim_id_prefix(self.get_channel, prefix, cached_iteration, **kwargs)
def get_stream_with_claim_id_prefix(self, prefix, cached_iteration=None, **kwargs):
return self._get_x_with_claim_id_prefix(self.get_stream, prefix, cached_iteration, **kwargs)
async def test_canonical_url_and_channel_validation(self):
advance, search = self.advance, partial(self.service.search_claims, [])
tx_chan_a = self.get_channel_with_claim_id_prefix('a', 1, key=b'c')
tx_chan_ab = self.get_channel_with_claim_id_prefix('ab', 20, key=b'c')
txo_chan_a = tx_chan_a.outputs[0]
txo_chan_ab = tx_chan_ab.outputs[0]
await advance(1, [tx_chan_a])
await advance(2, [tx_chan_ab])
(r_ab, r_a) = search(order_by=['creation_height'], limit=2)
self.assertEqual("@foo#a", r_a['short_url'])
self.assertEqual("@foo#ab", r_ab['short_url'])
self.assertIsNone(r_a['canonical_url'])
self.assertIsNone(r_ab['canonical_url'])
self.assertEqual(0, r_a['claims_in_channel'])
self.assertEqual(0, r_ab['claims_in_channel'])
tx_a = self.get_stream_with_claim_id_prefix('a', 2)
tx_ab = self.get_stream_with_claim_id_prefix('ab', 42)
tx_abc = self.get_stream_with_claim_id_prefix('abc', 65)
await advance(3, [tx_a])
await advance(4, [tx_ab, tx_abc])
(r_abc, r_ab, r_a) = search(order_by=['creation_height', 'tx_position'], limit=3)
self.assertEqual("foo#a", r_a['short_url'])
self.assertEqual("foo#ab", r_ab['short_url'])
self.assertEqual("foo#abc", r_abc['short_url'])
self.assertIsNone(r_a['canonical_url'])
self.assertIsNone(r_ab['canonical_url'])
self.assertIsNone(r_abc['canonical_url'])
tx_a2 = self.get_stream_with_claim_id_prefix('a', 7, channel=txo_chan_a)
tx_ab2 = self.get_stream_with_claim_id_prefix('ab', 23, channel=txo_chan_a)
a2_claim = tx_a2.outputs[0]
ab2_claim = tx_ab2.outputs[0]
await advance(6, [tx_a2])
await advance(7, [tx_ab2])
(r_ab2, r_a2) = search(order_by=['creation_height'], limit=2)
self.assertEqual(f"foo#{a2_claim.claim_id[:2]}", r_a2['short_url'])
self.assertEqual(f"foo#{ab2_claim.claim_id[:4]}", r_ab2['short_url'])
self.assertEqual("@foo#a/foo#a", r_a2['canonical_url'])
self.assertEqual("@foo#a/foo#ab", r_ab2['canonical_url'])
self.assertEqual(2, search(claim_id=txo_chan_a.claim_id, limit=1)[0]['claims_in_channel'])
# change channel public key, invalidating stream claim signatures
await advance(8, [self.get_channel_update(txo_chan_a, COIN, key=b'a')])
(r_ab2, r_a2) = search(order_by=['creation_height'], limit=2)
self.assertEqual(f"foo#{a2_claim.claim_id[:2]}", r_a2['short_url'])
self.assertEqual(f"foo#{ab2_claim.claim_id[:4]}", r_ab2['short_url'])
self.assertIsNone(r_a2['canonical_url'])
self.assertIsNone(r_ab2['canonical_url'])
self.assertEqual(0, search(claim_id=txo_chan_a.claim_id, limit=1)[0]['claims_in_channel'])
# reinstate previous channel public key (previous stream claim signatures become valid again)
channel_update = self.get_channel_update(txo_chan_a, COIN, key=b'c')
await advance(9, [channel_update])
(r_ab2, r_a2) = search(order_by=['creation_height'], limit=2)
self.assertEqual(f"foo#{a2_claim.claim_id[:2]}", r_a2['short_url'])
self.assertEqual(f"foo#{ab2_claim.claim_id[:4]}", r_ab2['short_url'])
self.assertEqual("@foo#a/foo#a", r_a2['canonical_url'])
self.assertEqual("@foo#a/foo#ab", r_ab2['canonical_url'])
self.assertEqual(2, search(claim_id=txo_chan_a.claim_id, limit=1)[0]['claims_in_channel'])
self.assertEqual(0, search(claim_id=txo_chan_ab.claim_id, limit=1)[0]['claims_in_channel'])
# change channel of stream
self.assertEqual("@foo#a/foo#ab", search(claim_id=ab2_claim.claim_id, limit=1)[0]['canonical_url'])
tx_ab2 = self.get_stream_update(tx_ab2, COIN, txo_chan_ab)
await advance(10, [tx_ab2])
self.assertEqual("@foo#ab/foo#a", search(claim_id=ab2_claim.claim_id, limit=1)[0]['canonical_url'])
# TODO: currently there is a bug where a stream leaving a channel does not update that channel's claims count
self.assertEqual(2, search(claim_id=txo_chan_a.claim_id, limit=1)[0]['claims_in_channel'])
# TODO: once the bug is fixed, remove the assertion above and enable the one below
#self.assertEqual(1, search(claim_id=txo_chan_a.claim_id, limit=1)[0]['claims_in_channel'])
self.assertEqual(1, search(claim_id=txo_chan_ab.claim_id, limit=1)[0]['claims_in_channel'])
# claim abandon updates claims_in_channel
await advance(11, [self.get_abandon(tx_ab2)])
self.assertEqual(0, search(claim_id=txo_chan_ab.claim_id, limit=1)[0]['claims_in_channel'])
# delete channel, invalidating stream claim signatures
await advance(12, [self.get_abandon(channel_update)])
(r_a2,) = search(order_by=['creation_height'], limit=1)
self.assertEqual(f"foo#{a2_claim.claim_id[:2]}", r_a2['short_url'])
self.assertIsNone(r_a2['canonical_url'])
def test_resolve_issue_2448(self):
advance = self.advance
tx_chan_a = self.get_channel_with_claim_id_prefix('a', 1, key=b'c')
tx_chan_ab = self.get_channel_with_claim_id_prefix('ab', 72, key=b'c')
txo_chan_a = tx_chan_a[0].outputs[0]
txo_chan_ab = tx_chan_ab[0].outputs[0]
advance(1, [tx_chan_a])
advance(2, [tx_chan_ab])
self.assertEqual(reader.resolve_url("@foo#a")['claim_hash'], txo_chan_a.claim_hash)
self.assertEqual(reader.resolve_url("@foo#ab")['claim_hash'], txo_chan_ab.claim_hash)
# the update bumps the height at which the channel last changed
advance(9, [self.get_channel_update(txo_chan_a, COIN, key=b'c')])
# make sure that activation_height is used instead of height (issue #2448)
self.assertEqual(reader.resolve_url("@foo#a")['claim_hash'], txo_chan_a.claim_hash)
self.assertEqual(reader.resolve_url("@foo#ab")['claim_hash'], txo_chan_ab.claim_hash)
def test_canonical_find_shortest_id(self):
new_hash = 'abcdef0123456789beef'
other0 = '1bcdef0123456789beef'
other1 = 'ab1def0123456789beef'
other2 = 'abc1ef0123456789beef'
other3 = 'abcdef0123456789bee1'
f = FindShortestID()
f.step(other0, new_hash)
self.assertEqual('#a', f.finalize())
f.step(other1, new_hash)
self.assertEqual('#abc', f.finalize())
f.step(other2, new_hash)
self.assertEqual('#abcd', f.finalize())
f.step(other3, new_hash)
self.assertEqual('#abcdef0123456789beef', f.finalize())
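
The assertions above pin down the FindShortestID contract: it returns '#' plus the shortest prefix of the new claim id that no competing id shares. A minimal sketch consistent with those assertions (hypothetical; the real implementation, likely registered as an SQL aggregate, is not part of this diff):

class FindShortestID:
    def __init__(self):
        self.short_id = ''

    def step(self, other_id: str, own_id: str):
        # length of the prefix shared with the competing claim id
        common = 0
        for ours, theirs in zip(own_id, other_id):
            if ours != theirs:
                break
            common += 1
        # one character past the longest shared prefix uniquely identifies us
        if common + 1 > len(self.short_id):
            self.short_id = own_id[:common + 1]

    def finalize(self) -> str:
        return '#' + self.short_id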
class TestTrending(TestSQLDB):
def test_trending(self):
advance = self.advance
no_trend = self.get_stream('Claim A', COIN)
downwards = self.get_stream('Claim B', COIN)
up_small = self.get_stream('Claim C', COIN)
up_medium = self.get_stream('Claim D', COIN)
up_biggly = self.get_stream('Claim E', COIN)
claims = advance(1, [up_biggly, up_medium, up_small, no_trend, downwards])
for window in range(1, 8):
advance(zscore.TRENDING_WINDOW * window, [
self.get_support(downwards, (20-window)*COIN),
self.get_support(up_small, int(20+(window/10)*COIN)),
self.get_support(up_medium, (20+(window*(2 if window == 7 else 1)))*COIN),
self.get_support(up_biggly, (20+(window*(3 if window == 7 else 1)))*COIN),
])
results = search(order_by=['trending_local'])
self.assertEqual([c.claim_id for c in claims], [hexlify(c['claim_hash'][::-1]).decode() for c in results])
self.assertEqual([10, 6, 2, 0, -2], [int(c['trending_local']) for c in results])
self.assertEqual([53, 38, -32, 0, -6], [int(c['trending_global']) for c in results])
self.assertEqual([4, 4, 2, 0, 1], [int(c['trending_group']) for c in results])
self.assertEqual([53, 38, 2, 0, -6], [int(c['trending_mixed']) for c in results])
def test_edge(self):
problematic = self.get_stream('Problem', COIN)
self.advance(1, [problematic])
self.advance(zscore.TRENDING_WINDOW, [self.get_support(problematic, 53000000000)])
self.advance(zscore.TRENDING_WINDOW * 2, [self.get_support(problematic, 500000000)])
class TestContentBlocking(TestSQLDB):
def test_blocking_and_filtering(self):
# content claims and channels
tx0 = self.get_channel('A Channel', COIN, '@channel1')
regular_channel = tx0[0].outputs[0]
tx1 = self.get_stream('Claim One', COIN, 'claim1')
tx2 = self.get_stream('Claim Two', COIN, 'claim2', regular_channel)
tx3 = self.get_stream('Claim Three', COIN, 'claim3')
self.advance(1, [tx0, tx1, tx2, tx3])
claim1, claim2, claim3 = tx1[0].outputs[0], tx2[0].outputs[0], tx3[0].outputs[0]
# block and filter channels
tx0 = self.get_channel('Blocking Channel', COIN, '@block')
tx1 = self.get_channel('Filtering Channel', COIN, '@filter')
blocking_channel = tx0[0].outputs[0]
filtering_channel = tx1[0].outputs[0]
self.sql.blocking_channel_hashes.add(blocking_channel.claim_hash)
self.sql.filtering_channel_hashes.add(filtering_channel.claim_hash)
self.advance(2, [tx0, tx1])
self.assertEqual({}, dict(self.sql.blocked_streams))
self.assertEqual({}, dict(self.sql.blocked_channels))
self.assertEqual({}, dict(self.sql.filtered_streams))
self.assertEqual({}, dict(self.sql.filtered_channels))
# nothing blocked
results, _ = reader.resolve([
claim1.claim_name, claim2.claim_name,
claim3.claim_name, regular_channel.claim_name
])
self.assertEqual(claim1.claim_hash, results[0]['claim_hash'])
self.assertEqual(claim2.claim_hash, results[1]['claim_hash'])
self.assertEqual(claim3.claim_hash, results[2]['claim_hash'])
self.assertEqual(regular_channel.claim_hash, results[3]['claim_hash'])
# nothing filtered
results, censor = censored_search()
self.assertEqual(6, len(results))
self.assertEqual(0, censor.total)
self.assertEqual({}, censor.censored)
# block claim reposted to blocking channel, also gets filtered
repost_tx1 = self.get_repost(claim1.claim_id, COIN, blocking_channel)
repost1 = repost_tx1[0].outputs[0]
self.advance(3, [repost_tx1])
self.assertEqual(
{repost1.claim.repost.reference.claim_hash: blocking_channel.claim_hash},
dict(self.sql.blocked_streams)
)
self.assertEqual({}, dict(self.sql.blocked_channels))
self.assertEqual(
{repost1.claim.repost.reference.claim_hash: blocking_channel.claim_hash},
dict(self.sql.filtered_streams)
)
self.assertEqual({}, dict(self.sql.filtered_channels))
# claim is blocked from results by direct repost
results, censor = censored_search(text='Claim')
self.assertEqual(2, len(results))
self.assertEqual(claim2.claim_hash, results[0]['claim_hash'])
self.assertEqual(claim3.claim_hash, results[1]['claim_hash'])
self.assertEqual(1, censor.total)
self.assertEqual({blocking_channel.claim_hash: 1}, censor.censored)
results, _ = reader.resolve([claim1.claim_name])
self.assertEqual(
f"Resolve of 'claim1' was censored by channel with claim id '{blocking_channel.claim_id}'.",
results[0].args[0]
)
results, _ = reader.resolve([
claim2.claim_name, regular_channel.claim_name # claim2 and channel still resolved
])
self.assertEqual(claim2.claim_hash, results[0]['claim_hash'])
self.assertEqual(regular_channel.claim_hash, results[1]['claim_hash'])
# block claim indirectly by blocking its parent channel
repost_tx2 = self.get_repost(regular_channel.claim_id, COIN, blocking_channel)
repost2 = repost_tx2[0].outputs[0]
self.advance(4, [repost_tx2])
self.assertEqual(
{repost1.claim.repost.reference.claim_hash: blocking_channel.claim_hash},
dict(self.sql.blocked_streams)
)
self.assertEqual(
{repost2.claim.repost.reference.claim_hash: blocking_channel.claim_hash},
dict(self.sql.blocked_channels)
)
self.assertEqual(
{repost1.claim.repost.reference.claim_hash: blocking_channel.claim_hash},
dict(self.sql.filtered_streams)
)
self.assertEqual(
{repost2.claim.repost.reference.claim_hash: blocking_channel.claim_hash},
dict(self.sql.filtered_channels)
)
# claim in blocked channel is filtered from search and can't resolve
results, censor = censored_search(text='Claim')
self.assertEqual(1, len(results))
self.assertEqual(claim3.claim_hash, results[0]['claim_hash'])
self.assertEqual(2, censor.total)
self.assertEqual({blocking_channel.claim_hash: 2}, censor.censored)
results, _ = reader.resolve([
claim2.claim_name, regular_channel.claim_name # claim2 and channel don't resolve
])
self.assertEqual(
f"Resolve of 'claim2' was censored by channel with claim id '{blocking_channel.claim_id}'.",
results[0].args[0]
)
self.assertEqual(
f"Resolve of '@channel1' was censored by channel with claim id '{blocking_channel.claim_id}'.",
results[1].args[0]
)
results, _ = reader.resolve([claim3.claim_name]) # claim3 still resolved
self.assertEqual(claim3.claim_hash, results[0]['claim_hash'])
# filtered claim is only filtered and not blocked
repost_tx3 = self.get_repost(claim3.claim_id, COIN, filtering_channel)
repost3 = repost_tx3[0].outputs[0]
self.advance(5, [repost_tx3])
self.assertEqual(
{repost1.claim.repost.reference.claim_hash: blocking_channel.claim_hash},
dict(self.sql.blocked_streams)
)
self.assertEqual(
{repost2.claim.repost.reference.claim_hash: blocking_channel.claim_hash},
dict(self.sql.blocked_channels)
)
self.assertEqual(
{repost1.claim.repost.reference.claim_hash: blocking_channel.claim_hash,
repost3.claim.repost.reference.claim_hash: filtering_channel.claim_hash},
dict(self.sql.filtered_streams)
)
self.assertEqual(
{repost2.claim.repost.reference.claim_hash: blocking_channel.claim_hash},
dict(self.sql.filtered_channels)
)
# filtered claim is excluded from search results but is still resolvable
results, censor = censored_search(text='Claim')
self.assertEqual(0, len(results))
self.assertEqual(3, censor.total)
self.assertEqual({blocking_channel.claim_hash: 2, filtering_channel.claim_hash: 1}, censor.censored)
results, _ = reader.resolve([claim3.claim_name]) # claim3 still resolved
self.assertEqual(claim3.claim_hash, results[0]['claim_hash'])
# abandon unblocks content
self.advance(6, [
self.get_abandon(repost_tx1),
self.get_abandon(repost_tx2),
self.get_abandon(repost_tx3)
])
self.assertEqual({}, dict(self.sql.blocked_streams))
self.assertEqual({}, dict(self.sql.blocked_channels))
self.assertEqual({}, dict(self.sql.filtered_streams))
self.assertEqual({}, dict(self.sql.filtered_channels))
results, censor = censored_search(text='Claim')
self.assertEqual(3, len(results))
self.assertEqual(0, censor.total)
results, censor = censored_search()
self.assertEqual(6, len(results))
self.assertEqual(0, censor.total)
results, _ = reader.resolve([
claim1.claim_name, claim2.claim_name,
claim3.claim_name, regular_channel.claim_name
])
self.assertEqual(claim1.claim_hash, results[0]['claim_hash'])
self.assertEqual(claim2.claim_hash, results[1]['claim_hash'])
self.assertEqual(claim3.claim_hash, results[2]['claim_hash'])
self.assertEqual(regular_channel.claim_hash, results[3]['claim_hash'])
def test_pagination(self):
one, two, three, four, five, six, seven, filter_channel = self.advance(1, [
self.get_stream('One', COIN),
self.get_stream('Two', COIN),
self.get_stream('Three', COIN),
self.get_stream('Four', COIN),
self.get_stream('Five', COIN),
self.get_stream('Six', COIN),
self.get_stream('Seven', COIN),
self.get_channel('Filtering Channel', COIN, '@filter'),
])
self.sql.filtering_channel_hashes.add(filter_channel.claim_hash)
# nothing filtered
results, censor = censored_search(order_by='^height', offset=1, limit=3)
self.assertEqual(3, len(results))
self.assertEqual(
[two.claim_hash, three.claim_hash, four.claim_hash],
[r['claim_hash'] for r in results]
)
self.assertEqual(0, censor.total)
# content filtered
repost1, repost2 = self.advance(2, [
self.get_repost(one.claim_id, COIN, filter_channel),
self.get_repost(two.claim_id, COIN, filter_channel),
])
results, censor = censored_search(order_by='^height', offset=1, limit=3)
self.assertEqual(3, len(results))
self.assertEqual(
[four.claim_hash, five.claim_hash, six.claim_hash],
[r['claim_hash'] for r in results]
)
self.assertEqual(2, censor.total)
self.assertEqual({filter_channel.claim_hash: 2}, censor.censored)
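
These assertions rely on the censor tallying what it hides: censor.total counts hidden rows and censor.censored maps each censoring channel's claim hash to how many rows it hid. A rough sketch of that bookkeeping (hypothetical; the field names and the censoring_channel_hash row key are assumptions, not the actual lbry.schema.result.Censor API):

class CensorSketch:
    def __init__(self, censoring_channel_hashes):
        self.censoring_channel_hashes = censoring_channel_hashes
        self.censored = {}  # censoring channel hash -> number of rows hidden

    @property
    def total(self):
        return sum(self.censored.values())

    def censor(self, row) -> bool:
        # hide the row if its repost came from a censoring channel
        channel = row.get('censoring_channel_hash')
        if channel in self.censoring_channel_hashes:
            self.censored[channel] = self.censored.get(channel, 0) + 1
            return True  # caller drops the row from results
        return False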

View file

@@ -1,51 +1,21 @@
import unittest
from unittest import TestCase
from binascii import hexlify, unhexlify
from itertools import cycle
from lbry.testcase import AsyncioTestCase
from lbry.wallet.constants import CENT, COIN, NULL_HASH32
from lbry.wallet import Wallet, Account, Ledger, Headers, Transaction, Output, Input
from lbry.db import Database
from lbry.blockchain.ledger import Ledger
from lbry.constants import CENT, NULL_HASH32
from lbry.blockchain.transaction import Transaction
from lbry.testcase import (
get_transaction, get_input, get_output, get_claim_transaction
)
NULL_HASH = b'\x00'*32
FEE_PER_BYTE = 50
FEE_PER_CHAR = 200000
def get_output(amount=CENT, pubkey_hash=NULL_HASH32, height=-2):
return Transaction(height=height) \
.add_outputs([Output.pay_pubkey_hash(amount, pubkey_hash)]) \
.outputs[0]
class TestSizeAndFeeEstimation(TestCase):
def get_input(amount=CENT, pubkey_hash=NULL_HASH):
return Input.spend(get_output(amount, pubkey_hash))
def get_transaction(txo=None):
return Transaction() \
.add_inputs([get_input()]) \
.add_outputs([txo or Output.pay_pubkey_hash(CENT, NULL_HASH32)])
def get_claim_transaction(claim_name, claim=b''):
return get_transaction(
Output.pay_claim_name_pubkey_hash(CENT, claim_name, claim, NULL_HASH32)
)
class TestSizeAndFeeEstimation(AsyncioTestCase):
async def asyncSetUp(self):
self.ledger = Ledger({
'db': Database('sqlite:///:memory:'),
'headers': Headers(':memory:')
})
await self.ledger.db.open()
async def asyncTearDown(self):
await self.ledger.db.close()
def setUp(self):
self.ledger = Ledger()
def test_output_size_and_fee(self):
txo = get_output()
@@ -81,7 +51,7 @@ class TestSizeAndFeeEstimation(AsyncioTestCase):
self.assertEqual(tx.get_base_fee(self.ledger), FEE_PER_BYTE * tx.base_size)
class TestAccountBalanceImpactFromTransaction(unittest.TestCase):
class TestAccountBalanceImpactFromTransaction(TestCase):
def test_is_my_output_not_set(self):
tx = get_transaction()
@@ -97,8 +67,8 @@ class TestAccountBalanceImpactFromTransaction(unittest.TestCase):
def test_paying_from_my_account_to_other_account(self):
tx = Transaction() \
.add_inputs([get_input(300*CENT)]) \
.add_outputs([get_output(190*CENT, NULL_HASH),
get_output(100*CENT, NULL_HASH)])
.add_outputs([get_output(190*CENT, NULL_HASH32),
get_output(100*CENT, NULL_HASH32)])
tx.inputs[0].txo_ref.txo.is_my_output = True
tx.outputs[0].is_my_output = False
tx.outputs[1].is_my_output = True
@@ -107,8 +77,8 @@ class TestAccountBalanceImpactFromTransaction(unittest.TestCase):
def test_paying_from_other_account_to_my_account(self):
tx = Transaction() \
.add_inputs([get_input(300*CENT)]) \
.add_outputs([get_output(190*CENT, NULL_HASH),
get_output(100*CENT, NULL_HASH)])
.add_outputs([get_output(190*CENT, NULL_HASH32),
get_output(100*CENT, NULL_HASH32)])
tx.inputs[0].txo_ref.txo.is_my_output = False
tx.outputs[0].is_my_output = True
tx.outputs[1].is_my_output = False
@@ -117,15 +87,15 @@ class TestAccountBalanceImpactFromTransaction(unittest.TestCase):
def test_paying_from_my_account_to_my_account(self):
tx = Transaction() \
.add_inputs([get_input(300*CENT)]) \
.add_outputs([get_output(190*CENT, NULL_HASH),
get_output(100*CENT, NULL_HASH)])
.add_outputs([get_output(190*CENT, NULL_HASH32),
get_output(100*CENT, NULL_HASH32)])
tx.inputs[0].txo_ref.txo.is_my_output = True
tx.outputs[0].is_my_output = True
tx.outputs[1].is_my_output = True
self.assertEqual(tx.net_account_balance, -10*CENT) # lost to fee
class TestTransactionSerialization(unittest.TestCase):
class TestTransactionSerialization(TestCase):
def test_genesis_transaction(self):
raw = unhexlify(
@@ -259,164 +229,3 @@ class TestTransactionSerialization(unittest.TestCase):
tx._reset()
self.assertEqual(tx.raw, raw)
class TestTransactionSigning(AsyncioTestCase):
async def asyncSetUp(self):
self.ledger = Ledger({
'db': Database('sqlite:///:memory:'),
'headers': Headers(':memory:')
})
await self.ledger.db.open()
async def asyncTearDown(self):
await self.ledger.db.close()
async def test_sign(self):
account = Account.from_dict(
self.ledger, Wallet(), {
"seed":
"carbon smart garage balance margin twelve chest sword toas"
"t envelope bottom stomach absent"
}
)
await account.ensure_address_gap()
address1, address2 = await account.receiving.get_addresses(limit=2)
pubkey_hash1 = self.ledger.address_to_hash160(address1)
pubkey_hash2 = self.ledger.address_to_hash160(address2)
tx = Transaction() \
.add_inputs([Input.spend(get_output(int(2*COIN), pubkey_hash1))]) \
.add_outputs([Output.pay_pubkey_hash(int(1.9*COIN), pubkey_hash2)])
await tx.sign([account])
self.assertEqual(
hexlify(tx.inputs[0].script.values['signature']),
b'304402200dafa26ad7cf38c5a971c8a25ce7d85a076235f146126762296b1223c42ae21e022020ef9eeb8'
b'398327891008c5c0be4357683f12cb22346691ff23914f457bf679601'
)
class TransactionIOBalancing(AsyncioTestCase):
async def asyncSetUp(self):
self.ledger = Ledger({
'db': Database('sqlite:///:memory:'),
'headers': Headers(':memory:')
})
await self.ledger.db.open()
self.account = Account.from_dict(
self.ledger, Wallet(), {
"seed": "carbon smart garage balance margin twelve chest sword "
"toast envelope bottom stomach absent"
}
)
addresses = await self.account.ensure_address_gap()
self.pubkey_hash = [self.ledger.address_to_hash160(a) for a in addresses]
self.hash_cycler = cycle(self.pubkey_hash)
async def asyncTearDown(self):
await self.ledger.db.close()
def txo(self, amount, address=None):
return get_output(int(amount*COIN), address or next(self.hash_cycler))
def txi(self, txo):
return Input.spend(txo)
def tx(self, inputs, outputs):
return Transaction.create(inputs, outputs, [self.account], self.account)
async def create_utxos(self, amounts):
utxos = [self.txo(amount) for amount in amounts]
self.funding_tx = Transaction(is_verified=True) \
.add_inputs([self.txi(self.txo(sum(amounts)+0.1))]) \
.add_outputs(utxos)
await self.ledger.db.insert_transaction(self.funding_tx)
for utxo in utxos:
await self.ledger.db.save_transaction_io(
self.funding_tx,
self.ledger.hash160_to_address(utxo.script.values['pubkey_hash']),
utxo.script.values['pubkey_hash'], ''
)
return utxos
@staticmethod
def inputs(tx):
return [round(i.amount/COIN, 2) for i in tx.inputs]
@staticmethod
def outputs(tx):
return [round(o.amount/COIN, 2) for o in tx.outputs]
async def test_basic_use_cases(self):
self.ledger.fee_per_byte = int(.01*CENT)
# available UTXOs for filling missing inputs
utxos = await self.create_utxos([
1, 1, 3, 5, 10
])
# pay 3 coins (3.02 w/ fees)
tx = await self.tx(
[], # inputs
[self.txo(3)] # outputs
)
# best UTXO match is 5 (as UTXO 3 will be short 0.02 to cover fees)
self.assertListEqual(self.inputs(tx), [5])
# a change of 1.98 is added to reach balance
self.assertListEqual(self.outputs(tx), [3, 1.98])
await self.ledger.release_outputs(utxos)
# pay 2.98 coins (3.00 w/ fees)
tx = await self.tx(
[], # inputs
[self.txo(2.98)] # outputs
)
# best UTXO match is 3 and no change is needed
self.assertListEqual(self.inputs(tx), [3])
self.assertListEqual(self.outputs(tx), [2.98])
await self.ledger.release_outputs(utxos)
# supplied input and output, but input is not enough to cover output
tx = await self.tx(
[self.txi(self.txo(10))], # inputs
[self.txo(11)] # outputs
)
# additional input is chosen (UTXO 3)
self.assertListEqual([10, 3], self.inputs(tx))
# change is now needed to consume extra input
self.assertListEqual([11, 1.96], self.outputs(tx))
await self.ledger.release_outputs(utxos)
# liquidating a UTXO
tx = await self.tx(
[self.txi(self.txo(10))], # inputs
[] # outputs
)
self.assertListEqual([10], self.inputs(tx))
# a change output is added to consume the remaining amount
self.assertListEqual([9.98], self.outputs(tx))
await self.ledger.release_outputs(utxos)
# liquidating at a loss, requires adding extra inputs
tx = await self.tx(
[self.txi(self.txo(0.01))], # inputs
[] # outputs
)
# UTXO 1 is added to cover some of the fee
self.assertListEqual([0.01, 1], self.inputs(tx))
# change is now needed to consume extra input
self.assertListEqual([0.97], self.outputs(tx))
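
The figures in the comments above all follow the same identity: change equals the sum of inputs minus the sum of outputs minus the fee. A quick arithmetic check (fee values here are read off the test comments; the real ledger derives them from serialized size times fee_per_byte):

def change_amount(input_coins, output_coins, fee):
    # change output = total in, minus total paid out, minus the miner fee
    return round(sum(input_coins) - sum(output_coins) - fee, 2)

assert change_amount([5], [3], 0.02) == 1.98       # pay 3 coins from a 5 coin UTXO
assert change_amount([10, 3], [11], 0.04) == 1.96  # extra input pulled in to cover 11
assert change_amount([10], [], 0.02) == 9.98       # liquidating a single UTXO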

View file

@@ -1,7 +1,7 @@
import unittest
from lbry.wallet.util import ArithUint256
from lbry.wallet.util import coins_to_satoshis as c2s, satoshis_to_coins as s2c
from lbry.blockchain.util import ArithUint256
from lbry.blockchain.util import coins_to_satoshis as c2s, satoshis_to_coins as s2c
class TestCoinValueParsing(unittest.TestCase):