merged torba into lbry

Lex Berezhny 2019-12-30 18:47:37 -05:00
parent 1c00129f76
commit 0b23f68fb2
113 changed files with 3967 additions and 7117 deletions

lbry/.gitignore

@@ -12,3 +12,5 @@ _trial_temp/
/tests/integration/files
/tests/.coverage.*
/lbry/wallet/bin


@@ -1,10 +1,711 @@
import time
from torba.server.block_processor import BlockProcessor
from lbry.schema.claim import Claim
from lbry.wallet.server.db.writer import SQLDB
import asyncio
from struct import pack, unpack
import torba
from torba.server.daemon import DaemonError
from torba.server.hash import hash_to_hex_str, HASHX_LEN
from torba.server.util import chunks, class_logger
from torba.server.db import FlushData
class Prefetcher:
"""Prefetches blocks (in the forward direction only)."""
def __init__(self, daemon, coin, blocks_event):
self.logger = class_logger(__name__, self.__class__.__name__)
self.daemon = daemon
self.coin = coin
self.blocks_event = blocks_event
self.blocks = []
self.caught_up = False
# Access to fetched_height should be protected by the semaphore
self.fetched_height = None
self.semaphore = asyncio.Semaphore()
self.refill_event = asyncio.Event()
# The prefetched block cache size. The min cache size has
# little effect on sync time.
self.cache_size = 0
self.min_cache_size = 10 * 1024 * 1024
# This makes the first fetch be 10 blocks
self.ave_size = self.min_cache_size // 10
self.polling_delay = 5
async def main_loop(self, bp_height):
"""Loop forever polling for more blocks."""
await self.reset_height(bp_height)
while True:
try:
# Sleep a while if there is nothing to prefetch
await self.refill_event.wait()
if not await self._prefetch_blocks():
await asyncio.sleep(self.polling_delay)
except DaemonError as e:
self.logger.info(f'ignoring daemon error: {e}')
def get_prefetched_blocks(self):
"""Called by block processor when it is processing queued blocks."""
blocks = self.blocks
self.blocks = []
self.cache_size = 0
self.refill_event.set()
return blocks
async def reset_height(self, height):
"""Reset to prefetch blocks from the block processor's height.
Used in blockchain reorganisations. This coroutine can run
concurrently with the _prefetch_blocks coroutine, so we must
synchronize with a semaphore.
"""
async with self.semaphore:
self.blocks.clear()
self.cache_size = 0
self.fetched_height = height
self.refill_event.set()
daemon_height = await self.daemon.height()
behind = daemon_height - height
if behind > 0:
self.logger.info(f'catching up to daemon height {daemon_height:,d} '
f'({behind:,d} blocks behind)')
else:
self.logger.info(f'caught up to daemon height {daemon_height:,d}')
async def _prefetch_blocks(self):
"""Prefetch some blocks and put them on the queue.
Repeats until the queue is full or caught up.
"""
daemon = self.daemon
daemon_height = await daemon.height()
async with self.semaphore:
while self.cache_size < self.min_cache_size:
# Try and catch up all blocks but limit to room in cache.
# Constrain fetch count to between 0 and 500 regardless;
# testnet can be lumpy.
cache_room = self.min_cache_size // self.ave_size
count = min(daemon_height - self.fetched_height, cache_room)
count = min(500, max(count, 0))
if not count:
self.caught_up = True
return False
first = self.fetched_height + 1
hex_hashes = await daemon.block_hex_hashes(first, count)
if self.caught_up:
self.logger.info('new block height {:,d} hash {}'
.format(first + count-1, hex_hashes[-1]))
blocks = await daemon.raw_blocks(hex_hashes)
assert count == len(blocks)
# Special handling for genesis block
if first == 0:
blocks[0] = self.coin.genesis_block(blocks[0])
self.logger.info(f'verified genesis block with hash {hex_hashes[0]}')
# Update our recent average block size estimate
size = sum(len(block) for block in blocks)
if count >= 10:
self.ave_size = size // count
else:
self.ave_size = (size + (10 - count) * self.ave_size) // 10
self.blocks.extend(blocks)
self.cache_size += size
self.fetched_height += count
self.blocks_event.set()
self.refill_event.clear()
return True
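# Side note: the sizing arithmetic above is easy to check in isolation.
# A minimal standalone sketch (illustrative only, not part of this file),
# using the constructor defaults of min_cache_size = 10 MB, ave_size = 1 MB:
def fetch_count(daemon_height, fetched_height, min_cache_size, ave_size):
    # Catch up as far as possible, limited by room left in the cache...
    cache_room = min_cache_size // ave_size
    count = min(daemon_height - fetched_height, cache_room)
    # ...and always clamped to between 0 and 500 blocks per request.
    return min(500, max(count, 0))
# The very first fetch therefore asks for exactly 10 blocks:
assert fetch_count(600_000, 0, 10 * 1024 * 1024, 1024 * 1024) == 10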
class ChainError(Exception):
"""Raised on error processing blocks."""
class BlockProcessor:
"""Process blocks and update the DB state to match.
Employ a prefetcher to prefetch blocks in batches for processing.
Coordinate backing up in case of chain reorganisations.
"""
def __init__(self, env, db, daemon, notifications):
self.env = env
self.db = db
self.daemon = daemon
self.notifications = notifications
self.coin = env.coin
self.blocks_event = asyncio.Event()
self.prefetcher = Prefetcher(daemon, env.coin, self.blocks_event)
self.logger = class_logger(__name__, self.__class__.__name__)
# Meta
self.next_cache_check = 0
self.touched = set()
self.reorg_count = 0
# Caches of unflushed items.
self.headers = []
self.tx_hashes = []
self.undo_infos = []
# UTXO cache
self.utxo_cache = {}
self.db_deletes = []
# If the lock is successfully acquired, in-memory chain state
# is consistent with self.height
self.state_lock = asyncio.Lock()
async def run_in_thread_with_lock(self, func, *args):
# Run in a thread to prevent blocking. Shielded so that
# cancellations from shutdown don't lose work - when the task
# completes the data will be flushed and then we shut down.
# Take the state lock to be certain in-memory state is
# consistent and not being updated elsewhere.
async def run_in_thread_locked():
async with self.state_lock:
return await asyncio.get_event_loop().run_in_executor(None, func, *args)
return await asyncio.shield(run_in_thread_locked())
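# The shield-plus-lock pattern above can be demonstrated standalone
# (illustrative toy, stdlib only, not part of this file): cancelling the
# awaiting task does not abandon work already running in the executor.
import asyncio, time
async def _shielded(lock):
    async def locked():
        async with lock:
            # a blocking "flush" running in the default executor
            return await asyncio.get_event_loop().run_in_executor(None, time.sleep, 1)
    return await asyncio.shield(locked())
async def _demo():
    task = asyncio.ensure_future(_shielded(asyncio.Lock()))
    await asyncio.sleep(0.1)
    task.cancel()             # the awaiting coroutine is cancelled...
    await asyncio.sleep(1.5)  # ...but the executor job still runs to completion
asyncio.get_event_loop().run_until_complete(_demo())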
async def check_and_advance_blocks(self, raw_blocks):
"""Process the list of raw blocks passed. Detects and handles
reorgs.
"""
if not raw_blocks:
return
first = self.height + 1
blocks = [self.coin.block(raw_block, first + n)
for n, raw_block in enumerate(raw_blocks)]
headers = [block.header for block in blocks]
hprevs = [self.coin.header_prevhash(h) for h in headers]
chain = [self.tip] + [self.coin.header_hash(h) for h in headers[:-1]]
if hprevs == chain:
start = time.time()
await self.run_in_thread_with_lock(self.advance_blocks, blocks)
await self._maybe_flush()
if not self.db.first_sync:
s = '' if len(blocks) == 1 else 's'
self.logger.info('processed {:,d} block{} in {:.1f}s'
.format(len(blocks), s,
time.time() - start))
if self._caught_up_event.is_set():
await self.notifications.on_block(self.touched, self.height)
self.touched = set()
elif hprevs[0] != chain[0]:
await self.reorg_chain()
else:
# It is probably possible but extremely rare that what
# bitcoind returns doesn't form a chain because it
# reorg-ed the chain as it was processing the batched
# block hash requests. Should this happen it's simplest
# just to reset the prefetcher and try again.
self.logger.warning('daemon blocks do not form a chain; '
'resetting the prefetcher')
await self.prefetcher.reset_height(self.height)
async def reorg_chain(self, count=None):
"""Handle a chain reorganisation.
Count is the number of blocks to simulate a reorg, or None for
a real reorg."""
if count is None:
self.logger.info('chain reorg detected')
else:
self.logger.info(f'faking a reorg of {count:,d} blocks')
await self.flush(True)
async def get_raw_blocks(last_height, hex_hashes):
heights = range(last_height, last_height - len(hex_hashes), -1)
try:
blocks = [self.db.read_raw_block(height) for height in heights]
self.logger.info(f'read {len(blocks)} blocks from disk')
return blocks
except FileNotFoundError:
return await self.daemon.raw_blocks(hex_hashes)
def flush_backup():
# self.touched can include other addresses which is
# harmless, but remove None.
self.touched.discard(None)
self.db.flush_backup(self.flush_data(), self.touched)
start, last, hashes = await self.reorg_hashes(count)
# Reverse and convert to hex strings.
hashes = [hash_to_hex_str(hash) for hash in reversed(hashes)]
for hex_hashes in chunks(hashes, 50):
raw_blocks = await get_raw_blocks(last, hex_hashes)
await self.run_in_thread_with_lock(self.backup_blocks, raw_blocks)
await self.run_in_thread_with_lock(flush_backup)
last -= len(raw_blocks)
await self.prefetcher.reset_height(self.height)
async def reorg_hashes(self, count):
"""Return a pair (start, last, hashes) of blocks to back up during a
reorg.
The hashes are returned in order of increasing height. Start
is the height of the first hash, last of the last.
"""
start, count = await self.calc_reorg_range(count)
last = start + count - 1
s = '' if count == 1 else 's'
self.logger.info(f'chain was reorganised replacing {count:,d} '
f'block{s} at heights {start:,d}-{last:,d}')
return start, last, await self.db.fs_block_hashes(start, count)
async def calc_reorg_range(self, count):
"""Calculate the reorg range"""
def diff_pos(hashes1, hashes2):
"""Returns the index of the first difference in the hash lists.
If both lists match returns their length."""
for n, (hash1, hash2) in enumerate(zip(hashes1, hashes2)):
if hash1 != hash2:
return n
return len(hashes)
if count is None:
# A real reorg
start = self.height - 1
count = 1
while start > 0:
hashes = await self.db.fs_block_hashes(start, count)
hex_hashes = [hash_to_hex_str(hash) for hash in hashes]
d_hex_hashes = await self.daemon.block_hex_hashes(start, count)
n = diff_pos(hex_hashes, d_hex_hashes)
if n > 0:
start += n
break
count = min(count * 2, start)
start -= count
count = (self.height - start) + 1
else:
start = (self.height - count) + 1
return start, count
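# A toy trace of the backward search above (plain lists stand in for the
# DB and daemon lookups; illustrative only, not part of this file). The
# probe window doubles while walking back from the tip until diff_pos
# finds agreement, and everything above that point is replaced.
def find_reorg_range(local_hashes, daemon_hashes, height):
    def diff_pos(a, b):
        for n, (x, y) in enumerate(zip(a, b)):
            if x != y:
                return n
        return len(a)
    start, count = height - 1, 1
    while start > 0:
        n = diff_pos(local_hashes[start:start + count],
                     daemon_hashes[start:start + count])
        if n > 0:
            start += n
            break
        count = min(count * 2, start)
        start -= count
    return start, (height - start) + 1
local = [f'L{h}' for h in range(10)]          # 10-block local chain
daemon = local[:7] + ['D7', 'D8', 'D9']       # daemon diverges at height 7
assert find_reorg_range(local, daemon, 9) == (7, 3)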
def estimate_txs_remaining(self):
# Try to estimate how many txs there are to go
daemon_height = self.daemon.cached_height()
coin = self.coin
tail_count = daemon_height - max(self.height, coin.TX_COUNT_HEIGHT)
# Damp the initial enthusiasm
realism = max(2.0 - 0.9 * self.height / coin.TX_COUNT_HEIGHT, 1.0)
return (tail_count * coin.TX_PER_BLOCK +
max(coin.TX_COUNT - self.tx_count, 0)) * realism
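# The "realism" factor above doubles the early estimate and decays
# linearly toward 1.0; a quick numeric trace with a hypothetical
# TX_COUNT_HEIGHT (illustrative only, not part of this file):
TX_COUNT_HEIGHT = 400_000
for height in (0, 200_000, 400_000):
    realism = max(2.0 - 0.9 * height / TX_COUNT_HEIGHT, 1.0)
    print(height, realism)  # -> 2.0, 1.55, 1.1; floor of 1.0 past ~444k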
# - Flushing
def flush_data(self):
"""The data for a flush. The lock must be taken."""
assert self.state_lock.locked()
return FlushData(self.height, self.tx_count, self.headers,
self.tx_hashes, self.undo_infos, self.utxo_cache,
self.db_deletes, self.tip)
async def flush(self, flush_utxos):
def flush():
self.db.flush_dbs(self.flush_data(), flush_utxos,
self.estimate_txs_remaining)
await self.run_in_thread_with_lock(flush)
async def _maybe_flush(self):
# If caught up, flush everything as client queries are
# performed on the DB.
if self._caught_up_event.is_set():
await self.flush(True)
elif time.time() > self.next_cache_check:
flush_arg = self.check_cache_size()
if flush_arg is not None:
await self.flush(flush_arg)
self.next_cache_check = time.time() + 30
def check_cache_size(self):
"""Flush a cache if it gets too big."""
# Good average estimates based on traversal of subobjects and
# requesting size from Python (see deep_getsizeof).
one_MB = 1000*1000
utxo_cache_size = len(self.utxo_cache) * 205
db_deletes_size = len(self.db_deletes) * 57
hist_cache_size = self.db.history.unflushed_memsize()
# Roughly ntxs * 32 + nblocks * 42
tx_hash_size = ((self.tx_count - self.db.fs_tx_count) * 32
+ (self.height - self.db.fs_height) * 42)
utxo_MB = (db_deletes_size + utxo_cache_size) // one_MB
hist_MB = (hist_cache_size + tx_hash_size) // one_MB
self.logger.info('our height: {:,d} daemon: {:,d} '
'UTXOs {:,d}MB hist {:,d}MB'
.format(self.height, self.daemon.cached_height(),
utxo_MB, hist_MB))
# Flush history if it takes up over 20% of cache memory.
# Flush UTXOs once they take up 80% of cache memory.
cache_MB = self.env.cache_MB
if utxo_MB + hist_MB >= cache_MB or hist_MB >= cache_MB // 5:
return utxo_MB >= cache_MB * 4 // 5
return None
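# Plugging representative numbers into the estimates above (hypothetical
# counts and cache budget; illustrative only, not part of this file):
one_MB = 1000 * 1000
utxo_count, delete_count = 5_000_000, 100_000
utxo_MB = (utxo_count * 205 + delete_count * 57) // one_MB  # 1030 MB
cache_MB = 1200
flush_utxos = utxo_MB >= cache_MB * 4 // 5                  # 1030 >= 960 -> True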
def advance_blocks(self, blocks):
"""Synchronously advance the blocks.
It is already verified they correctly connect onto our tip.
"""
min_height = self.db.min_undo_height(self.daemon.cached_height())
height = self.height
for block in blocks:
height += 1
undo_info = self.advance_txs(
height, block.transactions, self.coin.electrum_header(block.header, height)
)
if height >= min_height:
self.undo_infos.append((undo_info, height))
self.db.write_raw_block(block.raw, height)
headers = [block.header for block in blocks]
self.height = height
self.headers.extend(headers)
self.tip = self.coin.header_hash(headers[-1])
def advance_txs(self, height, txs, header):
self.tx_hashes.append(b''.join(tx_hash for tx, tx_hash in txs))
# Use local vars for speed in the loops
undo_info = []
tx_num = self.tx_count
script_hashX = self.coin.hashX_from_script
s_pack = pack
put_utxo = self.utxo_cache.__setitem__
spend_utxo = self.spend_utxo
undo_info_append = undo_info.append
update_touched = self.touched.update
hashXs_by_tx = []
append_hashXs = hashXs_by_tx.append
for tx, tx_hash in txs:
hashXs = []
append_hashX = hashXs.append
tx_numb = s_pack('<I', tx_num)
# Spend the inputs
for txin in tx.inputs:
if txin.is_generation():
continue
cache_value = spend_utxo(txin.prev_hash, txin.prev_idx)
undo_info_append(cache_value)
append_hashX(cache_value[:-12])
# Add the new UTXOs
for idx, txout in enumerate(tx.outputs):
# Get the hashX. Ignore unspendable outputs
hashX = script_hashX(txout.pk_script)
if hashX:
append_hashX(hashX)
put_utxo(tx_hash + s_pack('<H', idx),
hashX + tx_numb + s_pack('<Q', txout.value))
append_hashXs(hashXs)
update_touched(hashXs)
tx_num += 1
self.db.history.add_unflushed(hashXs_by_tx, self.tx_count)
self.tx_count = tx_num
self.db.tx_counts.append(tx_num)
return undo_info
def backup_blocks(self, raw_blocks):
"""Backup the raw blocks and flush.
The blocks should be in order of decreasing height, starting at
self.height. A flush is performed once the blocks are backed up.
"""
self.db.assert_flushed(self.flush_data())
assert self.height >= len(raw_blocks)
coin = self.coin
for raw_block in raw_blocks:
# Check and update self.tip
block = coin.block(raw_block, self.height)
header_hash = coin.header_hash(block.header)
if header_hash != self.tip:
raise ChainError('backup block {} not tip {} at height {:,d}'
.format(hash_to_hex_str(header_hash),
hash_to_hex_str(self.tip),
self.height))
self.tip = coin.header_prevhash(block.header)
self.backup_txs(block.transactions)
self.height -= 1
self.db.tx_counts.pop()
self.logger.info(f'backed up to height {self.height:,d}')
def backup_txs(self, txs):
# Prevout values, in order down the block (coinbase first if present)
# undo_info is in reverse block order
undo_info = self.db.read_undo_info(self.height)
if undo_info is None:
raise ChainError(f'no undo information found for height {self.height:,d}')
n = len(undo_info)
# Use local vars for speed in the loops
s_pack = pack
put_utxo = self.utxo_cache.__setitem__
spend_utxo = self.spend_utxo
script_hashX = self.coin.hashX_from_script
touched = self.touched
undo_entry_len = 12 + HASHX_LEN
for tx, tx_hash in reversed(txs):
for idx, txout in enumerate(tx.outputs):
# Spend the TX outputs. Be careful with unspendable
# outputs - we didn't save those in the first place.
hashX = script_hashX(txout.pk_script)
if hashX:
cache_value = spend_utxo(tx_hash, idx)
touched.add(cache_value[:-12])
# Restore the inputs
for txin in reversed(tx.inputs):
if txin.is_generation():
continue
n -= undo_entry_len
undo_item = undo_info[n:n + undo_entry_len]
put_utxo(txin.prev_hash + s_pack('<H', txin.prev_idx),
undo_item)
touched.add(undo_item[:-12])
assert n == 0
self.tx_count -= len(txs)
"""An in-memory UTXO cache, representing all changes to UTXO state
since the last DB flush.
We want to store millions of these in memory for optimal
performance during initial sync, because then it is possible to
spend UTXOs without ever going to the database (other than as an
entry in the address history, and there is only one such entry per
TX not per UTXO). So store them in a Python dictionary with
binary keys and values.
Key: TX_HASH + TX_IDX (32 + 2 = 34 bytes)
Value: HASHX + TX_NUM + VALUE (11 + 4 + 8 = 23 bytes)
That's 57 bytes of raw data in-memory. Python dictionary overhead
means each entry actually uses about 205 bytes of memory. So
almost 5 million UTXOs can fit in 1GB of RAM. There are
approximately 42 million UTXOs on bitcoin mainnet at height
433,000.
Semantics:
add: Add it to the cache dictionary.
spend: Remove it if in the cache dictionary. Otherwise it's
been flushed to the DB. Each UTXO is responsible for two
entries in the DB. Mark them for deletion in the next
cache flush.
The UTXO database format has to be able to do two things efficiently:
1. Given an address be able to list its UTXOs and their values
so its balance can be efficiently computed.
2. When processing transactions, for each prevout spent - a (tx_hash,
idx) pair - we have to be able to remove it from the DB. To send
notifications to clients we also need to know any address it paid
to.
To this end we maintain two "tables", one for each point above:
1. Key: b'u' + address_hashX + tx_idx + tx_num
Value: the UTXO value as a 64-bit unsigned integer
2. Key: b'h' + compressed_tx_hash + tx_idx + tx_num
Value: hashX
The compressed tx hash is just the first few bytes of the hash of
the tx in which the UTXO was created. As this is not unique there
will be potential collisions so tx_num is also in the key. When
looking up a UTXO the prefix space of the compressed hash needs to
be searched and resolved if necessary with the tx_num. The
collision rate is low (<0.1%).
"""
def spend_utxo(self, tx_hash, tx_idx):
"""Spend a UTXO and return the 33-byte value.
If the UTXO is not in the cache it must be on disk. We store
all UTXOs so not finding one indicates a logic error or DB
corruption.
"""
# Fast track: the UTXO is still in the cache
idx_packed = pack('<H', tx_idx)
cache_value = self.utxo_cache.pop(tx_hash + idx_packed, None)
if cache_value:
return cache_value
# Spend it from the DB.
# Key: b'h' + compressed_tx_hash + tx_idx + tx_num
# Value: hashX
prefix = b'h' + tx_hash[:4] + idx_packed
candidates = {db_key: hashX for db_key, hashX
in self.db.utxo_db.iterator(prefix=prefix)}
for hdb_key, hashX in candidates.items():
tx_num_packed = hdb_key[-4:]
if len(candidates) > 1:
tx_num, = unpack('<I', tx_num_packed)
hash, height = self.db.fs_tx_hash(tx_num)
if hash != tx_hash:
assert hash is not None # Should always be found
continue
# Key: b'u' + address_hashX + tx_idx + tx_num
# Value: the UTXO value as a 64-bit unsigned integer
udb_key = b'u' + hashX + hdb_key[-6:]
utxo_value_packed = self.db.utxo_db.get(udb_key)
if utxo_value_packed:
# Remove both entries for this UTXO
self.db_deletes.append(hdb_key)
self.db_deletes.append(udb_key)
return hashX + tx_num_packed + utxo_value_packed
raise ChainError('UTXO {} / {:,d} not found in "h" table'
.format(hash_to_hex_str(tx_hash), tx_idx))
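# The collision handling in spend_utxo can be pictured with a toy
# in-memory "h" table (fabricated hashes sharing a 4-byte prefix;
# illustrative only, not part of this file): both rows match the prefix
# scan, and tx_num is what disambiguates them.
from struct import pack
tx_a = b'\xab\xcd\xef\x01' + bytes(28)      # two fake tx hashes with the
tx_b = b'\xab\xcd\xef\x01' + b'\xff' * 28   # same compressed 4-byte prefix
idx = pack('<H', 0)
h_table = {
    b'h' + tx_a[:4] + idx + pack('<I', 10): b'hashX-a',
    b'h' + tx_b[:4] + idx + pack('<I', 20): b'hashX-b',
}
prefix = b'h' + tx_a[:4] + idx
candidates = {k: v for k, v in h_table.items() if k.startswith(prefix)}
assert len(candidates) == 2  # ambiguous, so fs_tx_hash(tx_num) picks the row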
async def _process_prefetched_blocks(self):
"""Loop forever processing blocks as they arrive."""
while True:
if self.height == self.daemon.cached_height():
if not self._caught_up_event.is_set():
await self._first_caught_up()
self._caught_up_event.set()
await self.blocks_event.wait()
self.blocks_event.clear()
if self.reorg_count:
await self.reorg_chain(self.reorg_count)
self.reorg_count = 0
else:
blocks = self.prefetcher.get_prefetched_blocks()
await self.check_and_advance_blocks(blocks)
async def _first_caught_up(self):
self.logger.info(f'caught up to height {self.height}')
# Flush everything but with first_sync->False state.
first_sync = self.db.first_sync
self.db.first_sync = False
await self.flush(True)
if first_sync:
self.logger.info(f'{torba.__version__} synced to '
f'height {self.height:,d}')
# Reopen for serving
await self.db.open_for_serving()
async def _first_open_dbs(self):
await self.db.open_for_sync()
self.height = self.db.db_height
self.tip = self.db.db_tip
self.tx_count = self.db.db_tx_count
# --- External API
async def fetch_and_process_blocks(self, caught_up_event):
"""Fetch, process and index blocks from the daemon.
Sets caught_up_event when first caught up. Flushes to disk
and shuts down cleanly if cancelled.
This is mainly because if, during initial sync ElectrumX is
asked to shut down when a large number of blocks have been
processed but not written to disk, it should write those to
disk before exiting, as otherwise a significant amount of work
could be lost.
"""
self._caught_up_event = caught_up_event
try:
await self._first_open_dbs()
await asyncio.wait([
self.prefetcher.main_loop(self.height),
self._process_prefetched_blocks()
])
except asyncio.CancelledError:
raise
except:
self.logger.exception("Block processing failed!")
raise
finally:
# Shut down block processing
self.logger.info('flushing to DB for a clean shutdown...')
await self.flush(True)
self.db.close()
def force_chain_reorg(self, count):
"""Force a reorg of the given number of blocks.
Returns True if a reorg is queued, false if not caught up.
"""
if self._caught_up_event.is_set():
self.reorg_count = count
self.blocks_event.set()
return True
return False
class DecredBlockProcessor(BlockProcessor):
async def calc_reorg_range(self, count):
start, count = await super().calc_reorg_range(count)
if start > 0:
# A reorg in Decred can invalidate the previous block
start -= 1
count += 1
return start, count
class NamecoinBlockProcessor(BlockProcessor):
def advance_txs(self, height, txs, header):
result = super().advance_txs(height, txs, header)
tx_num = self.tx_count - len(txs)
script_name_hashX = self.coin.name_hashX_from_script
update_touched = self.touched.update
hashXs_by_tx = []
append_hashXs = hashXs_by_tx.append
for tx, tx_hash in txs:
hashXs = []
append_hashX = hashXs.append
# Add the new UTXOs and associate them with the name script
for idx, txout in enumerate(tx.outputs):
# Get the hashX of the name script. Ignore non-name scripts.
hashX = script_name_hashX(txout.pk_script)
if hashX:
append_hashX(hashX)
append_hashXs(hashXs)
update_touched(hashXs)
tx_num += 1
self.db.history.add_unflushed(hashXs_by_tx, self.tx_count - len(txs))
return result
class Timer:


@@ -4,7 +4,6 @@ from hashlib import sha256
from torba.server.script import ScriptPubKey, OpCodes
from torba.server.util import cachedproperty
from torba.server.hash import hash_to_hex_str, HASHX_LEN
from torba.server.coins import Coin, CoinError
from torba.server.tx import DeserializerSegWit
from lbry.wallet.script import OutputScript
@@ -12,6 +11,241 @@ from .session import LBRYElectrumX, LBRYSessionManager
from .block_processor import LBRYBlockProcessor
from .daemon import LBCDaemon
from .db.writer import LBRYDB
from collections import namedtuple
import re
import struct
from decimal import Decimal
from hashlib import sha256
from functools import partial
import base64
from typing import Type, List
import torba.server.util as util
from torba.server.hash import Base58, hash160, double_sha256, hash_to_hex_str
from torba.server.hash import HASHX_LEN, hex_str_to_hash
from torba.server.script import ScriptPubKey, OpCodes
import torba.server.tx as lib_tx
import torba.server.block_processor as block_proc
from torba.server.db import DB
import torba.server.daemon as daemon
from torba.server.session import ElectrumX, DashElectrumX, SessionManager
Block = namedtuple("Block", "raw header transactions")
OP_RETURN = OpCodes.OP_RETURN
class CoinError(Exception):
"""Exception raised for coin-related errors."""
class Coin:
"""Base class of coin hierarchy."""
REORG_LIMIT = 200
# Not sure if these are coin-specific
RPC_URL_REGEX = re.compile('.+@(\\[[0-9a-fA-F:]+\\]|[^:]+)(:[0-9]+)?')
VALUE_PER_COIN = 100000000
CHUNK_SIZE = 2016
BASIC_HEADER_SIZE = 80
STATIC_BLOCK_HEADERS = True
SESSIONCLS = ElectrumX
DESERIALIZER = lib_tx.Deserializer
DAEMON = daemon.Daemon
BLOCK_PROCESSOR = block_proc.BlockProcessor
SESSION_MANAGER = SessionManager
DB = DB
HEADER_VALUES = [
'version', 'prev_block_hash', 'merkle_root', 'timestamp', 'bits', 'nonce'
]
HEADER_UNPACK = struct.Struct('< I 32s 32s I I I').unpack_from
MEMPOOL_HISTOGRAM_REFRESH_SECS = 500
XPUB_VERBYTES = bytes('????', 'utf-8')
XPRV_VERBYTES = bytes('????', 'utf-8')
ENCODE_CHECK = Base58.encode_check
DECODE_CHECK = Base58.decode_check
# Peer discovery
PEER_DEFAULT_PORTS = {'t': '50001', 's': '50002'}
PEERS: List[str] = []
@classmethod
def lookup_coin_class(cls, name, net):
"""Return a coin class given name and network.
Raise an exception if unrecognised."""
req_attrs = ['TX_COUNT', 'TX_COUNT_HEIGHT', 'TX_PER_BLOCK']
for coin in util.subclasses(Coin):
if (coin.NAME.lower() == name.lower() and
coin.NET.lower() == net.lower()):
coin_req_attrs = req_attrs.copy()
missing = [attr for attr in coin_req_attrs
if not hasattr(coin, attr)]
if missing:
raise CoinError(f'coin {name} missing {missing} attributes')
return coin
raise CoinError(f'unknown coin {name} and network {net} combination')
@classmethod
def sanitize_url(cls, url):
# Remove surrounding whitespace and trailing slashes
url = url.strip().rstrip('/')
match = cls.RPC_URL_REGEX.match(url)
if not match:
raise CoinError(f'invalid daemon URL: "{url}"')
if match.groups()[1] is None:
url += f':{cls.RPC_PORT:d}'
if not url.startswith('http://') and not url.startswith('https://'):
url = 'http://' + url
return url + '/'
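# For illustration, what sanitize_url does to two inputs (hypothetical
# subclass, credentials and RPC_PORT; illustrative only, not part of
# this file):
class ExampleCoin(Coin):
    RPC_PORT = 9245  # made-up port just to complete the example
print(ExampleCoin.sanitize_url(' user:pass@127.0.0.1/ '))
# -> http://user:pass@127.0.0.1:9245/  (scheme, port and slash added)
print(ExampleCoin.sanitize_url('https://user:pass@node:9245'))
# -> https://user:pass@node:9245/      (already complete, slash added)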
@classmethod
def genesis_block(cls, block):
"""Check the Genesis block is the right one for this coin.
Return the block less its unspendable coinbase.
"""
header = cls.block_header(block, 0)
header_hex_hash = hash_to_hex_str(cls.header_hash(header))
if header_hex_hash != cls.GENESIS_HASH:
raise CoinError(f'genesis block has hash {header_hex_hash} expected {cls.GENESIS_HASH}')
return header + bytes(1)
@classmethod
def hashX_from_script(cls, script):
"""Returns a hashX from a script, or None if the script is provably
unspendable so the output can be dropped.
"""
if script and script[0] == OP_RETURN:
return None
return sha256(script).digest()[:HASHX_LEN]
@staticmethod
def lookup_xverbytes(verbytes):
"""Return a (is_xpub, coin_class) pair given xpub/xprv verbytes."""
# Order means BTC testnet will override NMC testnet
for coin in util.subclasses(Coin):
if verbytes == coin.XPUB_VERBYTES:
return True, coin
if verbytes == coin.XPRV_VERBYTES:
return False, coin
raise CoinError('version bytes unrecognised')
@classmethod
def address_to_hashX(cls, address):
"""Return a hashX given a coin address."""
return cls.hashX_from_script(cls.pay_to_address_script(address))
@classmethod
def P2PKH_address_from_hash160(cls, hash160):
"""Return a P2PKH address given a public key."""
assert len(hash160) == 20
return cls.ENCODE_CHECK(cls.P2PKH_VERBYTE + hash160)
@classmethod
def P2PKH_address_from_pubkey(cls, pubkey):
"""Return a coin address given a public key."""
return cls.P2PKH_address_from_hash160(hash160(pubkey))
@classmethod
def P2SH_address_from_hash160(cls, hash160):
"""Return a coin address given a hash160."""
assert len(hash160) == 20
return cls.ENCODE_CHECK(cls.P2SH_VERBYTES[0] + hash160)
@classmethod
def hash160_to_P2PKH_script(cls, hash160):
return ScriptPubKey.P2PKH_script(hash160)
@classmethod
def hash160_to_P2PKH_hashX(cls, hash160):
return cls.hashX_from_script(cls.hash160_to_P2PKH_script(hash160))
@classmethod
def pay_to_address_script(cls, address):
"""Return a pubkey script that pays to a pubkey hash.
Pass the address (either P2PKH or P2SH) in base58 form.
"""
raw = cls.DECODE_CHECK(address)
# Require version byte(s) plus hash160.
verbyte = -1
verlen = len(raw) - 20
if verlen > 0:
verbyte, hash160 = raw[:verlen], raw[verlen:]
if verbyte == cls.P2PKH_VERBYTE:
return cls.hash160_to_P2PKH_script(hash160)
if verbyte in cls.P2SH_VERBYTES:
return ScriptPubKey.P2SH_script(hash160)
raise CoinError(f'invalid address: {address}')
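# The verbyte split above just slices off everything before the final
# 20 bytes; a tiny standalone check (fabricated 21-byte payload, not a
# real address; illustrative only):
raw = b'\x55' + bytes(20)       # one version byte followed by a hash160
verlen = len(raw) - 20          # 1
verbyte, h160 = raw[:verlen], raw[verlen:]
assert verbyte == b'\x55' and len(h160) == 20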
@classmethod
def privkey_WIF(cls, privkey_bytes, compressed):
"""Return the private key encoded in Wallet Import Format."""
payload = bytearray(cls.WIF_BYTE) + privkey_bytes
if compressed:
payload.append(0x01)
return cls.ENCODE_CHECK(payload)
@classmethod
def header_hash(cls, header):
"""Given a header return hash"""
return double_sha256(header)
@classmethod
def header_prevhash(cls, header):
"""Given a header return previous hash"""
return header[4:36]
@classmethod
def static_header_offset(cls, height):
"""Given a header height return its offset in the headers file.
If header sizes change at some point, this is the only code
that needs updating."""
assert cls.STATIC_BLOCK_HEADERS
return height * cls.BASIC_HEADER_SIZE
@classmethod
def static_header_len(cls, height):
"""Given a header height return its length."""
return (cls.static_header_offset(height + 1)
- cls.static_header_offset(height))
@classmethod
def block_header(cls, block, height):
"""Returns the block header given a block and its height."""
return block[:cls.static_header_len(height)]
@classmethod
def block(cls, raw_block, height):
"""Return a Block namedtuple given a raw block and its height."""
header = cls.block_header(raw_block, height)
txs = cls.DESERIALIZER(raw_block, start=len(header)).read_tx_block()
return Block(raw_block, header, txs)
@classmethod
def decimal_value(cls, value):
"""Return the number of standard coin units as a Decimal given a
quantity of smallest units.
For example 1 BTC is returned for 100 million satoshis.
"""
return Decimal(value) / cls.VALUE_PER_COIN
@classmethod
def electrum_header(cls, header, height):
h = dict(zip(cls.HEADER_VALUES, cls.HEADER_UNPACK(header)))
# Add the height that is not present in the header itself
h['block_height'] = height
# Convert bytes to str
h['prev_block_hash'] = hash_to_hex_str(h['prev_block_hash'])
h['merkle_root'] = hash_to_hex_str(h['merkle_root'])
return h
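# The HEADER_UNPACK format above slices a standard 80-byte header; a
# standalone check (timestamp, bits and nonce are Bitcoin's genesis
# values, the hashes are zeroed dummies; illustrative only):
import struct
fmt = struct.Struct('< I 32s 32s I I I')
raw = fmt.pack(1, bytes(32), bytes(32), 1231006505, 0x1d00ffff, 2083236893)
assert len(raw) == 80
version, prev_hash, merkle_root, timestamp, bits, nonce = fmt.unpack_from(raw)
assert (version, timestamp, nonce) == (1, 1231006505, 2083236893)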
class LBC(Coin):


@@ -1,7 +1,464 @@
from functools import wraps
from torba.rpc.jsonrpc import RPCError
from torba.server.daemon import Daemon, DaemonError
import asyncio
import itertools
import json
import time
from calendar import timegm
from struct import pack
from time import strptime
import aiohttp
from torba.server.util import hex_to_bytes, class_logger, \
unpack_le_uint16_from, pack_varint
from torba.server.hash import hex_str_to_hash, hash_to_hex_str
from torba.server.tx import DeserializerDecred
from torba.rpc import JSONRPC
class DaemonError(Exception):
"""Raised when the daemon returns an error in its results."""
class WarmingUpError(Exception):
"""Internal - when the daemon is warming up."""
class WorkQueueFullError(Exception):
"""Internal - when the daemon's work queue is full."""
class Daemon:
"""Handles connections to a daemon at the given URL."""
WARMING_UP = -28
id_counter = itertools.count()
def __init__(self, coin, url, max_workqueue=10, init_retry=0.25,
max_retry=4.0):
self.coin = coin
self.logger = class_logger(__name__, self.__class__.__name__)
self.set_url(url)
# Limit concurrent RPC calls to this number.
# See DEFAULT_HTTP_WORKQUEUE in bitcoind, which is typically 16
self.workqueue_semaphore = asyncio.Semaphore(value=max_workqueue)
self.init_retry = init_retry
self.max_retry = max_retry
self._height = None
self.available_rpcs = {}
self.connector = aiohttp.TCPConnector()
async def close(self):
if self.connector:
await self.connector.close()
self.connector = None
def set_url(self, url):
"""Set the URLS to the given list, and switch to the first one."""
urls = url.split(',')
urls = [self.coin.sanitize_url(url) for url in urls]
for n, url in enumerate(urls):
status = '' if n else ' (current)'
logged_url = self.logged_url(url)
self.logger.info(f'daemon #{n + 1} at {logged_url}{status}')
self.url_index = 0
self.urls = urls
def current_url(self):
"""Returns the current daemon URL."""
return self.urls[self.url_index]
def logged_url(self, url=None):
"""The host and port part, for logging."""
url = url or self.current_url()
return url[url.rindex('@') + 1:]
def failover(self):
"""Call to fail-over to the next daemon URL.
Returns False if there is only one, otherwise True.
"""
if len(self.urls) > 1:
self.url_index = (self.url_index + 1) % len(self.urls)
self.logger.info(f'failing over to {self.logged_url()}')
return True
return False
def client_session(self):
"""An aiohttp client session."""
return aiohttp.ClientSession(connector=self.connector, connector_owner=False)
async def _send_data(self, data):
if not self.connector:
raise asyncio.CancelledError('Tried to send request during shutdown.')
async with self.workqueue_semaphore:
async with self.client_session() as session:
async with session.post(self.current_url(), data=data) as resp:
kind = resp.headers.get('Content-Type', None)
if kind == 'application/json':
return await resp.json()
# bitcoind's HTTP protocol "handling" is a bad joke
text = await resp.text()
if 'Work queue depth exceeded' in text:
raise WorkQueueFullError
text = text.strip() or resp.reason
self.logger.error(text)
raise DaemonError(text)
async def _send(self, payload, processor):
"""Send a payload to be converted to JSON.
Handles temporary connection issues. Daemon response errors
are raised through DaemonError.
"""
def log_error(error):
nonlocal last_error_log, retry
now = time.time()
if now - last_error_log > 60:
last_error_log = now
self.logger.error(f'{error} Retrying occasionally...')
if retry == self.max_retry and self.failover():
retry = 0
on_good_message = None
last_error_log = 0
data = json.dumps(payload)
retry = self.init_retry
while True:
try:
result = await self._send_data(data)
result = processor(result)
if on_good_message:
self.logger.info(on_good_message)
return result
except asyncio.TimeoutError:
log_error('timeout error.')
except aiohttp.ServerDisconnectedError:
log_error('disconnected.')
on_good_message = 'connection restored'
except aiohttp.ClientConnectionError:
log_error('connection problem - is your daemon running?')
on_good_message = 'connection restored'
except aiohttp.ClientError as e:
log_error(f'daemon error: {e}')
on_good_message = 'running normally'
except WarmingUpError:
log_error('starting up checking blocks.')
on_good_message = 'running normally'
except WorkQueueFullError:
log_error('work queue full.')
on_good_message = 'running normally'
await asyncio.sleep(retry)
retry = max(min(self.max_retry, retry * 2), self.init_retry)
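# The last line above produces an exponential backoff capped at
# max_retry; the schedule with the constructor defaults (0.25s / 4.0s):
retry, delays = 0.25, []
for _ in range(6):
    delays.append(retry)
    retry = max(min(4.0, retry * 2), 0.25)
assert delays == [0.25, 0.5, 1.0, 2.0, 4.0, 4.0]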
async def _send_single(self, method, params=None):
"""Send a single request to the daemon."""
def processor(result):
err = result['error']
if not err:
return result['result']
if err.get('code') == self.WARMING_UP:
raise WarmingUpError
raise DaemonError(err)
payload = {'method': method, 'id': next(self.id_counter)}
if params:
payload['params'] = params
return await self._send(payload, processor)
async def _send_vector(self, method, params_iterable, replace_errs=False):
"""Send several requests of the same method.
The result will be an array of the same length as params_iterable.
If replace_errs is true, any item with an error is returned as None,
otherwise an exception is raised."""
def processor(result):
errs = [item['error'] for item in result if item['error']]
if any(err.get('code') == self.WARMING_UP for err in errs):
raise WarmingUpError
if not errs or replace_errs:
return [item['result'] for item in result]
raise DaemonError(errs)
payload = [{'method': method, 'params': p, 'id': next(self.id_counter)}
for p in params_iterable]
if payload:
return await self._send(payload, processor)
return []
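# For reference, the batched payload built for e.g. block_hex_hashes(100, 3)
# looks like this (ids come from the shared class counter, so the exact
# numbers vary; illustrative only):
# [{'method': 'getblockhash', 'params': (100,), 'id': 0},
#  {'method': 'getblockhash', 'params': (101,), 'id': 1},
#  {'method': 'getblockhash', 'params': (102,), 'id': 2}]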
async def _is_rpc_available(self, method):
"""Return whether given RPC method is available in the daemon.
Results are cached and the daemon will generally not be queried with
the same method more than once."""
available = self.available_rpcs.get(method)
if available is None:
available = True
try:
await self._send_single(method)
except DaemonError as e:
err = e.args[0]
error_code = err.get("code")
available = error_code != JSONRPC.METHOD_NOT_FOUND
self.available_rpcs[method] = available
return available
async def block_hex_hashes(self, first, count):
"""Return the hex hashes of count block starting at height first."""
params_iterable = ((h, ) for h in range(first, first + count))
return await self._send_vector('getblockhash', params_iterable)
async def deserialised_block(self, hex_hash):
"""Return the deserialised block with the given hex hash."""
return await self._send_single('getblock', (hex_hash, True))
async def raw_blocks(self, hex_hashes):
"""Return the raw binary blocks with the given hex hashes."""
params_iterable = ((h, False) for h in hex_hashes)
blocks = await self._send_vector('getblock', params_iterable)
# Convert hex string to bytes
return [hex_to_bytes(block) for block in blocks]
async def mempool_hashes(self):
"""Update our record of the daemon's mempool hashes."""
return await self._send_single('getrawmempool')
async def estimatefee(self, block_count):
"""Return the fee estimate for the block count. Units are whole
currency units per KB, e.g. 0.00000995, or -1 if no estimate
is available.
"""
args = (block_count, )
if await self._is_rpc_available('estimatesmartfee'):
estimate = await self._send_single('estimatesmartfee', args)
return estimate.get('feerate', -1)
return await self._send_single('estimatefee', args)
async def getnetworkinfo(self):
"""Return the result of the 'getnetworkinfo' RPC call."""
return await self._send_single('getnetworkinfo')
async def relayfee(self):
"""The minimum fee a low-priority tx must pay in order to be accepted
to the daemon's memory pool."""
network_info = await self.getnetworkinfo()
return network_info['relayfee']
async def getrawtransaction(self, hex_hash, verbose=False):
"""Return the serialized raw transaction with the given hash."""
# Cast to int because some coin daemons are old and require it
return await self._send_single('getrawtransaction',
(hex_hash, int(verbose)))
async def getrawtransactions(self, hex_hashes, replace_errs=True):
"""Return the serialized raw transactions with the given hashes.
Replaces errors with None by default."""
params_iterable = ((hex_hash, 0) for hex_hash in hex_hashes)
txs = await self._send_vector('getrawtransaction', params_iterable,
replace_errs=replace_errs)
# Convert hex strings to bytes
return [hex_to_bytes(tx) if tx else None for tx in txs]
async def broadcast_transaction(self, raw_tx):
"""Broadcast a transaction to the network."""
return await self._send_single('sendrawtransaction', (raw_tx, ))
async def height(self):
"""Query the daemon for its current height."""
self._height = await self._send_single('getblockcount')
return self._height
def cached_height(self):
"""Return the cached daemon height.
If the daemon has not been queried yet this returns None."""
return self._height
class DashDaemon(Daemon):
async def masternode_broadcast(self, params):
"""Broadcast a transaction to the network."""
return await self._send_single('masternodebroadcast', params)
async def masternode_list(self, params):
"""Return the masternode status."""
return await self._send_single('masternodelist', params)
class FakeEstimateFeeDaemon(Daemon):
"""Daemon that simulates estimatefee and relayfee RPC calls. Coin that
wants to use this daemon must define ESTIMATE_FEE & RELAY_FEE"""
async def estimatefee(self, block_count):
"""Return the fee estimate for the given parameters."""
return self.coin.ESTIMATE_FEE
async def relayfee(self):
"""The minimum fee a low-priority tx must pay in order to be accepted
to the daemon's memory pool."""
return self.coin.RELAY_FEE
class LegacyRPCDaemon(Daemon):
"""Handles connections to a daemon at the given URL.
This class is useful for daemons that don't have the new 'getblock'
RPC call that returns the block in hex; the workaround is to manually
recreate the block bytes. The recreated block bytes may not exactly match
the underlying blockchain, but they are good enough for our indexing
purposes."""
async def raw_blocks(self, hex_hashes):
"""Return the raw binary blocks with the given hex hashes."""
params_iterable = ((h, ) for h in hex_hashes)
block_info = await self._send_vector('getblock', params_iterable)
blocks = []
for i in block_info:
raw_block = await self.make_raw_block(i)
blocks.append(raw_block)
# Blocks are already raw bytes at this point
return blocks
async def make_raw_header(self, b):
pbh = b.get('previousblockhash')
if pbh is None:
pbh = '0' * 64
return b''.join([
pack('<L', b.get('version')),
hex_str_to_hash(pbh),
hex_str_to_hash(b.get('merkleroot')),
pack('<L', self.timestamp_safe(b['time'])),
pack('<L', int(b.get('bits'), 16)),
pack('<L', int(b.get('nonce')))
])
async def make_raw_block(self, b):
"""Construct a raw block"""
header = await self.make_raw_header(b)
transactions = []
if b.get('height') > 0:
transactions = await self.getrawtransactions(b.get('tx'), False)
raw_block = header
num_txs = len(transactions)
if num_txs > 0:
raw_block += pack_varint(num_txs)
raw_block += b''.join(transactions)
else:
raw_block += b'\x00'
return raw_block
def timestamp_safe(self, t):
if isinstance(t, int):
return t
return timegm(strptime(t, "%Y-%m-%d %H:%M:%S %Z"))
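# A quick sanity check of the string fallback above, reusing the same
# strptime format (stdlib only; illustrative, not part of this file):
from calendar import timegm
from time import strptime
assert timegm(strptime('2017-01-01 00:00:00 GMT',
                       '%Y-%m-%d %H:%M:%S %Z')) == 1483228800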
class DecredDaemon(Daemon):
async def raw_blocks(self, hex_hashes):
"""Return the raw binary blocks with the given hex hashes."""
params_iterable = ((h, False) for h in hex_hashes)
blocks = await self._send_vector('getblock', params_iterable)
raw_blocks = []
valid_tx_tree = {}
for block in blocks:
# Convert to bytes from hex
raw_block = hex_to_bytes(block)
raw_blocks.append(raw_block)
# Check if previous block is valid
prev = self.prev_hex_hash(raw_block)
votebits = unpack_le_uint16_from(raw_block[100:102])[0]
valid_tx_tree[prev] = self.is_valid_tx_tree(votebits)
processed_raw_blocks = []
for hash, raw_block in zip(hex_hashes, raw_blocks):
if hash in valid_tx_tree:
is_valid = valid_tx_tree[hash]
else:
# Do something complicated to figure out if this block is valid
header = await self._send_single('getblockheader', (hash, ))
if 'nextblockhash' not in header:
raise DaemonError(f'Could not find next block for {hash}')
next_hash = header['nextblockhash']
next_header = await self._send_single('getblockheader',
(next_hash, ))
is_valid = self.is_valid_tx_tree(next_header['votebits'])
if is_valid:
processed_raw_blocks.append(raw_block)
else:
# If this block is invalid remove the normal transactions
self.logger.info(f'block {hash} is invalidated')
processed_raw_blocks.append(self.strip_tx_tree(raw_block))
return processed_raw_blocks
@staticmethod
def prev_hex_hash(raw_block):
return hash_to_hex_str(raw_block[4:36])
@staticmethod
def is_valid_tx_tree(votebits):
# Check if previous block was invalidated.
return bool(votebits & (1 << 0) != 0)
def strip_tx_tree(self, raw_block):
c = self.coin
assert issubclass(c.DESERIALIZER, DeserializerDecred)
d = c.DESERIALIZER(raw_block, start=c.BASIC_HEADER_SIZE)
d.read_tx_tree() # Skip normal transactions
# Create a fake block without any normal transactions
return raw_block[:c.BASIC_HEADER_SIZE] + b'\x00' + raw_block[d.cursor:]
async def height(self):
height = await super().height()
if height > 0:
# Lie about the daemon height as the current tip can be invalidated
height -= 1
self._height = height
return height
async def mempool_hashes(self):
mempool = await super().mempool_hashes()
# Add current tip transactions to the 'fake' mempool.
real_height = await self._send_single('getblockcount')
tip_hash = await self._send_single('getblockhash', (real_height,))
tip = await self.deserialised_block(tip_hash)
# Add normal transactions except coinbase
mempool += tip['tx'][1:]
# Add stake transactions if applicable
mempool += tip.get('stx', [])
return mempool
def client_session(self):
# FIXME allow self signed certificates
connector = aiohttp.TCPConnector(verify_ssl=False)
return aiohttp.ClientSession(connector=connector)
class PreLegacyRPCDaemon(LegacyRPCDaemon):
"""Handles connections to a daemon at the given URL.
This class is useful for daemons that don't have the new 'getblock'
RPC call that returns the block in hex, and need the False parameter
for the getblock"""
async def deserialised_block(self, hex_hash):
"""Return the deserialised block with the given hex hash."""
return await self._send_single('getblock', (hex_hash, False))
def handles_errors(decorated_function):

File diff suppressed because it is too large


@@ -1,9 +1,15 @@
import os
import sys
from lbry import __name__, __version__
from setuptools import setup, find_packages
BASE = os.path.dirname(__file__)
README_PATH = os.path.join(BASE, 'README.md')
with open(os.path.join(BASE, 'README.md'), encoding='utf-8') as fh:
long_description = fh.read()
PLYVEL = []
if sys.platform.startswith('linux'):
PLYVEL.append('plyvel==1.0.5')
setup(
name=__name__,
@@ -12,7 +18,7 @@ setup(
author_email="hello@lbry.com",
url="https://lbry.com",
description="A decentralized media library and marketplace",
long_description=open(README_PATH, encoding='utf-8').read(),
long_description=long_description,
long_description_content_type="text/markdown",
keywords="lbry protocol media",
license='MIT',
@@ -20,10 +26,12 @@ setup(
packages=find_packages(exclude=('tests',)),
zip_safe=False,
entry_points={
'console_scripts': 'lbrynet=lbry.extras.cli:main'
'console_scripts': [
'lbrynet=lbry.extras.cli:main',
'torba-server=torba.server.cli:main',
],
},
install_requires=[
'torba',
'aiohttp==3.5.4',
'aioupnp==0.0.16',
'appdirs==1.4.3',
@@ -40,5 +48,22 @@
'docopt==0.6.2',
'hachoir',
'multidict==4.6.1',
'coincurve==11.0.0',
'pbkdf2==1.3',
'attrs==18.2.0',
'pylru==1.1.0'
] + PLYVEL,
classifiers=[
'Framework :: AsyncIO',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Operating System :: OS Independent',
'Topic :: Internet',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Distributed Computing',
'Topic :: Utilities',
],
)


@@ -40,6 +40,63 @@ class TestAccount(AsyncioTestCase):
addresses = await account.change.get_addresses()
self.assertEqual(len(addresses), 6)
async def test_generate_keys_over_batch_threshold_saves_it_properly(self):
async with self.account.receiving.address_generator_lock:
await self.account.receiving._generate_keys(0, 200)
records = await self.account.receiving.get_address_records()
self.assertEqual(len(records), 201)
async def test_ensure_address_gap(self):
account = self.account
self.assertIsInstance(account.receiving, HierarchicalDeterministic)
async with account.receiving.address_generator_lock:
await account.receiving._generate_keys(4, 7)
await account.receiving._generate_keys(0, 3)
await account.receiving._generate_keys(8, 11)
records = await account.receiving.get_address_records()
self.assertListEqual(
[r['pubkey'].n for r in records],
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
)
# we have 12, but default gap is 20
new_keys = await account.receiving.ensure_address_gap()
self.assertEqual(len(new_keys), 8)
records = await account.receiving.get_address_records()
self.assertListEqual(
[r['pubkey'].n for r in records],
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19]
)
# case #1: no new addresses needed
empty = await account.receiving.ensure_address_gap()
self.assertEqual(len(empty), 0)
# case #2: only one new address needed
records = await account.receiving.get_address_records()
await self.ledger.db.set_address_history(records[0]['address'], 'a:1:')
new_keys = await account.receiving.ensure_address_gap()
self.assertEqual(len(new_keys), 1)
# case #3: 20 addresses needed
await self.ledger.db.set_address_history(new_keys[0], 'a:1:')
new_keys = await account.receiving.ensure_address_gap()
self.assertEqual(len(new_keys), 20)
async def test_get_or_create_usable_address(self):
account = self.account
keys = await account.receiving.get_addresses()
self.assertEqual(len(keys), 0)
address = await account.receiving.get_or_create_usable_address()
self.assertIsNotNone(address)
keys = await account.receiving.get_addresses()
self.assertEqual(len(keys), 20)
async def test_generate_account_from_seed(self):
account = Account.from_dict(
self.ledger, Wallet(), {
@@ -74,7 +131,7 @@ class TestAccount(AsyncioTestCase):
)
self.assertIsNone(private_key)
def test_load_and_save_account(self):
async def test_load_and_save_account(self):
account_data = {
'name': 'Main Account',
'modified_on': 123.456,
@@ -97,6 +154,14 @@
}
account = Account.from_dict(self.ledger, Wallet(), account_data)
await account.ensure_address_gap()
addresses = await account.receiving.get_addresses()
self.assertEqual(len(addresses), 5)
addresses = await account.change.get_addresses()
self.assertEqual(len(addresses), 5)
account_data['ledger'] = 'lbc_mainnet'
self.assertDictEqual(account_data, account.to_dict())
@@ -116,3 +181,317 @@
# doesn't fail for single-address account
account2 = Account.generate(self.ledger, Wallet(), 'lbryum', {'name': 'single-address'})
await account2.save_max_gap()
def test_merge_diff(self):
account_data = {
'name': 'My Account',
'modified_on': 123.456,
'seed':
"carbon smart garage balance margin twelve chest sword toast envelope bottom stomac"
"h absent",
'encrypted': False,
'private_key':
'xprv9s21ZrQH143K3TsAz5efNV8K93g3Ms3FXcjaWB9fVUsMwAoE3ZT4vYymkp'
'5BxKKfnpz8J6sHDFriX1SnpvjNkzcks8XBnxjGLS83BTyfpna',
'public_key':
'xpub661MyMwAqRbcFwwe67Bfjd53h5WXmKm6tqfBJZZH3pQLoy8Nb6mKUMJFc7'
'UbpVNzmwFPN2evn3YHnig1pkKVYcvCV8owTd2yAcEkJfCX53g',
'address_generator': {
'name': 'deterministic-chain',
'receiving': {'gap': 5, 'maximum_uses_per_address': 2},
'change': {'gap': 5, 'maximum_uses_per_address': 2}
}
}
account = self.ledger.account_class.from_dict(self.ledger, Wallet(), account_data)
self.assertEqual(account.name, 'My Account')
self.assertEqual(account.modified_on, 123.456)
self.assertEqual(account.change.gap, 5)
self.assertEqual(account.change.maximum_uses_per_address, 2)
self.assertEqual(account.receiving.gap, 5)
self.assertEqual(account.receiving.maximum_uses_per_address, 2)
account_data['name'] = 'Changed Name'
account_data['address_generator']['change']['gap'] = 6
account_data['address_generator']['change']['maximum_uses_per_address'] = 7
account_data['address_generator']['receiving']['gap'] = 8
account_data['address_generator']['receiving']['maximum_uses_per_address'] = 9
account.merge(account_data)
# no change because modified_on is not newer
self.assertEqual(account.name, 'My Account')
account_data['modified_on'] = 200.00
account.merge(account_data)
self.assertEqual(account.name, 'Changed Name')
self.assertEqual(account.change.gap, 6)
self.assertEqual(account.change.maximum_uses_per_address, 7)
self.assertEqual(account.receiving.gap, 8)
self.assertEqual(account.receiving.maximum_uses_per_address, 9)
class TestSingleKeyAccount(AsyncioTestCase):
async def asyncSetUp(self):
self.ledger = ledger_class({
'db': ledger_class.database_class(':memory:'),
'headers': ledger_class.headers_class(':memory:'),
})
await self.ledger.db.open()
self.account = self.ledger.account_class.generate(
self.ledger, Wallet(), "torba", {'name': 'single-address'})
async def asyncTearDown(self):
await self.ledger.db.close()
async def test_generate_account(self):
account = self.account
self.assertEqual(account.ledger, self.ledger)
self.assertIsNotNone(account.seed)
self.assertEqual(account.public_key.ledger, self.ledger)
self.assertEqual(account.private_key.public_key, account.public_key)
addresses = await account.receiving.get_addresses()
self.assertEqual(len(addresses), 0)
addresses = await account.change.get_addresses()
self.assertEqual(len(addresses), 0)
await account.ensure_address_gap()
addresses = await account.receiving.get_addresses()
self.assertEqual(len(addresses), 1)
self.assertEqual(addresses[0], account.public_key.address)
addresses = await account.change.get_addresses()
self.assertEqual(len(addresses), 1)
self.assertEqual(addresses[0], account.public_key.address)
addresses = await account.get_addresses()
self.assertEqual(len(addresses), 1)
self.assertEqual(addresses[0], account.public_key.address)
async def test_ensure_address_gap(self):
account = self.account
self.assertIsInstance(account.receiving, SingleKey)
addresses = await account.receiving.get_addresses()
self.assertListEqual(addresses, [])
# a single-key chain ignores the gap and only ever returns its one address
new_keys = await account.receiving.ensure_address_gap()
self.assertEqual(len(new_keys), 1)
self.assertEqual(new_keys[0], account.public_key.address)
records = await account.receiving.get_address_records()
pubkey = records[0].pop('pubkey')
self.assertListEqual(records, [{
'chain': 0,
'account': account.public_key.address,
'address': account.public_key.address,
'history': None,
'used_times': 0
}])
self.assertEqual(
pubkey.extended_key_string(),
account.public_key.extended_key_string()
)
# case #1: no new addresses needed
empty = await account.receiving.ensure_address_gap()
self.assertEqual(len(empty), 0)
# case #2: after use, still no new address needed
records = await account.receiving.get_address_records()
await self.ledger.db.set_address_history(records[0]['address'], 'a:1:')
empty = await account.receiving.ensure_address_gap()
self.assertEqual(len(empty), 0)
async def test_get_or_create_usable_address(self):
account = self.account
addresses = await account.receiving.get_addresses()
self.assertEqual(len(addresses), 0)
address1 = await account.receiving.get_or_create_usable_address()
self.assertIsNotNone(address1)
await self.ledger.db.set_address_history(address1, 'a:1:b:2:c:3:')
records = await account.receiving.get_address_records()
self.assertEqual(records[0]['used_times'], 3)
address2 = await account.receiving.get_or_create_usable_address()
self.assertEqual(address1, address2)
keys = await account.receiving.get_addresses()
self.assertEqual(len(keys), 1)
async def test_generate_account_from_seed(self):
account = self.ledger.account_class.from_dict(
self.ledger, Wallet(), {
"seed":
"carbon smart garage balance margin twelve chest sword toas"
"t envelope bottom stomach absent",
'address_generator': {'name': 'single-address'}
}
)
self.assertEqual(
account.private_key.extended_key_string(),
'xprv9s21ZrQH143K3TsAz5efNV8K93g3Ms3FXcjaWB9fVUsMwAoE3ZT4vYymkp'
'5BxKKfnpz8J6sHDFriX1SnpvjNkzcks8XBnxjGLS83BTyfpna',
)
self.assertEqual(
account.public_key.extended_key_string(),
'xpub661MyMwAqRbcFwwe67Bfjd53h5WXmKm6tqfBJZZH3pQLoy8Nb6mKUMJFc7'
'UbpVNzmwFPN2evn3YHnig1pkKVYcvCV8owTd2yAcEkJfCX53g',
)
address = await account.receiving.ensure_address_gap()
self.assertEqual(address[0], account.public_key.address)
private_key = await self.ledger.get_private_key_for_address(
account.wallet, address[0]
)
self.assertEqual(
private_key.extended_key_string(),
'xprv9s21ZrQH143K3TsAz5efNV8K93g3Ms3FXcjaWB9fVUsMwAoE3ZT4vYymkp'
'5BxKKfnpz8J6sHDFriX1SnpvjNkzcks8XBnxjGLS83BTyfpna',
)
invalid_key = await self.ledger.get_private_key_for_address(
account.wallet, 'BcQjRlhDOIrQez1WHfz3whnB33Bp34sUgX'
)
self.assertIsNone(invalid_key)
self.assertEqual(
hexlify(private_key.wif()),
b'1c92caa0ef99bfd5e2ceb73b66da8cd726a9370be8c368d448a322f3c5b23aaab901'
)
async def test_load_and_save_account(self):
account_data = {
'name': 'My Account',
'modified_on': 123.456,
'seed':
"carbon smart garage balance margin twelve chest sword toast envelope bottom stomac"
"h absent",
'encrypted': False,
'private_key':
'xprv9s21ZrQH143K3TsAz5efNV8K93g3Ms3FXcjaWB9fVUsMwAoE3ZT4vYymkp'
'5BxKKfnpz8J6sHDFriX1SnpvjNkzcks8XBnxjGLS83BTyfpna',
'public_key':
'xpub661MyMwAqRbcFwwe67Bfjd53h5WXmKm6tqfBJZZH3pQLoy8Nb6mKUMJFc7'
'UbpVNzmwFPN2evn3YHnig1pkKVYcvCV8owTd2yAcEkJfCX53g',
'address_generator': {'name': 'single-address'}
}
account = self.ledger.account_class.from_dict(self.ledger, Wallet(), account_data)
await account.ensure_address_gap()
addresses = await account.receiving.get_addresses()
self.assertEqual(len(addresses), 1)
addresses = await account.change.get_addresses()
self.assertEqual(len(addresses), 1)
self.maxDiff = None
account_data['ledger'] = 'btc_mainnet'
self.assertDictEqual(account_data, account.to_dict())
class AccountEncryptionTests(AsyncioTestCase):
password = "password"
init_vector = b'0000000000000000'
unencrypted_account = {
'name': 'My Account',
'seed':
"carbon smart garage balance margin twelve chest sword toast envelope bottom stomac"
"h absent",
'encrypted': False,
'private_key':
'xprv9s21ZrQH143K3TsAz5efNV8K93g3Ms3FXcjaWB9fVUsMwAoE3ZT4vYymkp'
'5BxKKfnpz8J6sHDFriX1SnpvjNkzcks8XBnxjGLS83BTyfpna',
'public_key':
'xpub661MyMwAqRbcFwwe67Bfjd53h5WXmKm6tqfBJZZH3pQLoy8Nb6mKUMJFc7'
'UbpVNzmwFPN2evn3YHnig1pkKVYcvCV8owTd2yAcEkJfCX53g',
'address_generator': {'name': 'single-address'}
}
encrypted_account = {
'name': 'My Account',
'seed':
"MDAwMDAwMDAwMDAwMDAwMJ4e4W4pE6nQtPiD6MujNIQ7aFPhUBl63GwPziAgGN"
"MBTMoaSjZfyyvw7ELMCqAYTWJ61aV7K4lmd2hR11g9dpdnnpCb9f9j3zLZHRv7+"
"bIkZ//trah9AIkmrc/ZvNkC0Q==",
'encrypted': True,
'private_key':
'MDAwMDAwMDAwMDAwMDAwMLkWikOLScA/ZxlFSGU7dl//7Q/1gS9h7vqQyrd8DX+'
'jwcp7SwlJ1mkMwuraUaWLq9/LxiaGmqJBUZ50p77YVZbDycaCN1unBr1/i1q6RP'
'Ob2MNCaG8nyjxZhQai+V/2JmJ+UnFMp3nHany7F8/Hr0g=',
'public_key':
'xpub661MyMwAqRbcFwwe67Bfjd53h5WXmKm6tqfBJZZH3pQLoy8Nb6mKUMJFc7'
'UbpVNzmwFPN2evn3YHnig1pkKVYcvCV8owTd2yAcEkJfCX53g',
'address_generator': {'name': 'single-address'}
}
async def asyncSetUp(self):
self.ledger = ledger_class({
'db': ledger_class.database_class(':memory:'),
'headers': ledger_class.headers_class(':memory:'),
})
def test_encrypt_wallet(self):
account = self.ledger.account_class.from_dict(self.ledger, Wallet(), self.unencrypted_account)
account.init_vectors = {
'seed': self.init_vector,
'private_key': self.init_vector
}
self.assertFalse(account.encrypted)
self.assertIsNotNone(account.private_key)
account.encrypt(self.password)
self.assertTrue(account.encrypted)
self.assertEqual(account.seed, self.encrypted_account['seed'])
self.assertEqual(account.private_key_string, self.encrypted_account['private_key'])
self.assertIsNone(account.private_key)
self.assertEqual(account.to_dict()['seed'], self.encrypted_account['seed'])
self.assertEqual(account.to_dict()['private_key'], self.encrypted_account['private_key'])
account.decrypt(self.password)
self.assertEqual(account.init_vectors['private_key'], self.init_vector)
self.assertEqual(account.init_vectors['seed'], self.init_vector)
self.assertEqual(account.seed, self.unencrypted_account['seed'])
self.assertEqual(account.private_key.extended_key_string(), self.unencrypted_account['private_key'])
self.assertEqual(account.to_dict(encrypt_password=self.password)['seed'], self.encrypted_account['seed'])
self.assertEqual(account.to_dict(encrypt_password=self.password)['private_key'], self.encrypted_account['private_key'])
self.assertFalse(account.encrypted)
def test_decrypt_wallet(self):
account = self.ledger.account_class.from_dict(self.ledger, Wallet(), self.encrypted_account)
self.assertTrue(account.encrypted)
account.decrypt(self.password)
self.assertEqual(account.init_vectors['private_key'], self.init_vector)
self.assertEqual(account.init_vectors['seed'], self.init_vector)
self.assertFalse(account.encrypted)
self.assertEqual(account.seed, self.unencrypted_account['seed'])
self.assertEqual(account.private_key.extended_key_string(), self.unencrypted_account['private_key'])
self.assertEqual(account.to_dict(encrypt_password=self.password)['seed'], self.encrypted_account['seed'])
self.assertEqual(account.to_dict(encrypt_password=self.password)['private_key'], self.encrypted_account['private_key'])
self.assertEqual(account.to_dict()['seed'], self.unencrypted_account['seed'])
self.assertEqual(account.to_dict()['private_key'], self.unencrypted_account['private_key'])
def test_encrypt_decrypt_read_only_account(self):
account_data = self.unencrypted_account.copy()
del account_data['seed']
del account_data['private_key']
account = self.ledger.account_class.from_dict(self.ledger, Wallet(), account_data)
encrypted = account.to_dict('password')
self.assertFalse(encrypted['seed'])
self.assertFalse(encrypted['private_key'])
account.encrypt('password')
account.decrypt('password')
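# A sketch of the scheme the encrypted fixtures above imply:
# base64(init_vector + AES-CBC ciphertext), PKCS7-padded. Deriving the
# key as double-SHA256 of the password is an assumption here, not
# something this diff confirms.
import base64
from hashlib import sha256
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
from cryptography.hazmat.primitives.padding import PKCS7

def encrypt_like_fixture(password: str, plaintext: str, iv: bytes) -> str:
    key = sha256(sha256(password.encode()).digest()).digest()
    padder = PKCS7(128).padder()
    padded = padder.update(plaintext.encode()) + padder.finalize()
    encryptor = Cipher(algorithms.AES(key), modes.CBC(iv), default_backend()).encryptor()
    return base64.b64encode(iv + encryptor.update(padded) + encryptor.finalize()).decode()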


@ -1,3 +1,12 @@
import asyncio
import os
import tempfile
from binascii import hexlify
from torba.client.hash import sha256
from torba.testcase import AsyncioTestCase
from torba.coin.bitcoinsegwit import MainHeaders
from binascii import unhexlify
from torba.testcase import AsyncioTestCase
@ -6,6 +15,157 @@ from torba.client.util import ArithUint256
from lbry.wallet.ledger import Headers
def block_bytes(blocks):
return blocks * MainHeaders.header_size
class BitcoinHeadersTestCase(AsyncioTestCase):
HEADER_FILE = 'bitcoin_headers'
RETARGET_BLOCK = 32256 # difficulty: 1 -> 1.18
def setUp(self):
self.maxDiff = None
self.header_file_name = os.path.join(os.path.dirname(__file__), self.HEADER_FILE)
def get_bytes(self, upto: int = -1, after: int = 0) -> bytes:
with open(self.header_file_name, 'rb') as headers:
headers.seek(after, os.SEEK_SET)
return headers.read(upto)
async def get_headers(self, upto: int = -1):
h = MainHeaders(':memory:')
h.io.write(self.get_bytes(upto))
return h
class BasicHeadersTests(BitcoinHeadersTestCase):
async def test_serialization(self):
h = await self.get_headers()
self.assertDictEqual(h[0], {
'bits': 486604799,
'block_height': 0,
'merkle_root': b'4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b',
'nonce': 2083236893,
'prev_block_hash': b'0000000000000000000000000000000000000000000000000000000000000000',
'timestamp': 1231006505,
'version': 1
})
self.assertDictEqual(h[self.RETARGET_BLOCK-1], {
'bits': 486604799,
'block_height': 32255,
'merkle_root': b'89b4f223789e40b5b475af6483bb05bceda54059e17d2053334b358f6bb310ac',
'nonce': 312762301,
'prev_block_hash': b'000000006baebaa74cecde6c6787c26ee0a616a3c333261bff36653babdac149',
'timestamp': 1262152739,
'version': 1
})
self.assertDictEqual(h[self.RETARGET_BLOCK], {
'bits': 486594666,
'block_height': 32256,
'merkle_root': b'64b5e5f5a262f47af443a0120609206a3305877693edfe03e994f20a024ab627',
'nonce': 121087187,
'prev_block_hash': b'00000000984f962134a7291e3693075ae03e521f0ee33378ec30a334d860034b',
'timestamp': 1262153464,
'version': 1
})
self.assertDictEqual(h[self.RETARGET_BLOCK+1], {
'bits': 486594666,
'block_height': 32257,
'merkle_root': b'4d1488981f08b3037878193297dbac701a2054e0f803d4424fe6a4d763d62334',
'nonce': 274675219,
'prev_block_hash': b'000000004f2886a170adb7204cb0c7a824217dd24d11a74423d564c4e0904967',
'timestamp': 1262154352,
'version': 1
})
self.assertEqual(
h.serialize(h[0]),
h.get_raw_header(0)
)
self.assertEqual(
h.serialize(h[self.RETARGET_BLOCK]),
h.get_raw_header(self.RETARGET_BLOCK)
)
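# The 'bits' values above are Bitcoin's compact difficulty encoding:
# one exponent byte followed by a three-byte mantissa. A small helper
# makes the retarget visible: 486604799 is 0x1d00ffff, the
# difficulty-1 target.
def bits_to_target(bits: int) -> int:
    exponent, mantissa = bits >> 24, bits & 0xffffff
    return mantissa << (8 * (exponent - 3))

assert bits_to_target(486604799) == 0xffff << 208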
async def test_connect_from_genesis_to_3000_past_first_chunk_at_2016(self):
headers = MainHeaders(':memory:')
self.assertEqual(headers.height, -1)
await headers.connect(0, self.get_bytes(block_bytes(3001)))
self.assertEqual(headers.height, 3000)
async def test_connect_9_blocks_passing_a_retarget_at_32256(self):
retarget = block_bytes(self.RETARGET_BLOCK-5)
headers = await self.get_headers(upto=retarget)
remainder = self.get_bytes(after=retarget)
self.assertEqual(headers.height, 32250)
await headers.connect(len(headers), remainder)
self.assertEqual(headers.height, 32259)
async def test_bounds(self):
headers = MainHeaders(':memory:')
await headers.connect(0, self.get_bytes(block_bytes(3001)))
self.assertEqual(headers.height, 3000)
with self.assertRaises(IndexError):
_ = headers[3001]
with self.assertRaises(IndexError):
_ = headers[-1]
self.assertIsNotNone(headers[3000])
self.assertIsNotNone(headers[0])
async def test_repair(self):
headers = MainHeaders(':memory:')
await headers.connect(0, self.get_bytes(block_bytes(3001)))
self.assertEqual(headers.height, 3000)
await headers.repair()
self.assertEqual(headers.height, 3000)
# corrupt the middle of it
headers.io.seek(block_bytes(1500))
headers.io.write(b"wtf")
await headers.repair()
self.assertEqual(headers.height, 1499)
self.assertEqual(len(headers), 1500)
# corrupt by appending
headers.io.seek(block_bytes(len(headers)))
headers.io.write(b"appending")
await headers.repair()
self.assertEqual(headers.height, 1499)
await headers.connect(len(headers), self.get_bytes(block_bytes(3001 - 1500), after=block_bytes(1500)))
self.assertEqual(headers.height, 3000)
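# The behavior pinned down above (validate sequentially, truncate at
# the first bad header, drop trailing garbage) can be sketched as a
# pure function; this is illustrative, not the ledger's actual code.
def repaired_length(raw: bytes, header_size: int, is_valid) -> int:
    height = 0
    while (height + 1) * header_size <= len(raw):
        if not is_valid(raw[height * header_size:(height + 1) * header_size]):
            break
        height += 1
    return height * header_size  # bytes to keep; everything after is cut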
async def test_checkpointed_writer(self):
headers = MainHeaders(':memory:')
headers.checkpoint = 100, hexlify(sha256(self.get_bytes(block_bytes(100))))
genblocks = lambda start, end: self.get_bytes(block_bytes(end - start), block_bytes(start))
async with headers.checkpointed_connector() as buff:
buff.write(genblocks(0, 10))
self.assertEqual(len(headers), 10)
async with headers.checkpointed_connector() as buff:
buff.write(genblocks(10, 100))
self.assertEqual(len(headers), 100)
headers = MainHeaders(':memory:')
async with headers.checkpointed_connector() as buff:
buff.write(genblocks(0, 300))
self.assertEqual(len(headers), 300)
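# The checkpoint above is (height, hex sha256 of the first `height`
# raw headers). Presumably the connector buffers writes and compares
# that hash before flushing; the check sketched here is an assumption
# based on these fixtures.
import hashlib

def checkpoint_ok(buffered: bytes, checkpoint, header_size: int) -> bool:
    height, expected = checkpoint
    prefix = buffered[:height * header_size]
    return hashlib.sha256(prefix).hexdigest().encode() == expected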
async def test_concurrency(self):
BLOCKS = 30
headers_temporary_file = tempfile.mktemp()
headers = MainHeaders(headers_temporary_file)
await headers.open()
self.addCleanup(os.remove, headers_temporary_file)
async def writer():
for block_index in range(BLOCKS):
await headers.connect(block_index, self.get_bytes(block_bytes(block_index + 1), block_bytes(block_index)))
async def reader():
for block_index in range(BLOCKS):
while len(headers) < block_index:
await asyncio.sleep(0.000001)
assert headers[block_index]['block_height'] == block_index
reader_task = asyncio.create_task(reader())
await writer()
await reader_task
class TestHeaders(AsyncioTestCase):
def test_deserialize(self):


@ -1,3 +1,11 @@
import os
from binascii import hexlify
from torba.coin.bitcoinsegwit import MainNetLedger
from torba.client.wallet import Wallet
from client_tests.unit.test_transaction import get_transaction, get_output
from client_tests.unit.test_headers import BitcoinHeadersTestCase, block_bytes
from torba.testcase import AsyncioTestCase
from torba.client.wallet import Wallet
@ -6,6 +14,32 @@ from lbry.wallet.transaction import Transaction, Output, Input
from lbry.wallet.ledger import MainNetLedger
class MockNetwork:
def __init__(self, history, transaction):
self.history = history
self.transaction = transaction
self.address = None
self.get_history_called = []
self.get_transaction_called = []
self.is_connected = False
def retriable_call(self, function, *args, **kwargs):
return function(*args, **kwargs)
async def get_history(self, address):
self.get_history_called.append(address)
self.address = address
return self.history
async def get_merkle(self, txid, height):
return {'merkle': ['abcd01'], 'pos': 1}
async def get_transaction(self, tx_hash, _=None):
self.get_transaction_called.append(tx_hash)
return self.transaction[tx_hash]
class LedgerTestCase(AsyncioTestCase):
async def asyncSetUp(self):
@ -19,6 +53,129 @@ class LedgerTestCase(AsyncioTestCase):
async def asyncTearDown(self):
await self.ledger.db.close()
def make_header(self, **kwargs):
header = {
'bits': 486604799,
'block_height': 0,
'merkle_root': b'4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b',
'nonce': 2083236893,
'prev_block_hash': b'0000000000000000000000000000000000000000000000000000000000000000',
'timestamp': 1231006505,
'version': 1
}
header.update(kwargs)
header['merkle_root'] = header['merkle_root'].ljust(64, b'a')
header['prev_block_hash'] = header['prev_block_hash'].ljust(64, b'0')
return self.ledger.headers.serialize(header)
def add_header(self, **kwargs):
serialized = self.make_header(**kwargs)
self.ledger.headers.io.seek(0, os.SEEK_END)
self.ledger.headers.io.write(serialized)
self.ledger.headers._size = None
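# Headers live in a flat file of fixed-size records, which is why
# add_header can simply append and block_bytes(n) is n * header_size.
# Random access is one seek; the 80-byte record size assumed here is
# the standard Bitcoin header size.
def read_raw_header(io, height: int, header_size: int = 80) -> bytes:
    io.seek(height * header_size)
    return io.read(header_size)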
class TestSynchronization(LedgerTestCase):
async def test_update_history(self):
account = self.ledger.account_class.generate(self.ledger, Wallet(), "torba")
address = await account.receiving.get_or_create_usable_address()
address_details = await self.ledger.db.get_address(address=address)
self.assertIsNone(address_details['history'])
self.add_header(block_height=0, merkle_root=b'abcd04')
self.add_header(block_height=1, merkle_root=b'abcd04')
self.add_header(block_height=2, merkle_root=b'abcd04')
self.add_header(block_height=3, merkle_root=b'abcd04')
self.ledger.network = MockNetwork([
{'tx_hash': 'abcd01', 'height': 0},
{'tx_hash': 'abcd02', 'height': 1},
{'tx_hash': 'abcd03', 'height': 2},
], {
'abcd01': hexlify(get_transaction(get_output(1)).raw),
'abcd02': hexlify(get_transaction(get_output(2)).raw),
'abcd03': hexlify(get_transaction(get_output(3)).raw),
})
await self.ledger.update_history(address, '')
self.assertListEqual(self.ledger.network.get_history_called, [address])
self.assertListEqual(self.ledger.network.get_transaction_called, ['abcd01', 'abcd02', 'abcd03'])
address_details = await self.ledger.db.get_address(address=address)
self.assertEqual(
address_details['history'],
'252bda9b22cc902ca2aa2de3548ee8baf06b8501ff7bfb3b0b7d980dbd1bf792:0:'
'ab9c0654dd484ac20437030f2034e25dcb29fc507e84b91138f80adc3af738f9:1:'
'a2ae3d1db3c727e7d696122cab39ee20a7f81856dab7019056dd539f38c548a0:2:'
)
self.ledger.network.get_history_called = []
self.ledger.network.get_transaction_called = []
await self.ledger.update_history(address, '')
self.assertListEqual(self.ledger.network.get_history_called, [address])
self.assertListEqual(self.ledger.network.get_transaction_called, [])
self.ledger.network.history.append({'tx_hash': 'abcd04', 'height': 3})
self.ledger.network.transaction['abcd04'] = hexlify(get_transaction(get_output(4)).raw)
self.ledger.network.get_history_called = []
self.ledger.network.get_transaction_called = []
await self.ledger.update_history(address, '')
self.assertListEqual(self.ledger.network.get_history_called, [address])
self.assertListEqual(self.ledger.network.get_transaction_called, ['abcd04'])
address_details = await self.ledger.db.get_address(address=address)
self.assertEqual(
address_details['history'],
'252bda9b22cc902ca2aa2de3548ee8baf06b8501ff7bfb3b0b7d980dbd1bf792:0:'
'ab9c0654dd484ac20437030f2034e25dcb29fc507e84b91138f80adc3af738f9:1:'
'a2ae3d1db3c727e7d696122cab39ee20a7f81856dab7019056dd539f38c548a0:2:'
'047cf1d53ef68f0fd586d46f90c09ff8e57a4180f67e7f4b8dd0135c3741e828:3:'
)
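# The history column is a flat 'txid:height:' run, so decoding it back
# into pairs is a simple split (the trailing ':' yields one empty piece):
def parse_history(history: str):
    parts = history.split(':')[:-1]
    return list(zip(parts[0::2], (int(h) for h in parts[1::2])))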
class MocHeaderNetwork(MockNetwork):
def __init__(self, responses):
super().__init__(None, None)
self.responses = responses
async def get_headers(self, height, blocks):
return self.responses[height]
class BlockchainReorganizationTests(LedgerTestCase):
async def test_1_block_reorganization(self):
self.ledger.network = MocHeaderNetwork({
20: {'height': 20, 'count': 5, 'hex': hexlify(
self.get_bytes(after=block_bytes(20), upto=block_bytes(5))
)},
25: {'height': 25, 'count': 0, 'hex': b''}
})
headers = self.ledger.headers
await headers.connect(0, self.get_bytes(upto=block_bytes(20)))
self.add_header(block_height=len(headers))
self.assertEqual(headers.height, 20)
await self.ledger.receive_header([{
'height': 21, 'hex': hexlify(self.make_header(block_height=21))
}])
async def test_3_block_reorganization(self):
self.ledger.network = MocHeaderNetwork({
20: {'height': 20, 'count': 5, 'hex': hexlify(
self.get_bytes(after=block_bytes(20), upto=block_bytes(5))
)},
21: {'height': 21, 'count': 1, 'hex': hexlify(self.make_header(block_height=21))},
22: {'height': 22, 'count': 1, 'hex': hexlify(self.make_header(block_height=22))},
25: {'height': 25, 'count': 0, 'hex': b''}
})
headers = self.ledger.headers
await headers.connect(0, self.get_bytes(upto=block_bytes(20)))
self.add_header(block_height=len(headers))
self.add_header(block_height=len(headers))
self.add_header(block_height=len(headers))
self.assertEqual(headers.height, 22)
await self.ledger.receive_header(({
'height': 23, 'hex': hexlify(self.make_header(block_height=23))
},))
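# Both reorg tests hand the ledger a competing header and expect it to
# walk back to the fork point and re-request headers from the network.
# A generic sketch of locating the fork (names are illustrative, not
# the ledger's API):
def find_fork_height(local_hashes, remote_hashes, tip: int) -> int:
    height = tip
    while height >= 0 and local_hashes[height] != remote_hashes[height]:
        height -= 1
    return height  # -1 means no common ancestor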
class BasicAccountingTests(LedgerTestCase):


@ -2,6 +2,224 @@ import unittest
from binascii import hexlify, unhexlify
from lbry.wallet.script import OutputScript
import unittest
from binascii import hexlify, unhexlify
from torba.client.bcd_data_stream import BCDataStream
from torba.client.basescript import Template, ParseError, tokenize, push_data
from torba.client.basescript import PUSH_SINGLE, PUSH_INTEGER, PUSH_MANY, OP_HASH160, OP_EQUAL
from torba.client.basescript import BaseInputScript, BaseOutputScript
def parse(opcodes, source):
template = Template('test', opcodes)
s = BCDataStream()
for t in source:
if isinstance(t, bytes):
s.write_many(push_data(t))
elif isinstance(t, int):
s.write_uint8(t)
else:
raise ValueError()
s.reset()
return template.parse(tokenize(s))
class TestScriptTemplates(unittest.TestCase):
def test_push_data(self):
self.assertDictEqual(parse(
(PUSH_SINGLE('script_hash'),),
(b'abcdef',)
), {
'script_hash': b'abcdef'
}
)
self.assertDictEqual(parse(
(PUSH_SINGLE('first'), PUSH_INTEGER('rating')),
(b'Satoshi', (1000).to_bytes(2, 'little'))
), {
'first': b'Satoshi',
'rating': 1000,
}
)
self.assertDictEqual(parse(
(OP_HASH160, PUSH_SINGLE('script_hash'), OP_EQUAL),
(OP_HASH160, b'abcdef', OP_EQUAL)
), {
'script_hash': b'abcdef'
}
)
def test_push_data_many(self):
self.assertDictEqual(parse(
(PUSH_MANY('names'),),
(b'amit',)
), {
'names': [b'amit']
}
)
self.assertDictEqual(parse(
(PUSH_MANY('names'),),
(b'jeremy', b'amit', b'victor')
), {
'names': [b'jeremy', b'amit', b'victor']
}
)
self.assertDictEqual(parse(
(OP_HASH160, PUSH_MANY('names'), OP_EQUAL),
(OP_HASH160, b'grin', b'jack', OP_EQUAL)
), {
'names': [b'grin', b'jack']
}
)
def test_push_data_mixed(self):
self.assertDictEqual(parse(
(PUSH_SINGLE('CEO'), PUSH_MANY('Devs'), PUSH_SINGLE('CTO'), PUSH_SINGLE('State')),
(b'jeremy', b'lex', b'amit', b'victor', b'jack', b'grin', b'NH')
), {
'CEO': b'jeremy',
'CTO': b'grin',
'Devs': [b'lex', b'amit', b'victor', b'jack'],
'State': b'NH'
}
)
def test_push_data_many_separated(self):
self.assertDictEqual(parse(
(PUSH_MANY('Chiefs'), OP_HASH160, PUSH_MANY('Devs')),
(b'jeremy', b'grin', OP_HASH160, b'lex', b'jack')
), {
'Chiefs': [b'jeremy', b'grin'],
'Devs': [b'lex', b'jack']
}
)
def test_push_data_many_not_separated(self):
with self.assertRaisesRegex(ParseError, 'consecutive PUSH_MANY'):
parse((PUSH_MANY('Chiefs'), PUSH_MANY('Devs')), (b'jeremy', b'grin', b'lex', b'jack'))
class TestRedeemPubKeyHash(unittest.TestCase):
def redeem_pubkey_hash(self, sig, pubkey):
# this checks that the factory function correctly sets up the script
src1 = BaseInputScript.redeem_pubkey_hash(unhexlify(sig), unhexlify(pubkey))
self.assertEqual(src1.template.name, 'pubkey_hash')
self.assertEqual(hexlify(src1.values['signature']), sig)
self.assertEqual(hexlify(src1.values['pubkey']), pubkey)
# now we test that it will round trip
src2 = BaseInputScript(src1.source)
self.assertEqual(src2.template.name, 'pubkey_hash')
self.assertEqual(hexlify(src2.values['signature']), sig)
self.assertEqual(hexlify(src2.values['pubkey']), pubkey)
return hexlify(src1.source)
def test_redeem_pubkey_hash_1(self):
self.assertEqual(
self.redeem_pubkey_hash(
b'30450221009dc93f25184a8d483745cd3eceff49727a317c9bfd8be8d3d04517e9cdaf8dd502200e'
b'02dc5939cad9562d2b1f303f185957581c4851c98d497af281118825e18a8301',
b'025415a06514230521bff3aaface31f6db9d9bbc39bf1ca60a189e78731cfd4e1b'
),
b'4830450221009dc93f25184a8d483745cd3eceff49727a317c9bfd8be8d3d04517e9cdaf8dd502200e02d'
b'c5939cad9562d2b1f303f185957581c4851c98d497af281118825e18a830121025415a06514230521bff3'
b'aaface31f6db9d9bbc39bf1ca60a189e78731cfd4e1b'
)
class TestRedeemScriptHash(unittest.TestCase):
def redeem_script_hash(self, sigs, pubkeys):
# this checks that the factory function correctly sets up the script
src1 = BaseInputScript.redeem_script_hash(
[unhexlify(sig) for sig in sigs],
[unhexlify(pubkey) for pubkey in pubkeys]
)
subscript1 = src1.values['script']
self.assertEqual(src1.template.name, 'script_hash')
self.assertListEqual([hexlify(v) for v in src1.values['signatures']], sigs)
self.assertListEqual([hexlify(p) for p in subscript1.values['pubkeys']], pubkeys)
self.assertEqual(subscript1.values['signatures_count'], len(sigs))
self.assertEqual(subscript1.values['pubkeys_count'], len(pubkeys))
# now we test that it will round trip
src2 = BaseInputScript(src1.source)
subscript2 = src2.values['script']
self.assertEqual(src2.template.name, 'script_hash')
self.assertListEqual([hexlify(v) for v in src2.values['signatures']], sigs)
self.assertListEqual([hexlify(p) for p in subscript2.values['pubkeys']], pubkeys)
self.assertEqual(subscript2.values['signatures_count'], len(sigs))
self.assertEqual(subscript2.values['pubkeys_count'], len(pubkeys))
return hexlify(src1.source)
def test_redeem_script_hash_1(self):
self.assertEqual(
self.redeem_script_hash([
b'3045022100fec82ed82687874f2a29cbdc8334e114af645c45298e85bb1efe69fcf15c617a0220575'
b'e40399f9ada388d8e522899f4ec3b7256896dd9b02742f6567d960b613f0401',
b'3044022024890462f731bd1a42a4716797bad94761fc4112e359117e591c07b8520ea33b02201ac68'
b'9e35c4648e6beff1d42490207ba14027a638a62663b2ee40153299141eb01',
b'30450221009910823e0142967a73c2d16c1560054d71c0625a385904ba2f1f53e0bc1daa8d02205cd'
b'70a89c6cf031a8b07d1d5eb0d65d108c4d49c2d403f84fb03ad3dc318777a01'
], [
b'0372ba1fd35e5f1b1437cba0c4ebfc4025b7349366f9f9c7c8c4b03a47bd3f68a4',
b'03061d250182b2db1ba144167fd8b0ef3fe0fc3a2fa046958f835ffaf0dfdb7692',
b'02463bfbc1eaec74b5c21c09239ae18dbf6fc07833917df10d0b43e322810cee0c',
b'02fa6a6455c26fb516cfa85ea8de81dd623a893ffd579ee2a00deb6cdf3633d6bb',
b'0382910eae483ce4213d79d107bfc78f3d77e2a31ea597be45256171ad0abeaa89'
]),
b'00483045022100fec82ed82687874f2a29cbdc8334e114af645c45298e85bb1efe69fcf15c617a0220575e'
b'40399f9ada388d8e522899f4ec3b7256896dd9b02742f6567d960b613f0401473044022024890462f731bd'
b'1a42a4716797bad94761fc4112e359117e591c07b8520ea33b02201ac689e35c4648e6beff1d42490207ba'
b'14027a638a62663b2ee40153299141eb014830450221009910823e0142967a73c2d16c1560054d71c0625a'
b'385904ba2f1f53e0bc1daa8d02205cd70a89c6cf031a8b07d1d5eb0d65d108c4d49c2d403f84fb03ad3dc3'
b'18777a014cad53210372ba1fd35e5f1b1437cba0c4ebfc4025b7349366f9f9c7c8c4b03a47bd3f68a42103'
b'061d250182b2db1ba144167fd8b0ef3fe0fc3a2fa046958f835ffaf0dfdb76922102463bfbc1eaec74b5c2'
b'1c09239ae18dbf6fc07833917df10d0b43e322810cee0c2102fa6a6455c26fb516cfa85ea8de81dd623a89'
b'3ffd579ee2a00deb6cdf3633d6bb210382910eae483ce4213d79d107bfc78f3d77e2a31ea597be45256171'
b'ad0abeaa8955ae'
)
class TestPayPubKeyHash(unittest.TestCase):
def pay_pubkey_hash(self, pubkey_hash):
# this checks that the factory function correctly sets up the script
src1 = BaseOutputScript.pay_pubkey_hash(unhexlify(pubkey_hash))
self.assertEqual(src1.template.name, 'pay_pubkey_hash')
self.assertEqual(hexlify(src1.values['pubkey_hash']), pubkey_hash)
# now we test that it will round trip
src2 = BaseOutputScript(src1.source)
self.assertEqual(src2.template.name, 'pay_pubkey_hash')
self.assertEqual(hexlify(src2.values['pubkey_hash']), pubkey_hash)
return hexlify(src1.source)
def test_pay_pubkey_hash_1(self):
self.assertEqual(
self.pay_pubkey_hash(b'64d74d12acc93ba1ad495e8d2d0523252d664f4d'),
b'76a91464d74d12acc93ba1ad495e8d2d0523252d664f4d88ac'
)
class TestPayScriptHash(unittest.TestCase):
def pay_script_hash(self, script_hash):
# this checks that the factory function correctly sets up the script
src1 = BaseOutputScript.pay_script_hash(unhexlify(script_hash))
self.assertEqual(src1.template.name, 'pay_script_hash')
self.assertEqual(hexlify(src1.values['script_hash']), script_hash)
# now we test that it will round trip
src2 = BaseOutputScript(src1.source)
self.assertEqual(src2.template.name, 'pay_script_hash')
self.assertEqual(hexlify(src2.values['script_hash']), script_hash)
return hexlify(src1.source)
def test_pay_script_hash_1(self):
self.assertEqual(
self.pay_script_hash(b'63d65a2ee8c44426d06050cfd71c0f0ff3fc41ac'),
b'a91463d65a2ee8c44426d06050cfd71c0f0ff3fc41ac87'
)
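# For reference, the expected hex in the pay-to tests above is the
# standard script layout:
#   P2PKH: 76 a9 14 <20-byte pubkey hash> 88 ac
#          (OP_DUP OP_HASH160 push-20 ... OP_EQUALVERIFY OP_CHECKSIG)
#   P2SH:  a9 14 <20-byte script hash> 87
#          (OP_HASH160 push-20 ... OP_EQUAL)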
class TestPayClaimNamePubkeyHash(unittest.TestCase):


@ -81,6 +81,50 @@ class TestSizeAndFeeEstimation(AsyncioTestCase):
self.assertEqual(tx.get_base_fee(self.ledger), FEE_PER_BYTE * tx.base_size)
class TestAccountBalanceImpactFromTransaction(unittest.TestCase):
def test_is_my_account_not_set(self):
tx = get_transaction()
with self.assertRaisesRegex(ValueError, "Cannot access net_account_balance"):
_ = tx.net_account_balance
tx.inputs[0].txo_ref.txo.is_my_account = True
with self.assertRaisesRegex(ValueError, "Cannot access net_account_balance"):
_ = tx.net_account_balance
tx.outputs[0].is_my_account = True
# all inputs/outputs are set now so it should work
_ = tx.net_account_balance
def test_paying_from_my_account_to_other_account(self):
tx = ledger_class.transaction_class() \
.add_inputs([get_input(300*CENT)]) \
.add_outputs([get_output(190*CENT, NULL_HASH),
get_output(100*CENT, NULL_HASH)])
tx.inputs[0].txo_ref.txo.is_my_account = True
tx.outputs[0].is_my_account = False
tx.outputs[1].is_my_account = True
self.assertEqual(tx.net_account_balance, -200*CENT)
def test_paying_from_other_account_to_my_account(self):
tx = ledger_class.transaction_class() \
.add_inputs([get_input(300*CENT)]) \
.add_outputs([get_output(190*CENT, NULL_HASH),
get_output(100*CENT, NULL_HASH)])
tx.inputs[0].txo_ref.txo.is_my_account = False
tx.outputs[0].is_my_account = True
tx.outputs[1].is_my_account = False
self.assertEqual(tx.net_account_balance, 190*CENT)
def test_paying_from_my_account_to_my_account(self):
tx = ledger_class.transaction_class() \
.add_inputs([get_input(300*CENT)]) \
.add_outputs([get_output(190*CENT, NULL_HASH),
get_output(100*CENT, NULL_HASH)])
tx.inputs[0].txo_ref.txo.is_my_account = True
tx.outputs[0].is_my_account = True
tx.outputs[1].is_my_account = True
self.assertEqual(tx.net_account_balance, -10*CENT) # lost to fee
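# The three cases above pin down the rule: net change is what the
# account receives minus what it spends, so the fee appears as the
# shortfall. A sketch of the computation (illustrative, not the
# transaction class's actual code):
def net_account_balance(tx) -> int:
    received = sum(txo.amount for txo in tx.outputs if txo.is_my_account)
    spent = sum(txi.txo_ref.txo.amount for txi in tx.inputs
                if txi.txo_ref.txo.is_my_account)
    return received - spent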
class TestTransactionSerialization(unittest.TestCase):
def test_genesis_transaction(self):
@ -254,3 +298,125 @@ class TestTransactionSigning(AsyncioTestCase):
b'304402200dafa26ad7cf38c5a971c8a25ce7d85a076235f146126762296b1223c42ae21e022020ef9eeb8'
b'398327891008c5c0be4357683f12cb22346691ff23914f457bf679601'
)
class TransactionIOBalancing(AsyncioTestCase):
async def asyncSetUp(self):
self.ledger = ledger_class({
'db': ledger_class.database_class(':memory:'),
'headers': ledger_class.headers_class(':memory:'),
})
await self.ledger.db.open()
self.account = self.ledger.account_class.from_dict(
self.ledger, Wallet(), {
"seed": "carbon smart garage balance margin twelve chest sword "
"toast envelope bottom stomach absent"
}
)
addresses = await self.account.ensure_address_gap()
self.pubkey_hash = [self.ledger.address_to_hash160(a) for a in addresses]
self.hash_cycler = cycle(self.pubkey_hash)
async def asyncTearDown(self):
await self.ledger.db.close()
def txo(self, amount, address=None):
return get_output(int(amount*COIN), address or next(self.hash_cycler))
def txi(self, txo):
return ledger_class.transaction_class.input_class.spend(txo)
def tx(self, inputs, outputs):
return ledger_class.transaction_class.create(inputs, outputs, [self.account], self.account)
async def create_utxos(self, amounts):
utxos = [self.txo(amount) for amount in amounts]
self.funding_tx = ledger_class.transaction_class(is_verified=True) \
.add_inputs([self.txi(self.txo(sum(amounts)+0.1))]) \
.add_outputs(utxos)
await self.ledger.db.insert_transaction(self.funding_tx)
for utxo in utxos:
await self.ledger.db.save_transaction_io(
self.funding_tx,
self.ledger.hash160_to_address(utxo.script.values['pubkey_hash']),
utxo.script.values['pubkey_hash'], ''
)
return utxos
@staticmethod
def inputs(tx):
return [round(i.amount/COIN, 2) for i in tx.inputs]
@staticmethod
def outputs(tx):
return [round(o.amount/COIN, 2) for o in tx.outputs]
async def test_basic_use_cases(self):
self.ledger.fee_per_byte = int(.01*CENT)
# available UTXOs for filling missing inputs
utxos = await self.create_utxos([
1, 1, 3, 5, 10
])
# pay 3 coins (3.02 w/ fees)
tx = await self.tx(
[], # inputs
[self.txo(3)] # outputs
)
# best UTXO match is 5 (as UTXO 3 will be short 0.02 to cover fees)
self.assertListEqual(self.inputs(tx), [5])
# a change of 1.98 is added to reach balance
self.assertListEqual(self.outputs(tx), [3, 1.98])
await self.ledger.release_outputs(utxos)
# pay 2.98 coins (3.00 w/ fees)
tx = await self.tx(
[], # inputs
[self.txo(2.98)] # outputs
)
# best UTXO match is 3 and no change is needed
self.assertListEqual(self.inputs(tx), [3])
self.assertListEqual(self.outputs(tx), [2.98])
await self.ledger.release_outputs(utxos)
# supplied input and output, but input is not enough to cover output
tx = await self.tx(
[self.txi(self.txo(10))], # inputs
[self.txo(11)] # outputs
)
# additional input is chosen (UTXO 3)
self.assertListEqual([10, 3], self.inputs(tx))
# change is now needed to consume extra input
self.assertListEqual([11, 1.96], self.outputs(tx))
await self.ledger.release_outputs(utxos)
# liquidating a UTXO
tx = await self.tx(
[self.txi(self.txo(10))], # inputs
[] # outputs
)
self.assertListEqual([10], self.inputs(tx))
# a change output is added to consume the remaining amount
self.assertListEqual([9.98], self.outputs(tx))
await self.ledger.release_outputs(utxos)
# liquidating at a loss, requires adding extra inputs
tx = await self.tx(
[self.txi(self.txo(0.01))], # inputs
[] # outputs
)
# UTXO 1 is added to cover some of the fee
self.assertListEqual([0.01, 1], self.inputs(tx))
# change is now needed to consume extra input
self.assertListEqual([0.97], self.outputs(tx))
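# The selections above (UTXO 5 for a 3.02 spend, UTXO 3 for an exact
# 3.00 spend) are consistent with choosing the smallest single UTXO
# that covers amount plus fee, falling back to accumulating inputs.
# A rough sketch of that strategy, not torba's actual coin chooser:
def pick_utxos(amounts, needed):
    covering = [a for a in sorted(amounts) if a >= needed]
    if covering:
        return [covering[0]]
    picked, total = [], 0
    for a in sorted(amounts, reverse=True):
        picked.append(a)
        total += a
        if total >= needed:
            return picked
    return None  # insufficient funds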

torba/.gitignore vendored

@ -1,20 +0,0 @@
# packaging
torba.egg-info/
dist/
# PyCharm
.idea/
# testing
.tox/
torba/bin
# cache and logs
__pycache__/
.mypy_cache/
_trial_temp/
_trial_temp-*/
# OS X DS_Store
*.DS_Store


@ -1,21 +0,0 @@
MIT License
Copyright (c) 2018 LBRY Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.


@ -1,3 +0,0 @@
# <img src="https://raw.githubusercontent.com/lbryio/torba/master/torba.png" alt="Torba" width="42" height="30" /> Torba [![Build Status](https://travis-ci.org/lbryio/torba.svg?branch=master)](https://travis-ci.org/lbryio/torba)
A new wallet library to help bitcoin based projects build fast, correct and scalable crypto currency wallets in Python.


@ -1,68 +0,0 @@
import os
import sys
from setuptools import setup, find_packages
import torba
BASE = os.path.dirname(__file__)
with open(os.path.join(BASE, 'README.md'), encoding='utf-8') as fh:
long_description = fh.read()
REQUIRES = [
'aiohttp==3.5.4',
'cffi==1.12.1', # TODO: 1.12.2 fails on travis in wine
'coincurve==11.0.0',
'pbkdf2==1.3',
'cryptography==2.5',
'attrs==18.2.0',
'pylru==1.1.0'
]
if sys.platform.startswith('linux'):
REQUIRES.append('plyvel==1.0.5')
setup(
name='torba',
version=torba.__version__,
url='https://github.com/lbryio/torba',
license='MIT',
author='LBRY Inc.',
author_email='hello@lbry.io',
description='Wallet client/server framework for bitcoin based currencies.',
long_description=long_description,
long_description_content_type="text/markdown",
keywords='wallet,crypto,currency,money,bitcoin,electrum,electrumx',
classifiers=[
'Framework :: AsyncIO',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Operating System :: OS Independent',
'Topic :: Internet',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Benchmark',
'Topic :: System :: Distributed Computing',
'Topic :: Utilities',
],
packages=find_packages(exclude=('tests',)),
python_requires='>=3.7',
install_requires=REQUIRES,
extras_require={
'gui': (
'pyside2',
)
},
entry_points={
'console_scripts': [
'torba-client=torba.client.cli:main',
'torba-server=torba.server.cli:main',
'orchstr8=torba.orchstr8.cli:main',
],
'gui_scripts': [
'torba=torba.ui:main [gui]',
'torba-workbench=torba.workbench:main [gui]',
]
}
)


@ -1,493 +0,0 @@
from binascii import hexlify
from torba.testcase import AsyncioTestCase
from torba.coin.bitcoinsegwit import MainNetLedger as ledger_class
from torba.client.baseaccount import HierarchicalDeterministic, SingleKey
from torba.client.wallet import Wallet
class TestHierarchicalDeterministicAccount(AsyncioTestCase):
async def asyncSetUp(self):
self.ledger = ledger_class({
'db': ledger_class.database_class(':memory:'),
'headers': ledger_class.headers_class(':memory:'),
})
await self.ledger.db.open()
self.account = self.ledger.account_class.generate(self.ledger, Wallet(), "torba")
async def asyncTearDown(self):
await self.ledger.db.close()
async def test_generate_account(self):
account = self.account
self.assertEqual(account.ledger, self.ledger)
self.assertIsNotNone(account.seed)
self.assertEqual(account.public_key.ledger, self.ledger)
self.assertEqual(account.private_key.public_key, account.public_key)
addresses = await account.receiving.get_addresses()
self.assertEqual(len(addresses), 0)
addresses = await account.change.get_addresses()
self.assertEqual(len(addresses), 0)
await account.ensure_address_gap()
addresses = await account.receiving.get_addresses()
self.assertEqual(len(addresses), 20)
addresses = await account.change.get_addresses()
self.assertEqual(len(addresses), 6)
addresses = await account.get_addresses()
self.assertEqual(len(addresses), 26)
async def test_generate_keys_over_batch_threshold_saves_it_properly(self):
async with self.account.receiving.address_generator_lock:
await self.account.receiving._generate_keys(0, 200)
records = await self.account.receiving.get_address_records()
self.assertEqual(len(records), 201)
async def test_ensure_address_gap(self):
account = self.account
self.assertIsInstance(account.receiving, HierarchicalDeterministic)
async with account.receiving.address_generator_lock:
await account.receiving._generate_keys(4, 7)
await account.receiving._generate_keys(0, 3)
await account.receiving._generate_keys(8, 11)
records = await account.receiving.get_address_records()
self.assertListEqual(
[r['pubkey'].n for r in records],
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
)
# we have 12, but default gap is 20
new_keys = await account.receiving.ensure_address_gap()
self.assertEqual(len(new_keys), 8)
records = await account.receiving.get_address_records()
self.assertListEqual(
[r['pubkey'].n for r in records],
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19]
)
# case #1: no new addresses needed
empty = await account.receiving.ensure_address_gap()
self.assertEqual(len(empty), 0)
# case #2: only one new address needed
records = await account.receiving.get_address_records()
await self.ledger.db.set_address_history(records[0]['address'], 'a:1:')
new_keys = await account.receiving.ensure_address_gap()
self.assertEqual(len(new_keys), 1)
# case #3: 20 addresses needed
await self.ledger.db.set_address_history(new_keys[0], 'a:1:')
new_keys = await account.receiving.ensure_address_gap()
self.assertEqual(len(new_keys), 20)
async def test_get_or_create_usable_address(self):
account = self.account
keys = await account.receiving.get_addresses()
self.assertEqual(len(keys), 0)
address = await account.receiving.get_or_create_usable_address()
self.assertIsNotNone(address)
keys = await account.receiving.get_addresses()
self.assertEqual(len(keys), 20)
async def test_generate_account_from_seed(self):
account = self.ledger.account_class.from_dict(
self.ledger, Wallet(), {
"seed": "carbon smart garage balance margin twelve chest sword "
"toast envelope bottom stomach absent",
"address_generator": {
'name': 'deterministic-chain',
'receiving': {'gap': 3, 'maximum_uses_per_address': 1},
'change': {'gap': 2, 'maximum_uses_per_address': 1}
}
}
)
self.assertEqual(
account.private_key.extended_key_string(),
'xprv9s21ZrQH143K3TsAz5efNV8K93g3Ms3FXcjaWB9fVUsMwAoE3ZT4vYymkp5BxK'
'Kfnpz8J6sHDFriX1SnpvjNkzcks8XBnxjGLS83BTyfpna'
)
self.assertEqual(
account.public_key.extended_key_string(),
'xpub661MyMwAqRbcFwwe67Bfjd53h5WXmKm6tqfBJZZH3pQLoy8Nb6mKUMJFc7UbpV'
'NzmwFPN2evn3YHnig1pkKVYcvCV8owTd2yAcEkJfCX53g'
)
address = await account.receiving.ensure_address_gap()
self.assertEqual(address[0], '1CDLuMfwmPqJiNk5C2Bvew6tpgjAGgUk8J')
private_key = await self.ledger.get_private_key_for_address(
account.wallet, '1CDLuMfwmPqJiNk5C2Bvew6tpgjAGgUk8J'
)
self.assertEqual(
private_key.extended_key_string(),
'xprv9xV7rhbg6M4yWrdTeLorz3Q1GrQb4aQzzGWboP3du7W7UUztzNTUrEYTnDfz7o'
'ptBygDxXYRppyiuenJpoBTgYP2C26E1Ah5FEALM24CsWi'
)
invalid_key = await self.ledger.get_private_key_for_address(
account.wallet, 'BcQjRlhDOIrQez1WHfz3whnB33Bp34sUgX'
)
self.assertIsNone(invalid_key)
self.assertEqual(
hexlify(private_key.wif()),
b'1c01ae1e4c7d89e39f6d3aa7792c097a30ca7d40be249b6de52c81ec8cf9aab48b01'
)
async def test_load_and_save_account(self):
account_data = {
'name': 'My Account',
'modified_on': 123.456,
'seed':
"carbon smart garage balance margin twelve chest sword toast envelope bottom stomac"
"h absent",
'encrypted': False,
'private_key':
'xprv9s21ZrQH143K3TsAz5efNV8K93g3Ms3FXcjaWB9fVUsMwAoE3ZT4vYymkp'
'5BxKKfnpz8J6sHDFriX1SnpvjNkzcks8XBnxjGLS83BTyfpna',
'public_key':
'xpub661MyMwAqRbcFwwe67Bfjd53h5WXmKm6tqfBJZZH3pQLoy8Nb6mKUMJFc7'
'UbpVNzmwFPN2evn3YHnig1pkKVYcvCV8owTd2yAcEkJfCX53g',
'address_generator': {
'name': 'deterministic-chain',
'receiving': {'gap': 5, 'maximum_uses_per_address': 2},
'change': {'gap': 5, 'maximum_uses_per_address': 2}
}
}
account = self.ledger.account_class.from_dict(self.ledger, Wallet(), account_data)
await account.ensure_address_gap()
addresses = await account.receiving.get_addresses()
self.assertEqual(len(addresses), 5)
addresses = await account.change.get_addresses()
self.assertEqual(len(addresses), 5)
self.maxDiff = None
account_data['ledger'] = 'btc_mainnet'
self.assertDictEqual(account_data, account.to_dict())
def test_merge_diff(self):
account_data = {
'name': 'My Account',
'modified_on': 123.456,
'seed':
"carbon smart garage balance margin twelve chest sword toast envelope bottom stomac"
"h absent",
'encrypted': False,
'private_key':
'xprv9s21ZrQH143K3TsAz5efNV8K93g3Ms3FXcjaWB9fVUsMwAoE3ZT4vYymkp'
'5BxKKfnpz8J6sHDFriX1SnpvjNkzcks8XBnxjGLS83BTyfpna',
'public_key':
'xpub661MyMwAqRbcFwwe67Bfjd53h5WXmKm6tqfBJZZH3pQLoy8Nb6mKUMJFc7'
'UbpVNzmwFPN2evn3YHnig1pkKVYcvCV8owTd2yAcEkJfCX53g',
'address_generator': {
'name': 'deterministic-chain',
'receiving': {'gap': 5, 'maximum_uses_per_address': 2},
'change': {'gap': 5, 'maximum_uses_per_address': 2}
}
}
account = self.ledger.account_class.from_dict(self.ledger, Wallet(), account_data)
self.assertEqual(account.name, 'My Account')
self.assertEqual(account.modified_on, 123.456)
self.assertEqual(account.change.gap, 5)
self.assertEqual(account.change.maximum_uses_per_address, 2)
self.assertEqual(account.receiving.gap, 5)
self.assertEqual(account.receiving.maximum_uses_per_address, 2)
account_data['name'] = 'Changed Name'
account_data['address_generator']['change']['gap'] = 6
account_data['address_generator']['change']['maximum_uses_per_address'] = 7
account_data['address_generator']['receiving']['gap'] = 8
account_data['address_generator']['receiving']['maximum_uses_per_address'] = 9
account.merge(account_data)
# no change because modified_on is not newer
self.assertEqual(account.name, 'My Account')
account_data['modified_on'] = 200.00
account.merge(account_data)
self.assertEqual(account.name, 'Changed Name')
self.assertEqual(account.change.gap, 6)
self.assertEqual(account.change.maximum_uses_per_address, 7)
self.assertEqual(account.receiving.gap, 8)
self.assertEqual(account.receiving.maximum_uses_per_address, 9)
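# merge() applies remote data only when its modified_on is newer, which
# is why the first merge above is a no-op. The guard, sketched (fields
# beyond the ones tested are omitted):
def merge_account(account, data: dict):
    if data.get('modified_on', 0) <= account.modified_on:
        return  # stale update: keep current settings
    account.modified_on = data['modified_on']
    account.name = data['name']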
class TestSingleKeyAccount(AsyncioTestCase):
async def asyncSetUp(self):
self.ledger = ledger_class({
'db': ledger_class.database_class(':memory:'),
'headers': ledger_class.headers_class(':memory:'),
})
await self.ledger.db.open()
self.account = self.ledger.account_class.generate(
self.ledger, Wallet(), "torba", {'name': 'single-address'})
async def asyncTearDown(self):
await self.ledger.db.close()
async def test_generate_account(self):
account = self.account
self.assertEqual(account.ledger, self.ledger)
self.assertIsNotNone(account.seed)
self.assertEqual(account.public_key.ledger, self.ledger)
self.assertEqual(account.private_key.public_key, account.public_key)
addresses = await account.receiving.get_addresses()
self.assertEqual(len(addresses), 0)
addresses = await account.change.get_addresses()
self.assertEqual(len(addresses), 0)
await account.ensure_address_gap()
addresses = await account.receiving.get_addresses()
self.assertEqual(len(addresses), 1)
self.assertEqual(addresses[0], account.public_key.address)
addresses = await account.change.get_addresses()
self.assertEqual(len(addresses), 1)
self.assertEqual(addresses[0], account.public_key.address)
addresses = await account.get_addresses()
self.assertEqual(len(addresses), 1)
self.assertEqual(addresses[0], account.public_key.address)
async def test_ensure_address_gap(self):
account = self.account
self.assertIsInstance(account.receiving, SingleKey)
addresses = await account.receiving.get_addresses()
self.assertListEqual(addresses, [])
# a single-key account ignores the gap and only ever yields one address
new_keys = await account.receiving.ensure_address_gap()
self.assertEqual(len(new_keys), 1)
self.assertEqual(new_keys[0], account.public_key.address)
records = await account.receiving.get_address_records()
pubkey = records[0].pop('pubkey')
self.assertListEqual(records, [{
'chain': 0,
'account': account.public_key.address,
'address': account.public_key.address,
'history': None,
'used_times': 0
}])
self.assertEqual(
pubkey.extended_key_string(),
account.public_key.extended_key_string()
)
# case #1: no new addresses needed
empty = await account.receiving.ensure_address_gap()
self.assertEqual(len(empty), 0)
# case #2: after use, still no new address needed
records = await account.receiving.get_address_records()
await self.ledger.db.set_address_history(records[0]['address'], 'a:1:')
empty = await account.receiving.ensure_address_gap()
self.assertEqual(len(empty), 0)
async def test_get_or_create_usable_address(self):
account = self.account
addresses = await account.receiving.get_addresses()
self.assertEqual(len(addresses), 0)
address1 = await account.receiving.get_or_create_usable_address()
self.assertIsNotNone(address1)
await self.ledger.db.set_address_history(address1, 'a:1:b:2:c:3:')
records = await account.receiving.get_address_records()
self.assertEqual(records[0]['used_times'], 3)
address2 = await account.receiving.get_or_create_usable_address()
self.assertEqual(address1, address2)
keys = await account.receiving.get_addresses()
self.assertEqual(len(keys), 1)
async def test_generate_account_from_seed(self):
account = self.ledger.account_class.from_dict(
self.ledger, Wallet(), {
"seed":
"carbon smart garage balance margin twelve chest sword toas"
"t envelope bottom stomach absent",
'address_generator': {'name': 'single-address'}
}
)
self.assertEqual(
account.private_key.extended_key_string(),
'xprv9s21ZrQH143K3TsAz5efNV8K93g3Ms3FXcjaWB9fVUsMwAoE3ZT4vYymkp'
'5BxKKfnpz8J6sHDFriX1SnpvjNkzcks8XBnxjGLS83BTyfpna',
)
self.assertEqual(
account.public_key.extended_key_string(),
'xpub661MyMwAqRbcFwwe67Bfjd53h5WXmKm6tqfBJZZH3pQLoy8Nb6mKUMJFc7'
'UbpVNzmwFPN2evn3YHnig1pkKVYcvCV8owTd2yAcEkJfCX53g',
)
address = await account.receiving.ensure_address_gap()
self.assertEqual(address[0], account.public_key.address)
private_key = await self.ledger.get_private_key_for_address(
account.wallet, address[0]
)
self.assertEqual(
private_key.extended_key_string(),
'xprv9s21ZrQH143K3TsAz5efNV8K93g3Ms3FXcjaWB9fVUsMwAoE3ZT4vYymkp'
'5BxKKfnpz8J6sHDFriX1SnpvjNkzcks8XBnxjGLS83BTyfpna',
)
invalid_key = await self.ledger.get_private_key_for_address(
account.wallet, 'BcQjRlhDOIrQez1WHfz3whnB33Bp34sUgX'
)
self.assertIsNone(invalid_key)
self.assertEqual(
hexlify(private_key.wif()),
b'1c92caa0ef99bfd5e2ceb73b66da8cd726a9370be8c368d448a322f3c5b23aaab901'
)
async def test_load_and_save_account(self):
account_data = {
'name': 'My Account',
'modified_on': 123.456,
'seed':
"carbon smart garage balance margin twelve chest sword toast envelope bottom stomac"
"h absent",
'encrypted': False,
'private_key':
'xprv9s21ZrQH143K3TsAz5efNV8K93g3Ms3FXcjaWB9fVUsMwAoE3ZT4vYymkp'
'5BxKKfnpz8J6sHDFriX1SnpvjNkzcks8XBnxjGLS83BTyfpna',
'public_key':
'xpub661MyMwAqRbcFwwe67Bfjd53h5WXmKm6tqfBJZZH3pQLoy8Nb6mKUMJFc7'
'UbpVNzmwFPN2evn3YHnig1pkKVYcvCV8owTd2yAcEkJfCX53g',
'address_generator': {'name': 'single-address'}
}
account = self.ledger.account_class.from_dict(self.ledger, Wallet(), account_data)
await account.ensure_address_gap()
addresses = await account.receiving.get_addresses()
self.assertEqual(len(addresses), 1)
addresses = await account.change.get_addresses()
self.assertEqual(len(addresses), 1)
self.maxDiff = None
account_data['ledger'] = 'btc_mainnet'
self.assertDictEqual(account_data, account.to_dict())
class AccountEncryptionTests(AsyncioTestCase):
password = "password"
init_vector = b'0000000000000000'
unencrypted_account = {
'name': 'My Account',
'seed':
"carbon smart garage balance margin twelve chest sword toast envelope bottom stomac"
"h absent",
'encrypted': False,
'private_key':
'xprv9s21ZrQH143K3TsAz5efNV8K93g3Ms3FXcjaWB9fVUsMwAoE3ZT4vYymkp'
'5BxKKfnpz8J6sHDFriX1SnpvjNkzcks8XBnxjGLS83BTyfpna',
'public_key':
'xpub661MyMwAqRbcFwwe67Bfjd53h5WXmKm6tqfBJZZH3pQLoy8Nb6mKUMJFc7'
'UbpVNzmwFPN2evn3YHnig1pkKVYcvCV8owTd2yAcEkJfCX53g',
'address_generator': {'name': 'single-address'}
}
encrypted_account = {
'name': 'My Account',
'seed':
"MDAwMDAwMDAwMDAwMDAwMJ4e4W4pE6nQtPiD6MujNIQ7aFPhUBl63GwPziAgGN"
"MBTMoaSjZfyyvw7ELMCqAYTWJ61aV7K4lmd2hR11g9dpdnnpCb9f9j3zLZHRv7+"
"bIkZ//trah9AIkmrc/ZvNkC0Q==",
'encrypted': True,
'private_key':
'MDAwMDAwMDAwMDAwMDAwMLkWikOLScA/ZxlFSGU7dl//7Q/1gS9h7vqQyrd8DX+'
'jwcp7SwlJ1mkMwuraUaWLq9/LxiaGmqJBUZ50p77YVZbDycaCN1unBr1/i1q6RP'
'Ob2MNCaG8nyjxZhQai+V/2JmJ+UnFMp3nHany7F8/Hr0g=',
'public_key':
'xpub661MyMwAqRbcFwwe67Bfjd53h5WXmKm6tqfBJZZH3pQLoy8Nb6mKUMJFc7'
'UbpVNzmwFPN2evn3YHnig1pkKVYcvCV8owTd2yAcEkJfCX53g',
'address_generator': {'name': 'single-address'}
}
async def asyncSetUp(self):
self.ledger = ledger_class({
'db': ledger_class.database_class(':memory:'),
'headers': ledger_class.headers_class(':memory:'),
})
def test_encrypt_wallet(self):
account = self.ledger.account_class.from_dict(self.ledger, Wallet(), self.unencrypted_account)
account.init_vectors = {
'seed': self.init_vector,
'private_key': self.init_vector
}
self.assertFalse(account.encrypted)
self.assertIsNotNone(account.private_key)
account.encrypt(self.password)
self.assertTrue(account.encrypted)
self.assertEqual(account.seed, self.encrypted_account['seed'])
self.assertEqual(account.private_key_string, self.encrypted_account['private_key'])
self.assertIsNone(account.private_key)
self.assertEqual(account.to_dict()['seed'], self.encrypted_account['seed'])
self.assertEqual(account.to_dict()['private_key'], self.encrypted_account['private_key'])
account.decrypt(self.password)
self.assertEqual(account.init_vectors['private_key'], self.init_vector)
self.assertEqual(account.init_vectors['seed'], self.init_vector)
self.assertEqual(account.seed, self.unencrypted_account['seed'])
self.assertEqual(account.private_key.extended_key_string(), self.unencrypted_account['private_key'])
self.assertEqual(account.to_dict(encrypt_password=self.password)['seed'], self.encrypted_account['seed'])
self.assertEqual(account.to_dict(encrypt_password=self.password)['private_key'], self.encrypted_account['private_key'])
self.assertFalse(account.encrypted)
def test_decrypt_wallet(self):
account = self.ledger.account_class.from_dict(self.ledger, Wallet(), self.encrypted_account)
self.assertTrue(account.encrypted)
account.decrypt(self.password)
self.assertEqual(account.init_vectors['private_key'], self.init_vector)
self.assertEqual(account.init_vectors['seed'], self.init_vector)
self.assertFalse(account.encrypted)
self.assertEqual(account.seed, self.unencrypted_account['seed'])
self.assertEqual(account.private_key.extended_key_string(), self.unencrypted_account['private_key'])
self.assertEqual(account.to_dict(encrypt_password=self.password)['seed'], self.encrypted_account['seed'])
self.assertEqual(account.to_dict(encrypt_password=self.password)['private_key'], self.encrypted_account['private_key'])
self.assertEqual(account.to_dict()['seed'], self.unencrypted_account['seed'])
self.assertEqual(account.to_dict()['private_key'], self.unencrypted_account['private_key'])
def test_encrypt_decrypt_read_only_account(self):
account_data = self.unencrypted_account.copy()
del account_data['seed']
del account_data['private_key']
account = self.ledger.account_class.from_dict(self.ledger, Wallet(), account_data)
encrypted = account.to_dict('password')
self.assertFalse(encrypted['seed'])
self.assertFalse(encrypted['private_key'])
account.encrypt('password')
account.decrypt('password')


@ -1,160 +0,0 @@
import asyncio
import os
import tempfile
from binascii import hexlify
from torba.client.hash import sha256
from torba.testcase import AsyncioTestCase
from torba.coin.bitcoinsegwit import MainHeaders
def block_bytes(blocks):
return blocks * MainHeaders.header_size
class BitcoinHeadersTestCase(AsyncioTestCase):
HEADER_FILE = 'bitcoin_headers'
RETARGET_BLOCK = 32256 # difficulty: 1 -> 1.18
def setUp(self):
self.maxDiff = None
self.header_file_name = os.path.join(os.path.dirname(__file__), self.HEADER_FILE)
def get_bytes(self, upto: int = -1, after: int = 0) -> bytes:
with open(self.header_file_name, 'rb') as headers:
headers.seek(after, os.SEEK_SET)
return headers.read(upto)
async def get_headers(self, upto: int = -1):
h = MainHeaders(':memory:')
h.io.write(self.get_bytes(upto))
return h
class BasicHeadersTests(BitcoinHeadersTestCase):
async def test_serialization(self):
h = await self.get_headers()
self.assertDictEqual(h[0], {
'bits': 486604799,
'block_height': 0,
'merkle_root': b'4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b',
'nonce': 2083236893,
'prev_block_hash': b'0000000000000000000000000000000000000000000000000000000000000000',
'timestamp': 1231006505,
'version': 1
})
self.assertDictEqual(h[self.RETARGET_BLOCK-1], {
'bits': 486604799,
'block_height': 32255,
'merkle_root': b'89b4f223789e40b5b475af6483bb05bceda54059e17d2053334b358f6bb310ac',
'nonce': 312762301,
'prev_block_hash': b'000000006baebaa74cecde6c6787c26ee0a616a3c333261bff36653babdac149',
'timestamp': 1262152739,
'version': 1
})
self.assertDictEqual(h[self.RETARGET_BLOCK], {
'bits': 486594666,
'block_height': 32256,
'merkle_root': b'64b5e5f5a262f47af443a0120609206a3305877693edfe03e994f20a024ab627',
'nonce': 121087187,
'prev_block_hash': b'00000000984f962134a7291e3693075ae03e521f0ee33378ec30a334d860034b',
'timestamp': 1262153464,
'version': 1
})
self.assertDictEqual(h[self.RETARGET_BLOCK+1], {
'bits': 486594666,
'block_height': 32257,
'merkle_root': b'4d1488981f08b3037878193297dbac701a2054e0f803d4424fe6a4d763d62334',
'nonce': 274675219,
'prev_block_hash': b'000000004f2886a170adb7204cb0c7a824217dd24d11a74423d564c4e0904967',
'timestamp': 1262154352,
'version': 1
})
self.assertEqual(
h.serialize(h[0]),
h.get_raw_header(0)
)
self.assertEqual(
h.serialize(h[self.RETARGET_BLOCK]),
h.get_raw_header(self.RETARGET_BLOCK)
)
async def test_connect_from_genesis_to_3000_past_first_chunk_at_2016(self):
headers = MainHeaders(':memory:')
self.assertEqual(headers.height, -1)
await headers.connect(0, self.get_bytes(block_bytes(3001)))
self.assertEqual(headers.height, 3000)
async def test_connect_9_blocks_passing_a_retarget_at_32256(self):
retarget = block_bytes(self.RETARGET_BLOCK-5)
headers = await self.get_headers(upto=retarget)
remainder = self.get_bytes(after=retarget)
self.assertEqual(headers.height, 32250)
await headers.connect(len(headers), remainder)
self.assertEqual(headers.height, 32259)
async def test_bounds(self):
headers = MainHeaders(':memory:')
await headers.connect(0, self.get_bytes(block_bytes(3001)))
self.assertEqual(headers.height, 3000)
with self.assertRaises(IndexError):
_ = headers[3001]
with self.assertRaises(IndexError):
_ = headers[-1]
self.assertIsNotNone(headers[3000])
self.assertIsNotNone(headers[0])
async def test_repair(self):
headers = MainHeaders(':memory:')
await headers.connect(0, self.get_bytes(block_bytes(3001)))
self.assertEqual(headers.height, 3000)
await headers.repair()
self.assertEqual(headers.height, 3000)
# corrupt the middle of it
headers.io.seek(block_bytes(1500))
headers.io.write(b"wtf")
await headers.repair()
self.assertEqual(headers.height, 1499)
self.assertEqual(len(headers), 1500)
# corrupt by appending
headers.io.seek(block_bytes(len(headers)))
headers.io.write(b"appending")
await headers.repair()
self.assertEqual(headers.height, 1499)
await headers.connect(len(headers), self.get_bytes(block_bytes(3001 - 1500), after=block_bytes(1500)))
self.assertEqual(headers.height, 3000)
async def test_checkpointed_writer(self):
headers = MainHeaders(':memory:')
headers.checkpoint = 100, hexlify(sha256(self.get_bytes(block_bytes(100))))
genblocks = lambda start, end: self.get_bytes(block_bytes(end - start), block_bytes(start))
async with headers.checkpointed_connector() as buff:
buff.write(genblocks(0, 10))
self.assertEqual(len(headers), 10)
async with headers.checkpointed_connector() as buff:
buff.write(genblocks(10, 100))
self.assertEqual(len(headers), 100)
headers = MainHeaders(':memory:')
async with headers.checkpointed_connector() as buff:
buff.write(genblocks(0, 300))
self.assertEqual(len(headers), 300)
async def test_concurrency(self):
BLOCKS = 30
headers_temporary_file = tempfile.mktemp()
headers = MainHeaders(headers_temporary_file)
await headers.open()
self.addCleanup(os.remove, headers_temporary_file)
async def writer():
for block_index in range(BLOCKS):
await headers.connect(block_index, self.get_bytes(block_bytes(block_index + 1), block_bytes(block_index)))
async def reader():
for block_index in range(BLOCKS):
while len(headers) < block_index:
await asyncio.sleep(0.000001)
assert headers[block_index]['block_height'] == block_index
reader_task = asyncio.create_task(reader())
await writer()
await reader_task


@ -1,170 +0,0 @@
import os
from binascii import hexlify
from torba.coin.bitcoinsegwit import MainNetLedger
from torba.client.wallet import Wallet
from client_tests.unit.test_transaction import get_transaction, get_output
from client_tests.unit.test_headers import BitcoinHeadersTestCase, block_bytes
class MockNetwork:
def __init__(self, history, transaction):
self.history = history
self.transaction = transaction
self.address = None
self.get_history_called = []
self.get_transaction_called = []
self.is_connected = False
def retriable_call(self, function, *args, **kwargs):
return function(*args, **kwargs)
async def get_history(self, address):
self.get_history_called.append(address)
self.address = address
return self.history
async def get_merkle(self, txid, height):
return {'merkle': ['abcd01'], 'pos': 1}
async def get_transaction(self, tx_hash, _=None):
self.get_transaction_called.append(tx_hash)
return self.transaction[tx_hash]
class LedgerTestCase(BitcoinHeadersTestCase):
async def asyncSetUp(self):
self.ledger = MainNetLedger({
'db': MainNetLedger.database_class(':memory:'),
'headers': MainNetLedger.headers_class(':memory:')
})
await self.ledger.db.open()
async def asyncTearDown(self):
await self.ledger.db.close()
def make_header(self, **kwargs):
header = {
'bits': 486604799,
'block_height': 0,
'merkle_root': b'4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b',
'nonce': 2083236893,
'prev_block_hash': b'0000000000000000000000000000000000000000000000000000000000000000',
'timestamp': 1231006505,
'version': 1
}
header.update(kwargs)
header['merkle_root'] = header['merkle_root'].ljust(64, b'a')
header['prev_block_hash'] = header['prev_block_hash'].ljust(64, b'0')
return self.ledger.headers.serialize(header)
def add_header(self, **kwargs):
serialized = self.make_header(**kwargs)
self.ledger.headers.io.seek(0, os.SEEK_END)
self.ledger.headers.io.write(serialized)
self.ledger.headers._size = None
class TestSynchronization(LedgerTestCase):
async def test_update_history(self):
account = self.ledger.account_class.generate(self.ledger, Wallet(), "torba")
address = await account.receiving.get_or_create_usable_address()
address_details = await self.ledger.db.get_address(address=address)
self.assertIsNone(address_details['history'])
self.add_header(block_height=0, merkle_root=b'abcd04')
self.add_header(block_height=1, merkle_root=b'abcd04')
self.add_header(block_height=2, merkle_root=b'abcd04')
self.add_header(block_height=3, merkle_root=b'abcd04')
self.ledger.network = MockNetwork([
{'tx_hash': 'abcd01', 'height': 0},
{'tx_hash': 'abcd02', 'height': 1},
{'tx_hash': 'abcd03', 'height': 2},
], {
'abcd01': hexlify(get_transaction(get_output(1)).raw),
'abcd02': hexlify(get_transaction(get_output(2)).raw),
'abcd03': hexlify(get_transaction(get_output(3)).raw),
})
await self.ledger.update_history(address, '')
self.assertListEqual(self.ledger.network.get_history_called, [address])
self.assertListEqual(self.ledger.network.get_transaction_called, ['abcd01', 'abcd02', 'abcd03'])
address_details = await self.ledger.db.get_address(address=address)
self.assertEqual(
address_details['history'],
'252bda9b22cc902ca2aa2de3548ee8baf06b8501ff7bfb3b0b7d980dbd1bf792:0:'
'ab9c0654dd484ac20437030f2034e25dcb29fc507e84b91138f80adc3af738f9:1:'
'a2ae3d1db3c727e7d696122cab39ee20a7f81856dab7019056dd539f38c548a0:2:'
)
self.ledger.network.get_history_called = []
self.ledger.network.get_transaction_called = []
await self.ledger.update_history(address, '')
self.assertListEqual(self.ledger.network.get_history_called, [address])
self.assertListEqual(self.ledger.network.get_transaction_called, [])
self.ledger.network.history.append({'tx_hash': 'abcd04', 'height': 3})
self.ledger.network.transaction['abcd04'] = hexlify(get_transaction(get_output(4)).raw)
self.ledger.network.get_history_called = []
self.ledger.network.get_transaction_called = []
await self.ledger.update_history(address, '')
self.assertListEqual(self.ledger.network.get_history_called, [address])
self.assertListEqual(self.ledger.network.get_transaction_called, ['abcd04'])
address_details = await self.ledger.db.get_address(address=address)
self.assertEqual(
address_details['history'],
'252bda9b22cc902ca2aa2de3548ee8baf06b8501ff7bfb3b0b7d980dbd1bf792:0:'
'ab9c0654dd484ac20437030f2034e25dcb29fc507e84b91138f80adc3af738f9:1:'
'a2ae3d1db3c727e7d696122cab39ee20a7f81856dab7019056dd539f38c548a0:2:'
'047cf1d53ef68f0fd586d46f90c09ff8e57a4180f67e7f4b8dd0135c3741e828:3:'
)
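
The history string asserted above is the ledger's flat storage format: each entry is a transaction id followed by its height, both terminated by ':'. A minimal sketch of round-tripping that format (the helper names here are hypothetical, not part of the ledger API):

def parse_history(history: str):
    # The trailing ':' leaves one empty element after split(); drop it.
    parts = history.split(':')[:-1]
    return [(parts[i], int(parts[i + 1])) for i in range(0, len(parts), 2)]

def encode_history(pairs):
    # Inverse operation: concatenate each (txid, height) pair as 'txid:height:'.
    return ''.join(f'{txid}:{height}:' for txid, height in pairs)

assert parse_history('aa:0:bb:1:') == [('aa', 0), ('bb', 1)]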

class MocHeaderNetwork(MockNetwork):
def __init__(self, responses):
super().__init__(None, None)
self.responses = responses
async def get_headers(self, height, blocks):
return self.responses[height]

class BlockchainReorganizationTests(LedgerTestCase):
async def test_1_block_reorganization(self):
self.ledger.network = MocHeaderNetwork({
20: {'height': 20, 'count': 5, 'hex': hexlify(
self.get_bytes(after=block_bytes(20), upto=block_bytes(5))
)},
25: {'height': 25, 'count': 0, 'hex': b''}
})
headers = self.ledger.headers
await headers.connect(0, self.get_bytes(upto=block_bytes(20)))
self.add_header(block_height=len(headers))
self.assertEqual(headers.height, 20)
await self.ledger.receive_header([{
'height': 21, 'hex': hexlify(self.make_header(block_height=21))
}])
async def test_3_block_reorganization(self):
self.ledger.network = MocHeaderNetwork({
20: {'height': 20, 'count': 5, 'hex': hexlify(
self.get_bytes(after=block_bytes(20), upto=block_bytes(5))
)},
21: {'height': 21, 'count': 1, 'hex': hexlify(self.make_header(block_height=21))},
22: {'height': 22, 'count': 1, 'hex': hexlify(self.make_header(block_height=22))},
25: {'height': 25, 'count': 0, 'hex': b''}
})
headers = self.ledger.headers
await headers.connect(0, self.get_bytes(upto=block_bytes(20)))
self.add_header(block_height=len(headers))
self.add_header(block_height=len(headers))
self.add_header(block_height=len(headers))
self.assertEqual(headers.height, 22)
await self.ledger.receive_header(({
'height': 23, 'hex': hexlify(self.make_header(block_height=23))
},))


@ -1,218 +0,0 @@
import unittest
from binascii import hexlify, unhexlify
from torba.client.bcd_data_stream import BCDataStream
from torba.client.basescript import Template, ParseError, tokenize, push_data
from torba.client.basescript import PUSH_SINGLE, PUSH_INTEGER, PUSH_MANY, OP_HASH160, OP_EQUAL
from torba.client.basescript import BaseInputScript, BaseOutputScript

def parse(opcodes, source):
template = Template('test', opcodes)
s = BCDataStream()
for t in source:
if isinstance(t, bytes):
s.write_many(push_data(t))
elif isinstance(t, int):
s.write_uint8(t)
else:
raise ValueError()
s.reset()
return template.parse(tokenize(s))

class TestScriptTemplates(unittest.TestCase):
def test_push_data(self):
self.assertDictEqual(parse(
(PUSH_SINGLE('script_hash'),),
(b'abcdef',)
), {
'script_hash': b'abcdef'
}
)
self.assertDictEqual(parse(
(PUSH_SINGLE('first'), PUSH_INTEGER('rating')),
(b'Satoshi', (1000).to_bytes(2, 'little'))
), {
'first': b'Satoshi',
'rating': 1000,
}
)
self.assertDictEqual(parse(
(OP_HASH160, PUSH_SINGLE('script_hash'), OP_EQUAL),
(OP_HASH160, b'abcdef', OP_EQUAL)
), {
'script_hash': b'abcdef'
}
)
def test_push_data_many(self):
self.assertDictEqual(parse(
(PUSH_MANY('names'),),
(b'amit',)
), {
'names': [b'amit']
}
)
self.assertDictEqual(parse(
(PUSH_MANY('names'),),
(b'jeremy', b'amit', b'victor')
), {
'names': [b'jeremy', b'amit', b'victor']
}
)
self.assertDictEqual(parse(
(OP_HASH160, PUSH_MANY('names'), OP_EQUAL),
(OP_HASH160, b'grin', b'jack', OP_EQUAL)
), {
'names': [b'grin', b'jack']
}
)
def test_push_data_mixed(self):
self.assertDictEqual(parse(
(PUSH_SINGLE('CEO'), PUSH_MANY('Devs'), PUSH_SINGLE('CTO'), PUSH_SINGLE('State')),
(b'jeremy', b'lex', b'amit', b'victor', b'jack', b'grin', b'NH')
), {
'CEO': b'jeremy',
'CTO': b'grin',
'Devs': [b'lex', b'amit', b'victor', b'jack'],
'State': b'NH'
}
)
def test_push_data_many_separated(self):
self.assertDictEqual(parse(
(PUSH_MANY('Chiefs'), OP_HASH160, PUSH_MANY('Devs')),
(b'jeremy', b'grin', OP_HASH160, b'lex', b'jack')
), {
'Chiefs': [b'jeremy', b'grin'],
'Devs': [b'lex', b'jack']
}
)
def test_push_data_many_not_separated(self):
with self.assertRaisesRegex(ParseError, 'consecutive PUSH_MANY'):
parse((PUSH_MANY('Chiefs'), PUSH_MANY('Devs')), (b'jeremy', b'grin', b'lex', b'jack'))
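
The consecutive PUSH_MANY case has to fail because nothing in the byte stream marks where one list ends and the next begins. A quick illustration of the ambiguity (plain Python, independent of the parser):

# Four pushed values and two adjacent PUSH_MANY slots admit every split,
# so the template cannot parse them deterministically:
values = [b'jeremy', b'grin', b'lex', b'jack']
splits = [(values[:i], values[i:]) for i in range(len(values) + 1)]
print(len(splits))  # 5 equally valid (Chiefs, Devs) assignments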

class TestRedeemPubKeyHash(unittest.TestCase):
def redeem_pubkey_hash(self, sig, pubkey):
        # this checks that the factory function correctly sets up the script
src1 = BaseInputScript.redeem_pubkey_hash(unhexlify(sig), unhexlify(pubkey))
self.assertEqual(src1.template.name, 'pubkey_hash')
self.assertEqual(hexlify(src1.values['signature']), sig)
self.assertEqual(hexlify(src1.values['pubkey']), pubkey)
# now we test that it will round trip
src2 = BaseInputScript(src1.source)
self.assertEqual(src2.template.name, 'pubkey_hash')
self.assertEqual(hexlify(src2.values['signature']), sig)
self.assertEqual(hexlify(src2.values['pubkey']), pubkey)
return hexlify(src1.source)
def test_redeem_pubkey_hash_1(self):
self.assertEqual(
self.redeem_pubkey_hash(
b'30450221009dc93f25184a8d483745cd3eceff49727a317c9bfd8be8d3d04517e9cdaf8dd502200e'
b'02dc5939cad9562d2b1f303f185957581c4851c98d497af281118825e18a8301',
b'025415a06514230521bff3aaface31f6db9d9bbc39bf1ca60a189e78731cfd4e1b'
),
b'4830450221009dc93f25184a8d483745cd3eceff49727a317c9bfd8be8d3d04517e9cdaf8dd502200e02d'
b'c5939cad9562d2b1f303f185957581c4851c98d497af281118825e18a830121025415a06514230521bff3'
b'aaface31f6db9d9bbc39bf1ca60a189e78731cfd4e1b'
)

class TestRedeemScriptHash(unittest.TestCase):
def redeem_script_hash(self, sigs, pubkeys):
        # this checks that the factory function correctly sets up the script
src1 = BaseInputScript.redeem_script_hash(
[unhexlify(sig) for sig in sigs],
[unhexlify(pubkey) for pubkey in pubkeys]
)
subscript1 = src1.values['script']
self.assertEqual(src1.template.name, 'script_hash')
self.assertListEqual([hexlify(v) for v in src1.values['signatures']], sigs)
self.assertListEqual([hexlify(p) for p in subscript1.values['pubkeys']], pubkeys)
self.assertEqual(subscript1.values['signatures_count'], len(sigs))
self.assertEqual(subscript1.values['pubkeys_count'], len(pubkeys))
# now we test that it will round trip
src2 = BaseInputScript(src1.source)
subscript2 = src2.values['script']
self.assertEqual(src2.template.name, 'script_hash')
self.assertListEqual([hexlify(v) for v in src2.values['signatures']], sigs)
self.assertListEqual([hexlify(p) for p in subscript2.values['pubkeys']], pubkeys)
self.assertEqual(subscript2.values['signatures_count'], len(sigs))
self.assertEqual(subscript2.values['pubkeys_count'], len(pubkeys))
return hexlify(src1.source)
def test_redeem_script_hash_1(self):
self.assertEqual(
self.redeem_script_hash([
b'3045022100fec82ed82687874f2a29cbdc8334e114af645c45298e85bb1efe69fcf15c617a0220575'
b'e40399f9ada388d8e522899f4ec3b7256896dd9b02742f6567d960b613f0401',
b'3044022024890462f731bd1a42a4716797bad94761fc4112e359117e591c07b8520ea33b02201ac68'
b'9e35c4648e6beff1d42490207ba14027a638a62663b2ee40153299141eb01',
b'30450221009910823e0142967a73c2d16c1560054d71c0625a385904ba2f1f53e0bc1daa8d02205cd'
b'70a89c6cf031a8b07d1d5eb0d65d108c4d49c2d403f84fb03ad3dc318777a01'
], [
b'0372ba1fd35e5f1b1437cba0c4ebfc4025b7349366f9f9c7c8c4b03a47bd3f68a4',
b'03061d250182b2db1ba144167fd8b0ef3fe0fc3a2fa046958f835ffaf0dfdb7692',
b'02463bfbc1eaec74b5c21c09239ae18dbf6fc07833917df10d0b43e322810cee0c',
b'02fa6a6455c26fb516cfa85ea8de81dd623a893ffd579ee2a00deb6cdf3633d6bb',
b'0382910eae483ce4213d79d107bfc78f3d77e2a31ea597be45256171ad0abeaa89'
]),
b'00483045022100fec82ed82687874f2a29cbdc8334e114af645c45298e85bb1efe69fcf15c617a0220575e'
b'40399f9ada388d8e522899f4ec3b7256896dd9b02742f6567d960b613f0401473044022024890462f731bd'
b'1a42a4716797bad94761fc4112e359117e591c07b8520ea33b02201ac689e35c4648e6beff1d42490207ba'
b'14027a638a62663b2ee40153299141eb014830450221009910823e0142967a73c2d16c1560054d71c0625a'
b'385904ba2f1f53e0bc1daa8d02205cd70a89c6cf031a8b07d1d5eb0d65d108c4d49c2d403f84fb03ad3dc3'
b'18777a014cad53210372ba1fd35e5f1b1437cba0c4ebfc4025b7349366f9f9c7c8c4b03a47bd3f68a42103'
b'061d250182b2db1ba144167fd8b0ef3fe0fc3a2fa046958f835ffaf0dfdb76922102463bfbc1eaec74b5c2'
b'1c09239ae18dbf6fc07833917df10d0b43e322810cee0c2102fa6a6455c26fb516cfa85ea8de81dd623a89'
b'3ffd579ee2a00deb6cdf3633d6bb210382910eae483ce4213d79d107bfc78f3d77e2a31ea597be45256171'
b'ad0abeaa8955ae'
)
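
The long expected value above is the standard P2SH multisig redemption layout: a dummy OP_0 (consumed by the historical off-by-one in OP_CHECKMULTISIG), one push per DER signature, and a final OP_PUSHDATA1 push holding the 3-of-5 redeem script (OP_3, five 33-byte pubkey pushes, OP_5, OP_CHECKMULTISIG). A minimal sketch that splits such a scriptSig back apart, assuming canonical pushes only:

def read_push(script: bytes, i: int):
    # Read one push operation starting at offset i; return (data, next_offset).
    op = script[i]
    if op == 0x4c:  # OP_PUSHDATA1: a one-byte length follows
        length = script[i + 1]
        return script[i + 2:i + 2 + length], i + 2 + length
    return script[i + 1:i + 1 + op], i + 1 + op  # direct push of `op` bytes

def split_multisig_script_sig(script: bytes):
    # OP_0, then N signature pushes, then one push holding the redeem script.
    assert script[0] == 0x00, 'expected the dummy OP_0'
    pushes, i = [], 1
    while i < len(script):
        data, i = read_push(script, i)
        pushes.append(data)
    *signatures, redeem_script = pushes
    return signatures, redeem_script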

class TestPayPubKeyHash(unittest.TestCase):
def pay_pubkey_hash(self, pubkey_hash):
        # this checks that the factory function correctly sets up the script
src1 = BaseOutputScript.pay_pubkey_hash(unhexlify(pubkey_hash))
self.assertEqual(src1.template.name, 'pay_pubkey_hash')
self.assertEqual(hexlify(src1.values['pubkey_hash']), pubkey_hash)
# now we test that it will round trip
src2 = BaseOutputScript(src1.source)
self.assertEqual(src2.template.name, 'pay_pubkey_hash')
self.assertEqual(hexlify(src2.values['pubkey_hash']), pubkey_hash)
return hexlify(src1.source)
def test_pay_pubkey_hash_1(self):
self.assertEqual(
self.pay_pubkey_hash(b'64d74d12acc93ba1ad495e8d2d0523252d664f4d'),
b'76a91464d74d12acc93ba1ad495e8d2d0523252d664f4d88ac'
)

class TestPayScriptHash(unittest.TestCase):
def pay_script_hash(self, script_hash):
        # this checks that the factory function correctly sets up the script
src1 = BaseOutputScript.pay_script_hash(unhexlify(script_hash))
self.assertEqual(src1.template.name, 'pay_script_hash')
self.assertEqual(hexlify(src1.values['script_hash']), script_hash)
# now we test that it will round trip
src2 = BaseOutputScript(src1.source)
self.assertEqual(src2.template.name, 'pay_script_hash')
self.assertEqual(hexlify(src2.values['script_hash']), script_hash)
return hexlify(src1.source)
    def test_pay_script_hash_1(self):
self.assertEqual(
self.pay_script_hash(b'63d65a2ee8c44426d06050cfd71c0f0ff3fc41ac'),
b'a91463d65a2ee8c44426d06050cfd71c0f0ff3fc41ac87'
)
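
Both expected values follow the fixed standard-script encodings, which can be written down directly. A sketch for reference, taking the raw 20-byte hash rather than the hex form the tests pass through unhexlify:

def p2pkh_script(pubkey_hash: bytes) -> bytes:
    # OP_DUP OP_HASH160 <20-byte hash> OP_EQUALVERIFY OP_CHECKSIG
    return b'\x76\xa9\x14' + pubkey_hash + b'\x88\xac'

def p2sh_script(script_hash: bytes) -> bytes:
    # OP_HASH160 <20-byte hash> OP_EQUAL
    return b'\xa9\x14' + script_hash + b'\x87'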


@ -1,345 +0,0 @@
import unittest
from binascii import hexlify, unhexlify
from itertools import cycle
from torba.testcase import AsyncioTestCase
from torba.coin.bitcoinsegwit import MainNetLedger as ledger_class
from torba.client.wallet import Wallet
from torba.client.constants import CENT, COIN

NULL_HASH = b'\x00'*32
FEE_PER_BYTE = 50
FEE_PER_CHAR = 200000

def get_output(amount=CENT, pubkey_hash=NULL_HASH, height=-2):
    return ledger_class.transaction_class(height=height) \
        .add_outputs([ledger_class.transaction_class.output_class.pay_pubkey_hash(amount, pubkey_hash)]) \
        .outputs[0]

def get_input(amount=CENT, pubkey_hash=NULL_HASH):
    return ledger_class.transaction_class.input_class.spend(get_output(amount, pubkey_hash))

def get_transaction(txo=None):
    return ledger_class.transaction_class() \
        .add_inputs([get_input()]) \
        .add_outputs([txo or ledger_class.transaction_class.output_class.pay_pubkey_hash(CENT, NULL_HASH)])

class TestSizeAndFeeEstimation(AsyncioTestCase):
async def asyncSetUp(self):
self.ledger = ledger_class({
'db': ledger_class.database_class(':memory:'),
'headers': ledger_class.headers_class(':memory:'),
})
def test_output_size_and_fee(self):
txo = get_output()
self.assertEqual(txo.size, 46)
self.assertEqual(txo.get_fee(self.ledger), 46 * FEE_PER_BYTE)
def test_input_size_and_fee(self):
txi = get_input()
self.assertEqual(txi.size, 148)
self.assertEqual(txi.get_fee(self.ledger), 148 * FEE_PER_BYTE)
def test_transaction_size_and_fee(self):
tx = get_transaction()
self.assertEqual(tx.size, 204)
self.assertEqual(tx.base_size, tx.size - tx.inputs[0].size - tx.outputs[0].size)
self.assertEqual(tx.get_base_fee(self.ledger), FEE_PER_BYTE * tx.base_size)
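
The constants asserted here decompose cleanly: a transaction with no inputs or outputs costs 10 bytes (4-byte version, two 1-byte varint counts, 4-byte locktime), and 204 = 10 + 148 + 46. The 46-byte output is larger than the usual 34 bytes for P2PKH because these fixtures push the 32-byte NULL_HASH instead of a 20-byte hash160. A sanity check of the arithmetic (an assumed breakdown, not taken from the library):

FEE_PER_BYTE = 50

base_size = 204 - 148 - 46
assert base_size == 10              # bare envelope: version + counts + locktime
assert 204 * FEE_PER_BYTE == 10200  # whole-transaction fee at 50 sat/byte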

class TestAccountBalanceImpactFromTransaction(unittest.TestCase):
def test_is_my_account_not_set(self):
tx = get_transaction()
with self.assertRaisesRegex(ValueError, "Cannot access net_account_balance"):
_ = tx.net_account_balance
tx.inputs[0].txo_ref.txo.is_my_account = True
with self.assertRaisesRegex(ValueError, "Cannot access net_account_balance"):
_ = tx.net_account_balance
tx.outputs[0].is_my_account = True
# all inputs/outputs are set now so it should work
_ = tx.net_account_balance
def test_paying_from_my_account_to_other_account(self):
tx = ledger_class.transaction_class() \
.add_inputs([get_input(300*CENT)]) \
.add_outputs([get_output(190*CENT, NULL_HASH),
get_output(100*CENT, NULL_HASH)])
tx.inputs[0].txo_ref.txo.is_my_account = True
tx.outputs[0].is_my_account = False
tx.outputs[1].is_my_account = True
self.assertEqual(tx.net_account_balance, -200*CENT)
def test_paying_from_other_account_to_my_account(self):
tx = ledger_class.transaction_class() \
.add_inputs([get_input(300*CENT)]) \
.add_outputs([get_output(190*CENT, NULL_HASH),
get_output(100*CENT, NULL_HASH)])
tx.inputs[0].txo_ref.txo.is_my_account = False
tx.outputs[0].is_my_account = True
tx.outputs[1].is_my_account = False
self.assertEqual(tx.net_account_balance, 190*CENT)
def test_paying_from_my_account_to_my_account(self):
tx = ledger_class.transaction_class() \
.add_inputs([get_input(300*CENT)]) \
.add_outputs([get_output(190*CENT, NULL_HASH),
get_output(100*CENT, NULL_HASH)])
tx.inputs[0].txo_ref.txo.is_my_account = True
tx.outputs[0].is_my_account = True
tx.outputs[1].is_my_account = True
self.assertEqual(tx.net_account_balance, -10*CENT) # lost to fee
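
All three cases are consistent with net_account_balance being simply the sum of my outputs minus the sum of my inputs; the fee never appears explicitly because inputs already exceed outputs by the fee. A sketch of that arithmetic (the function here is illustrative, not the ledger's implementation):

CENT = 1000000

def net_balance(my_inputs, my_outputs):
    # Coins arriving at my addresses minus coins leaving my addresses.
    return sum(my_outputs) - sum(my_inputs)

assert net_balance([300*CENT], [100*CENT]) == -200*CENT           # 190 paid away + 10 fee
assert net_balance([], [190*CENT]) == 190*CENT                    # received from elsewhere
assert net_balance([300*CENT], [190*CENT, 100*CENT]) == -10*CENT  # only the fee is lost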

class TestTransactionSerialization(unittest.TestCase):
def test_genesis_transaction(self):
raw = unhexlify(
'01000000010000000000000000000000000000000000000000000000000000000000000000ffffffff4d04'
'ffff001d0104455468652054696d65732030332f4a616e2f32303039204368616e63656c6c6f72206f6e20'
'6272696e6b206f66207365636f6e64206261696c6f757420666f722062616e6b73ffffffff0100f2052a01'
'000000434104678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4c'
'ef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5fac00000000'
)
tx = ledger_class.transaction_class(raw)
self.assertEqual(tx.version, 1)
self.assertEqual(tx.locktime, 0)
self.assertEqual(len(tx.inputs), 1)
self.assertEqual(len(tx.outputs), 1)
coinbase = tx.inputs[0]
        self.assertTrue(coinbase.txo_ref.is_null)
self.assertEqual(coinbase.txo_ref.position, 0xFFFFFFFF)
self.assertEqual(coinbase.sequence, 4294967295)
self.assertIsNotNone(coinbase.coinbase)
self.assertIsNone(coinbase.script)
self.assertEqual(
coinbase.coinbase[8:],
b'The Times 03/Jan/2009 Chancellor on brink of second bailout for banks'
)
out = tx.outputs[0]
self.assertEqual(out.amount, 5000000000)
self.assertEqual(out.position, 0)
self.assertTrue(out.script.is_pay_pubkey)
self.assertFalse(out.script.is_pay_pubkey_hash)
self.assertFalse(out.script.is_pay_script_hash)
tx._reset()
self.assertEqual(tx.raw, raw)
def test_coinbase_transaction(self):
raw = unhexlify(
'01000000010000000000000000000000000000000000000000000000000000000000000000ffffffff4e03'
'1f5a070473319e592f4254432e434f4d2f4e59412ffabe6d6dcceb2a9d0444c51cabc4ee97a1a000036ca0'
'cb48d25b94b78c8367d8b868454b0100000000000000c0309b21000008c5f8f80000ffffffff0291920b5d'
'0000000017a914e083685a1097ce1ea9e91987ab9e94eae33d8a13870000000000000000266a24aa21a9ed'
'e6c99265a6b9e1d36c962fda0516b35709c49dc3b8176fa7e5d5f1f6197884b400000000'
)
tx = ledger_class.transaction_class(raw)
self.assertEqual(tx.version, 1)
self.assertEqual(tx.locktime, 0)
self.assertEqual(len(tx.inputs), 1)
self.assertEqual(len(tx.outputs), 2)
coinbase = tx.inputs[0]
self.assertTrue(coinbase.txo_ref.is_null)
self.assertEqual(coinbase.txo_ref.position, 0xFFFFFFFF)
self.assertEqual(coinbase.sequence, 4294967295)
self.assertIsNotNone(coinbase.coinbase)
self.assertIsNone(coinbase.script)
self.assertEqual(coinbase.coinbase[9:22], b'/BTC.COM/NYA/')
out = tx.outputs[0]
self.assertEqual(out.amount, 1561039505)
self.assertEqual(out.position, 0)
self.assertFalse(out.script.is_pay_pubkey)
self.assertFalse(out.script.is_pay_pubkey_hash)
self.assertTrue(out.script.is_pay_script_hash)
self.assertFalse(out.script.is_return_data)
out1 = tx.outputs[1]
self.assertEqual(out1.amount, 0)
self.assertEqual(out1.position, 1)
self.assertEqual(
hexlify(out1.script.values['data']),
b'aa21a9ede6c99265a6b9e1d36c962fda0516b35709c49dc3b8176fa7e5d5f1f6197884b4'
)
self.assertTrue(out1.script.is_return_data)
self.assertFalse(out1.script.is_pay_pubkey)
self.assertFalse(out1.script.is_pay_pubkey_hash)
self.assertFalse(out1.script.is_pay_script_hash)
tx._reset()
self.assertEqual(tx.raw, raw)

class TestTransactionSigning(AsyncioTestCase):
async def asyncSetUp(self):
self.ledger = ledger_class({
'db': ledger_class.database_class(':memory:'),
'headers': ledger_class.headers_class(':memory:'),
})
await self.ledger.db.open()
async def asyncTearDown(self):
await self.ledger.db.close()
async def test_sign(self):
account = self.ledger.account_class.from_dict(
self.ledger, Wallet(), {
"seed": "carbon smart garage balance margin twelve chest sword "
"toast envelope bottom stomach absent"
}
)
await account.ensure_address_gap()
address1, address2 = await account.receiving.get_addresses(limit=2)
pubkey_hash1 = self.ledger.address_to_hash160(address1)
pubkey_hash2 = self.ledger.address_to_hash160(address2)
tx_class = ledger_class.transaction_class
tx = tx_class() \
.add_inputs([tx_class.input_class.spend(get_output(2*COIN, pubkey_hash1))]) \
            .add_outputs([tx_class.output_class.pay_pubkey_hash(int(1.9*COIN), pubkey_hash2)])
        await tx.sign([account])
self.assertEqual(
hexlify(tx.inputs[0].script.values['signature']),
b'304402205a1df8cd5d2d2fa5934b756883d6c07e4f83e1350c740992d47a12422'
b'226aaa202200098ac8675827aea2b0d6f0e49566143a95d523e311d342172cd99e2021e47cb01'
)

class TransactionIOBalancing(AsyncioTestCase):
async def asyncSetUp(self):
self.ledger = ledger_class({
'db': ledger_class.database_class(':memory:'),
'headers': ledger_class.headers_class(':memory:'),
})
await self.ledger.db.open()
self.account = self.ledger.account_class.from_dict(
self.ledger, Wallet(), {
"seed": "carbon smart garage balance margin twelve chest sword "
"toast envelope bottom stomach absent"
}
)
addresses = await self.account.ensure_address_gap()
self.pubkey_hash = [self.ledger.address_to_hash160(a) for a in addresses]
self.hash_cycler = cycle(self.pubkey_hash)
async def asyncTearDown(self):
await self.ledger.db.close()
def txo(self, amount, address=None):
return get_output(int(amount*COIN), address or next(self.hash_cycler))
def txi(self, txo):
return ledger_class.transaction_class.input_class.spend(txo)
def tx(self, inputs, outputs):
return ledger_class.transaction_class.create(inputs, outputs, [self.account], self.account)
async def create_utxos(self, amounts):
utxos = [self.txo(amount) for amount in amounts]
self.funding_tx = ledger_class.transaction_class(is_verified=True) \
.add_inputs([self.txi(self.txo(sum(amounts)+0.1))]) \
.add_outputs(utxos)
await self.ledger.db.insert_transaction(self.funding_tx)
for utxo in utxos:
await self.ledger.db.save_transaction_io(
self.funding_tx,
self.ledger.hash160_to_address(utxo.script.values['pubkey_hash']),
utxo.script.values['pubkey_hash'], ''
)
return utxos
@staticmethod
def inputs(tx):
return [round(i.amount/COIN, 2) for i in tx.inputs]
@staticmethod
def outputs(tx):
return [round(o.amount/COIN, 2) for o in tx.outputs]
async def test_basic_use_cases(self):
self.ledger.fee_per_byte = int(.01*CENT)
# available UTXOs for filling missing inputs
utxos = await self.create_utxos([
1, 1, 3, 5, 10
])
# pay 3 coins (3.02 w/ fees)
tx = await self.tx(
[], # inputs
[self.txo(3)] # outputs
)
# best UTXO match is 5 (as UTXO 3 will be short 0.02 to cover fees)
self.assertListEqual(self.inputs(tx), [5])
# a change of 1.98 is added to reach balance
self.assertListEqual(self.outputs(tx), [3, 1.98])
await self.ledger.release_outputs(utxos)
# pay 2.98 coins (3.00 w/ fees)
tx = await self.tx(
[], # inputs
[self.txo(2.98)] # outputs
)
# best UTXO match is 3 and no change is needed
self.assertListEqual(self.inputs(tx), [3])
self.assertListEqual(self.outputs(tx), [2.98])
await self.ledger.release_outputs(utxos)
# supplied input and output, but input is not enough to cover output
tx = await self.tx(
[self.txi(self.txo(10))], # inputs
[self.txo(11)] # outputs
)
# additional input is chosen (UTXO 3)
self.assertListEqual([10, 3], self.inputs(tx))
# change is now needed to consume extra input
self.assertListEqual([11, 1.96], self.outputs(tx))
await self.ledger.release_outputs(utxos)
# liquidating a UTXO
tx = await self.tx(
[self.txi(self.txo(10))], # inputs
[] # outputs
)
self.assertListEqual([10], self.inputs(tx))
# missing change added to consume the amount
self.assertListEqual([9.98], self.outputs(tx))
await self.ledger.release_outputs(utxos)
# liquidating at a loss, requires adding extra inputs
tx = await self.tx(
[self.txi(self.txo(0.01))], # inputs
[] # outputs
)
# UTXO 1 is added to cover some of the fee
self.assertListEqual([0.01, 1], self.inputs(tx))
# change is now needed to consume extra input
self.assertListEqual([0.97], self.outputs(tx))
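
The fee figures quoted in the comments follow from the byte sizes used throughout this file. A rough reconstruction of the first scenario, assuming the ledger charges size * fee_per_byte (its exact estimator may differ slightly):

COIN, CENT = 100000000, 1000000
fee_per_byte = int(.01 * CENT)   # 10,000 satoshis per byte, as set in the test

# One 148-byte input, a 46-byte payment output, a 46-byte change output,
# plus the 10-byte envelope:
size = 10 + 148 + 2 * 46
fee = size * fee_per_byte        # 2,500,000 sat = 0.025 COIN
change = 5 * COIN - 3 * COIN - fee
print(change / COIN)             # 1.975, i.e. roughly the 1.98 change asserted above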

Binary file not shown (image, 10 KiB).

@ -1,2 +0,0 @@
__path__: str = __import__('pkgutil').extend_path(__path__, __name__)
__version__ = '0.5.7'


@ -1 +0,0 @@
from .server import Server

Some files were not shown because too many files have changed in this diff.