2019-10-25 05:32:32 +02:00
|
|
|
import os
|
2019-12-31 21:30:13 +01:00
|
|
|
import sys
|
2019-02-11 23:46:13 +01:00
|
|
|
import json
|
2019-02-12 05:54:24 +01:00
|
|
|
import shutil
|
2019-02-11 23:46:13 +01:00
|
|
|
import logging
|
2019-12-31 21:30:13 +01:00
|
|
|
import tempfile
|
|
|
|
import functools
|
|
|
|
import asyncio
|
|
|
|
from asyncio.runners import _cancel_all_tasks # type: ignore
|
|
|
|
import unittest
|
|
|
|
from unittest.case import _Outcome
|
|
|
|
from typing import Optional
|
2019-12-15 07:37:55 +01:00
|
|
|
from time import time
|
2019-02-11 23:46:13 +01:00
|
|
|
from binascii import unhexlify
|
2019-12-15 07:37:55 +01:00
|
|
|
from functools import partial
|
2019-02-11 23:46:13 +01:00
|
|
|
|
2020-01-03 04:18:49 +01:00
|
|
|
from lbry.wallet import WalletManager, Wallet, Ledger, Account, Transaction
|
2019-06-21 02:55:47 +02:00
|
|
|
from lbry.conf import Config
|
2020-01-03 04:18:49 +01:00
|
|
|
from lbry.wallet.util import satoshis_to_coins
|
2021-12-19 23:50:37 +01:00
|
|
|
from lbry.wallet.dewies import lbc_to_dewies
|
2019-12-31 21:30:13 +01:00
|
|
|
from lbry.wallet.orchstr8 import Conductor
|
2022-08-08 20:04:55 +02:00
|
|
|
from lbry.wallet.orchstr8.node import LBCWalletNode, WalletNode
|
2021-12-19 23:50:37 +01:00
|
|
|
from lbry.schema.claim import Claim
|
2019-12-31 21:30:13 +01:00
|
|
|
|
2020-01-03 07:44:22 +01:00
|
|
|
from lbry.extras.daemon.daemon import Daemon, jsonrpc_dumps_pretty
|
2020-01-03 07:42:54 +01:00
|
|
|
from lbry.extras.daemon.components import Component, WalletComponent
|
|
|
|
from lbry.extras.daemon.components import (
|
2021-01-17 21:53:13 +01:00
|
|
|
DHT_COMPONENT,
|
|
|
|
HASH_ANNOUNCER_COMPONENT, PEER_PROTOCOL_SERVER_COMPONENT,
|
|
|
|
UPNP_COMPONENT, EXCHANGE_RATE_MANAGER_COMPONENT, LIBTORRENT_COMPONENT
|
2019-02-11 23:46:13 +01:00
|
|
|
)
|
2020-01-03 07:42:54 +01:00
|
|
|
from lbry.extras.daemon.componentmanager import ComponentManager
|
2019-12-15 07:37:55 +01:00
|
|
|
from lbry.extras.daemon.exchange_rate_manager import (
|
2021-02-10 19:29:05 +01:00
|
|
|
ExchangeRateManager, ExchangeRate, BittrexBTCFeed, BittrexUSDFeed
|
2019-12-15 07:37:55 +01:00
|
|
|
)
|
2019-06-21 02:55:47 +02:00
|
|
|
from lbry.extras.daemon.storage import SQLiteStorage
|
|
|
|
from lbry.blob.blob_manager import BlobManager
|
|
|
|
from lbry.stream.reflector.server import ReflectorServer
|
|
|
|
from lbry.blob_exchange.server import BlobServer
|
2019-02-12 05:54:24 +01:00
|
|
|
|
|
|
|
|
2019-12-31 21:30:13 +01:00
|
|
|
class ColorHandler(logging.StreamHandler):
    """StreamHandler that wraps each formatted record in an ANSI color escape
    sequence chosen from the record's log level."""

    # log level -> color name (anything unlisted falls back to "black")
    level_color = {
        logging.DEBUG: "black",
        logging.INFO: "light_gray",
        logging.WARNING: "yellow",
        logging.ERROR: "red"
    }

    # color name -> ANSI SGR code used inside the \x1b[...m escape
    color_code = {
        'black': 30,
        'red': 31,
        'green': 32,
        'yellow': 33,
        'blue': 34,
        'magenta': 35,
        'cyan': 36,
        'white': 37,
        'light_gray': '0;37',
        'dark_gray': '1;30',
    }

    def emit(self, record):
        """Write the colorized record to the stream; delegate any failure to
        the standard logging error handler."""
        try:
            text = self.format(record)
            code = self.color_code[self.level_color.get(record.levelno, "black")]
            out = self.stream
            out.write(f'\x1b[{code}m{text}\x1b[0m')
            out.write(self.terminator)
            self.flush()
        except Exception:  # noqa: BLE001 - emit() must never raise
            self.handleError(record)
|
|
|
|
|
|
|
|
|
|
|
|
# Module import side effect: install a colorized stdout handler on the root
# logger so all test logging is timestamped and colored by level.
HANDLER = ColorHandler(sys.stdout)
HANDLER.setFormatter(
    logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
)
logging.getLogger().addHandler(HANDLER)
|
|
|
|
|
|
|
|
|
|
|
|
class AsyncioTestCase(unittest.TestCase):
    # Implementation inspired by discussion:
    # https://bugs.python.org/issue32972
    """unittest.TestCase that runs ``async def`` setUp/tearDown/test methods.

    Each test gets a fresh event loop (created and closed inside run()), loop
    debug mode enabled, and a watchdog (add_timeout/check_timeout) that cancels
    all outstanding tasks once TIMEOUT seconds have elapsed.
    """

    # threshold (seconds) above which the debug loop logs slow callbacks
    LOOP_SLOW_CALLBACK_DURATION = 0.2
    # per-await watchdog timeout in seconds; falsy disables the watchdog
    TIMEOUT = 120.0

    maxDiff = None

    async def asyncSetUp(self):  # pylint: disable=C0103
        # override in subclasses; awaited on the test's loop after setUp()
        pass

    async def asyncTearDown(self):  # pylint: disable=C0103
        # override in subclasses; awaited on the test's loop before tearDown()
        pass

    def run(self, result=None):  # pylint: disable=R0915
        # Mirrors unittest.TestCase.run() (relies on the private _Outcome
        # helper) with event-loop management and async hooks woven in.
        orig_result = result
        if result is None:
            result = self.defaultTestResult()
            startTestRun = getattr(result, 'startTestRun', None)  # pylint: disable=C0103
            if startTestRun is not None:
                startTestRun()

        result.startTest(self)

        testMethod = getattr(self, self._testMethodName)  # pylint: disable=C0103
        if (getattr(self.__class__, "__unittest_skip__", False) or
                getattr(testMethod, "__unittest_skip__", False)):
            # If the class or method was skipped.
            try:
                skip_why = (getattr(self.__class__, '__unittest_skip_why__', '')
                            or getattr(testMethod, '__unittest_skip_why__', ''))
                self._addSkip(result, self, skip_why)
            finally:
                result.stopTest(self)
            return
        expecting_failure_method = getattr(testMethod,
                                           "__unittest_expecting_failure__", False)
        expecting_failure_class = getattr(self,
                                          "__unittest_expecting_failure__", False)
        expecting_failure = expecting_failure_class or expecting_failure_method
        outcome = _Outcome(result)

        # one fresh loop per test, debug mode on so misuse surfaces in output
        self.loop = asyncio.new_event_loop()  # pylint: disable=W0201
        asyncio.set_event_loop(self.loop)
        self.loop.set_debug(True)
        self.loop.slow_callback_duration = self.LOOP_SLOW_CALLBACK_DURATION

        try:
            self._outcome = outcome

            with outcome.testPartExecutor(self):
                self.setUp()
                self.add_timeout()
                self.loop.run_until_complete(self.asyncSetUp())
            if outcome.success:
                outcome.expecting_failure = expecting_failure
                with outcome.testPartExecutor(self, isTest=True):
                    # test methods may be plain or async; only drive the loop
                    # when the method actually returned a coroutine
                    maybe_coroutine = testMethod()
                    if asyncio.iscoroutine(maybe_coroutine):
                        self.add_timeout()
                        self.loop.run_until_complete(maybe_coroutine)
                outcome.expecting_failure = False
                with outcome.testPartExecutor(self):
                    self.add_timeout()
                    self.loop.run_until_complete(self.asyncTearDown())
                    self.tearDown()

            self.doAsyncCleanups()

            # drain and close the loop even when the test body failed
            try:
                _cancel_all_tasks(self.loop)
                self.loop.run_until_complete(self.loop.shutdown_asyncgens())
            finally:
                asyncio.set_event_loop(None)
                self.loop.close()

            for test, reason in outcome.skipped:
                self._addSkip(result, test, reason)
            self._feedErrorsToResult(result, outcome.errors)
            if outcome.success:
                if expecting_failure:
                    if outcome.expectedFailure:
                        self._addExpectedFailure(result, outcome.expectedFailure)
                    else:
                        self._addUnexpectedSuccess(result)
                else:
                    result.addSuccess(self)
            return result
        finally:
            result.stopTest(self)
            if orig_result is None:
                stopTestRun = getattr(result, 'stopTestRun', None)  # pylint: disable=C0103
                if stopTestRun is not None:
                    stopTestRun()  # pylint: disable=E1102

            # explicitly break reference cycles:
            # outcome.errors -> frame -> outcome -> outcome.errors
            # outcome.expectedFailure -> frame -> outcome -> outcome.expectedFailure
            outcome.errors.clear()
            outcome.expectedFailure = None

            # clear the outcome, no more needed
            self._outcome = None

    def doAsyncCleanups(self):  # pylint: disable=C0103
        # like unittest's doCleanups(), but cleanup callables may be coroutine
        # functions, in which case they are run to completion on self.loop
        outcome = self._outcome or _Outcome()
        while self._cleanups:
            function, args, kwargs = self._cleanups.pop()
            with outcome.testPartExecutor(self):
                maybe_coroutine = function(*args, **kwargs)
                if asyncio.iscoroutine(maybe_coroutine):
                    self.add_timeout()
                    self.loop.run_until_complete(maybe_coroutine)

    def cancel(self):
        # dump a stack trace for, then cancel, every task still pending on the
        # test loop; invoked by the watchdog when TIMEOUT expires
        for task in asyncio.all_tasks(self.loop):
            if not task.done():
                task.print_stack()
                task.cancel()

    def add_timeout(self):
        # arm the watchdog; no-op when TIMEOUT is falsy (disabled)
        if self.TIMEOUT:
            self.loop.call_later(self.TIMEOUT, self.check_timeout, time())

    def check_timeout(self, started):
        # cancel everything if the wall-clock budget is spent, otherwise
        # re-arm for another TIMEOUT interval
        if time() - started >= self.TIMEOUT:
            self.cancel()
        else:
            self.loop.call_later(self.TIMEOUT, self.check_timeout, started)
|
2021-11-20 01:22:25 +01:00
|
|
|
|
2019-12-31 21:30:13 +01:00
|
|
|
|
|
|
|
class AdvanceTimeTestCase(AsyncioTestCase):
    """AsyncioTestCase with a manually advanced clock: the loop's time() is
    patched to return self._time, which tests move forward via advance()."""

    async def asyncSetUp(self):
        self._time = 0  # pylint: disable=W0201
        # replace the loop clock so timers become due only when advance()
        # moves _time forward
        self.loop.time = functools.wraps(self.loop.time)(lambda: self._time)
        await super().asyncSetUp()

    async def advance(self, seconds):
        # NOTE: loop._ready is a CPython event-loop implementation detail
        # (the queue of callbacks scheduled for this iteration).
        # drain callbacks already due before moving the clock
        while self.loop._ready:
            await asyncio.sleep(0)
        self._time += seconds
        await asyncio.sleep(0)
        # drain callbacks that became due after the clock moved
        while self.loop._ready:
            await asyncio.sleep(0)
|
|
|
|
|
|
|
|
|
|
|
|
class IntegrationTestCase(AsyncioTestCase):
    """Test case that boots a full regtest stack via the orchstr8 Conductor:
    lbcd, lbcwallet, an SPV hub and a wallet node, exposing the usual
    manager/ledger/wallet/account handles as attributes."""

    # optional wallet seed passed to the Conductor; None means random
    SEED = None

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # all populated in asyncSetUp once the conductor has started
        self.conductor: Optional[Conductor] = None
        self.blockchain: Optional[LBCWalletNode] = None
        self.wallet_node: Optional[WalletNode] = None
        self.manager: Optional[WalletManager] = None
        self.ledger: Optional[Ledger] = None
        self.wallet: Optional[Wallet] = None
        self.account: Optional[Account] = None

    async def asyncSetUp(self):
        # start each service and register its stop as cleanup immediately, so
        # a failure partway through still tears down what did start
        self.conductor = Conductor(seed=self.SEED)
        await self.conductor.start_lbcd()
        self.addCleanup(self.conductor.stop_lbcd)
        await self.conductor.start_lbcwallet()
        self.addCleanup(self.conductor.stop_lbcwallet)
        await self.conductor.start_spv()
        self.addCleanup(self.conductor.stop_spv)
        await self.conductor.start_wallet()
        self.addCleanup(self.conductor.stop_wallet)
        self.blockchain = self.conductor.lbcwallet_node
        self.wallet_node = self.conductor.wallet_node
        self.manager = self.wallet_node.manager
        self.ledger = self.wallet_node.ledger
        self.wallet = self.wallet_node.wallet
        self.account = self.wallet_node.wallet.default_account

    async def assertBalance(self, account, expected_balance: str):  # pylint: disable=C0103
        # compares in coin units ("1.0"), not satoshis
        balance = await account.get_balance()
        self.assertEqual(satoshis_to_coins(balance), expected_balance)

    def broadcast(self, tx):
        return self.ledger.broadcast(tx)

    async def broadcast_and_confirm(self, tx, ledger=None):
        # subscribe before broadcasting so the notification can't be missed
        ledger = ledger or self.ledger
        notifications = asyncio.create_task(ledger.wait(tx))
        await ledger.broadcast(tx)
        await notifications
        await self.generate_and_wait(1, [tx.id], ledger)

    async def on_header(self, height):
        # resolve once the ledger has seen the header at `height`
        if self.ledger.headers.height < height:
            await self.ledger.on_header.where(
                lambda e: e.height == height
            )
        return True

    async def send_to_address_and_wait(self, address, amount, blocks_to_generate=0, ledger=None):
        # The predicate deliberately exploits side effects: txid/done are still
        # unset when the watcher is created, so early events are recorded into
        # tx_watch (append returns None -> predicate stays falsy) and checked
        # after the send returns, avoiding a race with fast notifications.
        tx_watch = []
        txid = None
        done = False
        watcher = (ledger or self.ledger).on_transaction.where(
            lambda e: e.tx.id == txid or done or tx_watch.append(e.tx.id)
        )

        txid = await self.blockchain.send_to_address(address, amount)
        done = txid in tx_watch
        await watcher

        await self.generate_and_wait(blocks_to_generate, [txid], ledger)
        return txid

    async def generate_and_wait(self, blocks_to_generate, txids, ledger=None):
        # mine blocks and wait until every txid has been seen; mutates `txids`
        # in place (remove) as transactions arrive
        if blocks_to_generate > 0:
            watcher = (ledger or self.ledger).on_transaction.where(
                lambda e: ((e.tx.id in txids and txids.remove(e.tx.id)), len(txids) <= 0)[-1]  # multi-statement lambda
            )
            await self.generate(blocks_to_generate)
            await watcher

    def on_address_update(self, address):
        # future resolving on the next transaction touching `address`
        return self.ledger.on_transaction.where(
            lambda e: e.address == address
        )

    def on_transaction_address(self, tx, address):
        # future resolving when `tx` is reported for `address`
        return self.ledger.on_transaction.where(
            lambda e: e.tx.id == tx.id and e.address == address
        )

    async def generate(self, blocks):
        """ Ask lbrycrd to generate some blocks and wait until ledger has them. """
        prepare = self.ledger.on_header.where(self.blockchain.is_expected_block)
        self.conductor.spv_node.server.synchronized.clear()
        await self.blockchain.generate(blocks)
        height = self.blockchain.block_expected
        await prepare  # no guarantee that it didn't happen already, so start waiting from before calling generate
        # wait for the SPV hub (db and es index) to catch up to the new height
        while True:
            await self.conductor.spv_node.server.synchronized.wait()
            self.conductor.spv_node.server.synchronized.clear()
            if self.conductor.spv_node.server.db.db_height < height:
                continue
            if self.conductor.spv_node.server._es_height < height:
                continue
            break
|
2021-11-08 20:18:22 +01:00
|
|
|
|
2019-12-31 21:30:13 +01:00
|
|
|
|
2019-12-15 07:37:55 +01:00
|
|
|
class FakeExchangeRateManager(ExchangeRateManager):
    """Exchange-rate manager stub: serves the fixed `rates` it was given and
    never polls the network."""

    def __init__(self, market_feeds, rates):  # pylint: disable=super-init-not-called
        self.market_feeds = market_feeds
        for market_feed in self.market_feeds:
            # mark each feed as freshly checked and pin its rate
            market_feed.last_check = time()
            market_feed.rate = ExchangeRate(market_feed.market, rates[market_feed.market], time())

    def start(self):
        """No-op: fake feeds never poll."""

    def stop(self):
        """No-op: nothing to shut down."""
|
|
|
|
|
|
|
|
|
2019-12-15 07:37:55 +01:00
|
|
|
def get_fake_exchange_rate_manager(rates=None):
    """Build a FakeExchangeRateManager backed by Bittrex feeds, using the
    given rates or the defaults (BTCLBC=3.0, USDLBC=2.0)."""
    feeds = [BittrexBTCFeed(), BittrexUSDFeed()]
    if rates is None:
        rates = {'BTCLBC': 3.0, 'USDLBC': 2.0}
    return FakeExchangeRateManager(feeds, rates)
|
|
|
|
|
|
|
|
|
2019-02-12 05:54:24 +01:00
|
|
|
class ExchangeRateManagerComponent(Component):
    """Daemon component that supplies a FakeExchangeRateManager, giving
    integration tests deterministic exchange rates."""

    component_name = EXCHANGE_RATE_MANAGER_COMPONENT

    def __init__(self, component_manager, rates=None):
        # `rates` is forwarded to the fake manager; None selects its defaults
        super().__init__(component_manager)
        self.exchange_rate_manager = get_fake_exchange_rate_manager(rates)

    @property
    def component(self) -> ExchangeRateManager:
        return self.exchange_rate_manager

    async def start(self):
        self.exchange_rate_manager.start()

    async def stop(self):
        self.exchange_rate_manager.stop()
|
2019-02-11 23:46:13 +01:00
|
|
|
|
|
|
|
|
|
|
|
class CommandTestCase(IntegrationTestCase):
|
|
|
|
|
2019-03-22 08:14:33 +01:00
|
|
|
VERBOSITY = logging.WARN
|
2019-05-24 04:40:02 +02:00
|
|
|
blob_lru_cache_size = 0
|
2019-02-11 23:46:13 +01:00
|
|
|
|
2020-01-03 05:03:45 +01:00
|
|
|
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.daemon = None  # primary Daemon under test, created in asyncSetUp
        self.daemons = []  # every daemon made via add_daemon; all stopped in teardown
        self.server_config = None  # Config for the blob/reflector server side
        self.server_storage = None  # SQLiteStorage backing the blob server
        self.extra_wallet_nodes = []  # wallet nodes started for extra daemons
        self.extra_wallet_node_port = 5280  # next port handed to a new WalletNode
        self.server_blob_manager = None
        self.server = None  # BlobServer peers download from
        self.reflector = None  # ReflectorServer streams are reflected to
        self.skip_libtorrent = True  # add LIBTORRENT_COMPONENT to skip list by default
|
2020-01-03 05:03:45 +01:00
|
|
|
|
2019-02-11 23:46:13 +01:00
|
|
|
    async def asyncSetUp(self):
        """Boot the regtest stack, create the daemon, fund its account, and
        start a blob server plus reflector for it to talk to."""
        # quiet the noisiest loggers down to VERBOSITY
        logging.getLogger('lbry.blob_exchange').setLevel(self.VERBOSITY)
        logging.getLogger('lbry.daemon').setLevel(self.VERBOSITY)
        logging.getLogger('lbry.stream').setLevel(self.VERBOSITY)
        logging.getLogger('lbry.torrent').setLevel(self.VERBOSITY)
        logging.getLogger('lbry.wallet').setLevel(self.VERBOSITY)

        await super().asyncSetUp()

        self.daemon = await self.add_daemon(self.wallet_node)

        # fund the default account: send 10 coins and mine 6 confirmations
        await self.account.ensure_address_gap()
        address = (await self.account.receiving.get_addresses(limit=1, only_usable=True))[0]
        await self.send_to_address_and_wait(address, 10, 6)

        server_tmp_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, server_tmp_dir)
        self.server_config = Config(
            data_dir=server_tmp_dir,
            wallet_dir=server_tmp_dir,
            save_files=True,
            download_dir=server_tmp_dir
        )
        self.server_config.transaction_cache_size = 10000
        self.server_storage = SQLiteStorage(self.server_config, ':memory:')
        await self.server_storage.open()

        # blob server on 5567 — referenced by conf.fixed_peers in add_daemon
        self.server_blob_manager = BlobManager(self.loop, server_tmp_dir, self.server_storage, self.server_config)
        self.server = BlobServer(self.loop, self.server_blob_manager, 'bQEaw42GXsgCAGio1nxFncJSyRmnztSCjP')
        self.server.start_server(5567, '127.0.0.1')
        await self.server.started_listening.wait()

        # reflector on 5566 — referenced by conf.reflector_servers in add_daemon
        self.reflector = ReflectorServer(self.server_blob_manager)
        self.reflector.start_server(5566, '127.0.0.1')
        await self.reflector.started_listening.wait()
        self.addCleanup(self.reflector.stop_server)
|
|
|
|
|
2019-02-11 23:46:13 +01:00
|
|
|
    async def asyncTearDown(self):
        """Stop every extra wallet node and every daemon created by the test."""
        await super().asyncTearDown()
        for wallet_node in self.extra_wallet_nodes:
            await wallet_node.stop(cleanup=True)
        for daemon in self.daemons:
            # mark the wallet component stopped so daemon.stop() won't try to
            # shut down the externally-managed wallet again
            daemon.component_manager.get_component('wallet')._running = False
            await daemon.stop()
|
2019-05-29 23:40:22 +02:00
|
|
|
|
|
|
|
    async def add_daemon(self, wallet_node=None, seed=None):
        """Create, configure and initialize a Daemon.

        When `wallet_node` is None a new WalletNode is created (on the next
        extra port) and started; otherwise the given node is reused and only
        reconfigured. The daemon is appended to self.daemons and returned.
        """
        start_wallet_node = False
        if wallet_node is None:
            wallet_node = WalletNode(
                self.wallet_node.manager_class,
                self.wallet_node.ledger_class,
                port=self.extra_wallet_node_port
            )
            self.extra_wallet_node_port += 1
            start_wallet_node = True

        upload_dir = os.path.join(wallet_node.data_path, 'uploads')
        os.mkdir(upload_dir)

        conf = Config(
            # needed during instantiation to access known_hubs path
            data_dir=wallet_node.data_path,
            wallet_dir=wallet_node.data_path,
            save_files=True,
            download_dir=wallet_node.data_path
        )
        conf.upload_dir = upload_dir  # not a real conf setting
        conf.share_usage_data = False
        conf.use_upnp = False
        conf.reflect_streams = True
        conf.blockchain_name = 'lbrycrd_regtest'
        # point at the test stack started in setup: SPV hub, reflector (5566)
        # and blob server (5567)
        conf.lbryum_servers = [(self.conductor.spv_node.hostname, self.conductor.spv_node.port)]
        conf.reflector_servers = [('127.0.0.1', 5566)]
        conf.fixed_peers = [('127.0.0.1', 5567)]
        conf.known_dht_nodes = []
        conf.blob_lru_cache_size = self.blob_lru_cache_size
        conf.transaction_cache_size = 10000
        conf.components_to_skip = [
            DHT_COMPONENT, UPNP_COMPONENT, HASH_ANNOUNCER_COMPONENT,
            PEER_PROTOCOL_SERVER_COMPONENT
        ]
        if self.skip_libtorrent:
            conf.components_to_skip.append(LIBTORRENT_COMPONENT)

        if start_wallet_node:
            await wallet_node.start(self.conductor.spv_node, seed=seed, config=conf)
            self.extra_wallet_nodes.append(wallet_node)
        else:
            wallet_node.manager.config = conf
            wallet_node.manager.ledger.config['known_hubs'] = conf.known_hubs

        def wallet_maker(component_manager):
            # inject the already-running wallet manager instead of letting the
            # component start its own
            wallet_component = WalletComponent(component_manager)
            wallet_component.wallet_manager = wallet_node.manager
            wallet_component._running = True
            return wallet_component

        daemon = Daemon(conf, ComponentManager(
            conf, skip_components=conf.components_to_skip, wallet=wallet_maker,
            exchange_rate_manager=partial(ExchangeRateManagerComponent, rates={
                'BTCLBC': 1.0, 'USDLBC': 2.0
            })
        ))
        await daemon.initialize()
        self.daemons.append(daemon)
        wallet_node.manager.old_db = daemon.storage
        return daemon
|
2019-02-11 23:46:13 +01:00
|
|
|
|
2019-10-13 01:33:16 +02:00
|
|
|
async def confirm_tx(self, txid, ledger=None):
|
2019-02-11 23:46:13 +01:00
|
|
|
""" Wait for tx to be in mempool, then generate a block, wait for tx to be in a block. """
|
2021-11-08 20:18:22 +01:00
|
|
|
# await (ledger or self.ledger).on_transaction.where(lambda e: e.tx.id == txid)
|
|
|
|
on_tx = (ledger or self.ledger).on_transaction.where(lambda e: e.tx.id == txid)
|
|
|
|
await asyncio.wait([self.generate(1), on_tx], timeout=5)
|
|
|
|
|
|
|
|
# # actually, if it's in the mempool or in the block we're fine
|
|
|
|
# await self.generate_and_wait(1, [txid], ledger=ledger)
|
|
|
|
# return txid
|
|
|
|
|
2019-07-30 05:34:12 +02:00
|
|
|
return txid
|
2019-02-11 23:46:13 +01:00
|
|
|
|
|
|
|
    async def on_transaction_dict(self, tx):
        # wait until the ledger has processed the tx given as a JSON dict
        # (rebuilt from its 'hex' field)
        await self.ledger.wait(Transaction(unhexlify(tx['hex'])))
|
2019-02-11 23:46:13 +01:00
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def get_all_addresses(tx):
|
|
|
|
addresses = set()
|
|
|
|
for txi in tx['inputs']:
|
|
|
|
addresses.add(txi['address'])
|
|
|
|
for txo in tx['outputs']:
|
|
|
|
addresses.add(txo['address'])
|
|
|
|
return list(addresses)
|
|
|
|
|
2019-04-29 04:25:18 +02:00
|
|
|
    async def blockchain_claim_name(self, name: str, value: str, amount: str, confirm=True):
        """Create a claim directly through the blockchain node's `claimname`
        CLI command, bypassing the daemon; optionally mine a confirmation."""
        txid = await self.blockchain._cli_cmnd('claimname', name, value, amount)
        if confirm:
            await self.generate(1)
        return txid

    async def blockchain_update_name(self, txid: str, value: str, amount: str, confirm=True):
        """Update a claim directly through the blockchain node's `updateclaim`
        CLI command; optionally mine a confirmation."""
        txid = await self.blockchain._cli_cmnd('updateclaim', txid, value, amount)
        if confirm:
            await self.generate(1)
        return txid
|
2019-04-29 00:19:58 +02:00
|
|
|
|
2019-02-11 23:46:13 +01:00
|
|
|
    async def out(self, awaitable):
        """ Serializes lbrynet API results to JSON then loads and returns it as dictionary. """
        return json.loads(jsonrpc_dumps_pretty(await awaitable, ledger=self.ledger))['result']

    def sout(self, value):
        """ Synchronous version of `out` method. """
        return json.loads(jsonrpc_dumps_pretty(value, ledger=self.ledger))['result']
|
|
|
|
|
2021-07-20 21:10:45 +02:00
|
|
|
    async def confirm_and_render(self, awaitable, confirm, return_tx=False) -> Transaction:
        """Await a daemon call yielding a tx; if `confirm`, wait for mempool,
        mine a block and wait for inclusion. Returns the serialized result
        unless `return_tx` asks for the raw Transaction."""
        tx = await awaitable
        if confirm:
            await self.ledger.wait(tx)
            await self.generate(1)
            await self.ledger.wait(tx, self.blockchain.block_expected)
        if not return_tx:
            return self.sout(tx)
        return tx
|
2019-10-25 05:32:32 +02:00
|
|
|
|
2021-11-08 20:18:22 +01:00
|
|
|
    async def create_nondeterministic_channel(self, name, price, pubkey_bytes, daemon=None, blocking=False):
        """Hand-build and broadcast a channel claim with an explicitly supplied
        public key (instead of a daemon-derived deterministic key)."""
        account = (daemon or self.daemon).wallet_manager.default_account
        claim_address = await account.receiving.get_or_create_usable_address()
        claim = Claim()
        claim.channel.public_key_bytes = pubkey_bytes
        tx = await Transaction.claim_create(
            name, claim, lbc_to_dewies(price),
            claim_address, [self.account], self.account
        )
        await tx.sign([self.account])
        await (daemon or self.daemon).broadcast_or_release(tx, blocking)
        return self.sout(tx)
|
|
|
|
|
2019-10-25 05:54:22 +02:00
|
|
|
def create_upload_file(self, data, prefix=None, suffix=None):
|
|
|
|
file_path = tempfile.mktemp(prefix=prefix or "tmp", suffix=suffix or "", dir=self.daemon.conf.upload_dir)
|
2019-10-25 05:32:32 +02:00
|
|
|
with open(file_path, 'w+b') as file:
|
|
|
|
file.write(data)
|
|
|
|
file.flush()
|
|
|
|
return file.name
|
|
|
|
|
|
|
|
    async def stream_create(
            self, name='hovercraft', bid='1.0', file_path=None,
            data=b'hi!', confirm=True, prefix=None, suffix=None, return_tx=False, **kwargs):
        """Create a stream claim; when no `file_path` is given, `data` is
        written to a temp upload file first."""
        if file_path is None and data is not None:
            file_path = self.create_upload_file(data=data, prefix=prefix, suffix=suffix)
        return await self.confirm_and_render(
            self.daemon.jsonrpc_stream_create(name, bid, file_path=file_path, **kwargs), confirm, return_tx
        )

    async def stream_update(
            self, claim_id, data=None, prefix=None, suffix=None, confirm=True, return_tx=False, **kwargs):
        """Update a stream claim, optionally replacing its file with `data`."""
        if data is not None:
            file_path = self.create_upload_file(data=data, prefix=prefix, suffix=suffix)
            return await self.confirm_and_render(
                self.daemon.jsonrpc_stream_update(claim_id, file_path=file_path, **kwargs), confirm, return_tx
            )
        return await self.confirm_and_render(
            self.daemon.jsonrpc_stream_update(claim_id, **kwargs), confirm
        )
|
2019-03-24 21:55:04 +01:00
|
|
|
|
2020-03-22 04:44:57 +01:00
|
|
|
    async def stream_repost(self, claim_id, name='repost', bid='1.0', confirm=True, **kwargs):
        """Repost an existing claim under a new name."""
        return await self.confirm_and_render(
            self.daemon.jsonrpc_stream_repost(claim_id=claim_id, name=name, bid=bid, **kwargs), confirm
        )

    async def stream_abandon(self, *args, confirm=True, **kwargs):
        """Abandon a stream claim; defaults to non-blocking broadcast."""
        if 'blocking' not in kwargs:
            kwargs['blocking'] = False
        return await self.confirm_and_render(
            self.daemon.jsonrpc_stream_abandon(*args, **kwargs), confirm
        )
|
2019-03-30 02:41:24 +01:00
|
|
|
|
2020-03-22 04:44:57 +01:00
|
|
|
    # Thin wrappers around daemon jsonrpc_* calls, run through
    # confirm_and_render so results are confirmed and serialized.

    async def purchase_create(self, *args, confirm=True, **kwargs):
        return await self.confirm_and_render(
            self.daemon.jsonrpc_purchase_create(*args, **kwargs), confirm
        )

    async def publish(self, name, *args, confirm=True, **kwargs):
        return await self.confirm_and_render(
            self.daemon.jsonrpc_publish(name, *args, **kwargs), confirm
        )

    async def channel_create(self, name='@arena', bid='1.0', confirm=True, **kwargs):
        return await self.confirm_and_render(
            self.daemon.jsonrpc_channel_create(name, bid, **kwargs), confirm
        )

    async def channel_update(self, claim_id, confirm=True, **kwargs):
        return await self.confirm_and_render(
            self.daemon.jsonrpc_channel_update(claim_id, **kwargs), confirm
        )
|
2019-02-11 23:46:13 +01:00
|
|
|
|
2019-03-30 02:41:24 +01:00
|
|
|
    async def channel_abandon(self, *args, confirm=True, **kwargs):
        """Abandon a channel claim; defaults to non-blocking broadcast."""
        if 'blocking' not in kwargs:
            kwargs['blocking'] = False
        return await self.confirm_and_render(
            self.daemon.jsonrpc_channel_abandon(*args, **kwargs), confirm
        )

    async def collection_create(
            self, name='firstcollection', bid='1.0', confirm=True, **kwargs):
        return await self.confirm_and_render(
            self.daemon.jsonrpc_collection_create(name, bid, **kwargs), confirm
        )

    async def collection_update(
            self, claim_id, confirm=True, **kwargs):
        return await self.confirm_and_render(
            self.daemon.jsonrpc_collection_update(claim_id, **kwargs), confirm
        )

    async def collection_abandon(self, *args, confirm=True, **kwargs):
        # NOTE(review): delegates to jsonrpc_stream_abandon rather than a
        # collection-specific abandon call — confirm this is intentional.
        if 'blocking' not in kwargs:
            kwargs['blocking'] = False
        return await self.confirm_and_render(
            self.daemon.jsonrpc_stream_abandon(*args, **kwargs), confirm
        )
|
2019-11-12 18:17:35 +01:00
|
|
|
|
2019-03-31 00:40:01 +01:00
|
|
|
    async def support_create(self, claim_id, bid='1.0', confirm=True, **kwargs):
        """Create a support for a claim."""
        return await self.confirm_and_render(
            self.daemon.jsonrpc_support_create(claim_id, bid, **kwargs), confirm
        )

    async def support_abandon(self, *args, confirm=True, **kwargs):
        """Abandon a support; defaults to non-blocking broadcast."""
        if 'blocking' not in kwargs:
            kwargs['blocking'] = False
        return await self.confirm_and_render(
            self.daemon.jsonrpc_support_abandon(*args, **kwargs), confirm
        )

    async def account_send(self, *args, confirm=True, **kwargs):
        return await self.confirm_and_render(
            self.daemon.jsonrpc_account_send(*args, **kwargs), confirm
        )

    async def wallet_send(self, *args, confirm=True, **kwargs):
        return await self.confirm_and_render(
            self.daemon.jsonrpc_wallet_send(*args, **kwargs), confirm
        )
|
|
|
|
|
2020-03-27 03:16:05 +01:00
|
|
|
async def txo_spend(self, *args, confirm=True, **kwargs):
|
|
|
|
txs = await self.daemon.jsonrpc_txo_spend(*args, **kwargs)
|
|
|
|
if confirm:
|
|
|
|
await asyncio.wait([self.ledger.wait(tx) for tx in txs])
|
|
|
|
await self.generate(1)
|
|
|
|
await asyncio.wait([self.ledger.wait(tx, self.blockchain.block_expected) for tx in txs])
|
|
|
|
return self.sout(txs)
|
|
|
|
|
2021-08-16 20:15:12 +02:00
|
|
|
    async def blob_clean(self):
        # serialized result of the daemon's blob_clean call
        return await self.out(self.daemon.jsonrpc_blob_clean())

    async def status(self):
        # serialized daemon status
        return await self.out(self.daemon.jsonrpc_status())
|
|
|
|
|
2020-03-22 04:44:57 +01:00
|
|
|
    # Thin JSON-serializing wrappers around daemon/ledger query calls;
    # paginated calls unwrap and return the 'items' list.

    async def resolve(self, uri, **kwargs):
        """Resolve a single uri and return its entry from the result dict."""
        return (await self.out(self.daemon.jsonrpc_resolve(uri, **kwargs)))[uri]

    async def claim_search(self, **kwargs):
        return (await self.out(self.daemon.jsonrpc_claim_search(**kwargs)))['items']

    async def get_claim_by_claim_id(self, claim_id):
        return await self.out(self.ledger.get_claim_by_claim_id(claim_id))

    async def file_list(self, *args, **kwargs):
        return (await self.out(self.daemon.jsonrpc_file_list(*args, **kwargs)))['items']

    async def txo_list(self, *args, **kwargs):
        return (await self.out(self.daemon.jsonrpc_txo_list(*args, **kwargs)))['items']

    async def txo_sum(self, *args, **kwargs):
        return await self.out(self.daemon.jsonrpc_txo_sum(*args, **kwargs))

    async def txo_plot(self, *args, **kwargs):
        return await self.out(self.daemon.jsonrpc_txo_plot(*args, **kwargs))

    async def claim_list(self, *args, **kwargs):
        return (await self.out(self.daemon.jsonrpc_claim_list(*args, **kwargs)))['items']

    async def stream_list(self, *args, **kwargs):
        return (await self.out(self.daemon.jsonrpc_stream_list(*args, **kwargs)))['items']

    async def channel_list(self, *args, **kwargs):
        return (await self.out(self.daemon.jsonrpc_channel_list(*args, **kwargs)))['items']

    async def transaction_list(self, *args, **kwargs):
        return (await self.out(self.daemon.jsonrpc_transaction_list(*args, **kwargs)))['items']

    async def blob_list(self, *args, **kwargs):
        return (await self.out(self.daemon.jsonrpc_blob_list(*args, **kwargs)))['items']
|
|
|
|
|
2019-06-24 01:58:41 +02:00
|
|
|
    @staticmethod
    def get_claim_id(tx):
        # claim_id of the first output of a serialized transaction dict
        return tx['outputs'][0]['claim_id']

    def assertItemCount(self, result, count):  # pylint: disable=invalid-name
        """Assert a paginated result reports `count` total items."""
        self.assertEqual(count, result['total_items'])
|