import asyncio
import random
from itertools import chain

from lbry.wallet.transaction import Transaction, Output, Input
from lbry.testcase import IntegrationTestCase
from lbry.wallet.util import satoshis_to_coins, coins_to_satoshis


class BasicTransactionTests(IntegrationTestCase):

    async def test_variety_of_transactions_and_longish_history(self):
        await self.blockchain.generate(300)
        await self.assertBalance(self.account, '0.0')
        addresses = await self.account.receiving.get_addresses()

        # send 10 coins to each of the first 10 receiving addresses and then 10 more transactions
        # worth 10 coins each to the 10th receiving address, for a total of 20 UTXOs on the account
        sends = list(chain(
            (self.blockchain.send_to_address(address, 10) for address in addresses[:10]),
            (self.blockchain.send_to_address(addresses[9], 10) for _ in range(10))
        ))
        # use batching to reduce issues with send_to_address on cli
        for batch in range(0, len(sends), 10):
            txids = await asyncio.gather(*sends[batch:batch+10])
            await asyncio.wait([self.on_transaction_id(txid) for txid in txids])
        await self.assertBalance(self.account, '200.0')
        self.assertEqual(20, await self.account.get_utxo_count())

        # address gap should have increased by 10 to cover the first 10 addresses we've used up
        addresses = await self.account.receiving.get_addresses()
        self.assertEqual(30, len(addresses))

        # there used to be a sync bug which failed to save TXIs between
        # daemon restarts, clearing cache replicates that behavior
        self.ledger._tx_cache.clear()

        # spend from each of the first 10 addresses to the subsequent 10 addresses
        txs = []
        for address in addresses[10:20]:
            txs.append(await Transaction.create(
                [],
                [Output.pay_pubkey_hash(
                    coins_to_satoshis('1.0'), self.ledger.address_to_hash160(address)
                )],
                [self.account], self.account
            ))
        await asyncio.wait([self.broadcast(tx) for tx in txs])
        await asyncio.wait([self.ledger.wait(tx) for tx in txs])

        # verify that a previous bug which failed to save TXIs doesn't come back
        # this check must happen before generating a new block
        self.assertTrue(all([
            tx.inputs[0].txo_ref.txo is not None
            for tx in await self.ledger.db.get_transactions(txid__in=[tx.id for tx in txs])
        ]))

        await self.blockchain.generate(1)
        await asyncio.wait([self.ledger.wait(tx) for tx in txs])
        await self.assertBalance(self.account, '199.99876')
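        # 200.0 - 199.99876 = 0.00124 paid in fees across the ten 1.0 coin spends above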

        # 10 of the UTXOs have each been split into a 1 coin UTXO and a roughly 9 coin change UTXO
        self.assertEqual(30, await self.account.get_utxo_count())

        # spend all 30 UTXOs into a 199 coin UTXO and change
        tx = await Transaction.create(
            [],
            [Output.pay_pubkey_hash(
                coins_to_satoshis('199.0'), self.ledger.address_to_hash160(addresses[-1])
            )],
            [self.account], self.account
        )
        await self.broadcast(tx)
        await self.ledger.wait(tx)
        await self.blockchain.generate(1)
        await self.ledger.wait(tx)

        self.assertEqual(2, await self.account.get_utxo_count())  # 199 + change
        await self.assertBalance(self.account, '199.99649')
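        # consolidating all 30 UTXOs in one transaction cost 199.99876 - 199.99649 = 0.00227 in fees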

    async def test_sending_and_receiving(self):
        account1, account2 = self.account, self.wallet.generate_account(self.ledger)
        await self.ledger.subscribe_account(account2)

        await self.assertBalance(account1, '0.0')
        await self.assertBalance(account2, '0.0')

        addresses = await account1.receiving.get_addresses()
        txids = await asyncio.gather(*(
            self.blockchain.send_to_address(address, 1.1) for address in addresses[:5]
        ))
        await asyncio.wait([self.on_transaction_id(txid) for txid in txids])  # mempool
        await self.blockchain.generate(1)
        await asyncio.wait([self.on_transaction_id(txid) for txid in txids])  # confirmed
        await self.assertBalance(account1, '5.5')
        await self.assertBalance(account2, '0.0')

        address2 = await account2.receiving.get_or_create_usable_address()
        tx = await Transaction.create(
            [],
            [Output.pay_pubkey_hash(
                coins_to_satoshis('2.0'), self.ledger.address_to_hash160(address2)
            )],
            [account1], account1
        )
        await self.broadcast(tx)
        await self.ledger.wait(tx)  # mempool
        await self.blockchain.generate(1)
        await self.ledger.wait(tx)  # confirmed

        await self.assertBalance(account1, '3.499802')
        await self.assertBalance(account2, '2.0')
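        # account1 paid 2.0 to account2 plus a 0.000198 fee: 5.5 - 2.0 - 0.000198 = 3.499802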

        utxos = await self.account.get_utxos()
        tx = await Transaction.create(
            [Input.spend(utxos[0])],
            [],
            [account1], account1
        )
        await self.broadcast(tx)
        await self.ledger.wait(tx)  # mempool
        await self.blockchain.generate(1)
        await self.ledger.wait(tx)  # confirmed
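        # inspect the earlier 2.0 coin payment to account2: index 1 here assumes newest-first
        # ordering from get_transactions(), with index 0 being the self-spend just above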
        tx = (await account1.get_transactions())[1]
        self.assertEqual(satoshis_to_coins(tx.inputs[0].amount), '1.1')
        self.assertEqual(satoshis_to_coins(tx.inputs[1].amount), '1.1')
        self.assertEqual(satoshis_to_coins(tx.outputs[0].amount), '2.0')
        self.assertEqual(tx.outputs[0].get_address(self.ledger), address2)
        self.assertFalse(tx.outputs[0].is_change)
        self.assertTrue(tx.outputs[1].is_change)

    async def test_history_edge_cases(self):
        await self.blockchain.generate(300)
        await self.assertBalance(self.account, '0.0')
        address = await self.account.receiving.get_or_create_usable_address()
        # evil trick: in real life the mempool is unsorted, but it comes back in the same order
        # across python instances, so shuffle the summaries to reproduce the unsorted case
        original_summary = self.conductor.spv_node.server.mempool.transaction_summaries

        async def random_summary(*args, **kwargs):
            summary = await original_summary(*args, **kwargs)
            if summary and len(summary) > 2:
                ordered = summary.copy()
                while summary == ordered:
                    random.shuffle(summary)
            return summary
        self.conductor.spv_node.server.mempool.transaction_summaries = random_summary
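        # with the patch in place the SPV server reports this address's mempool summaries in a
        # shuffled order on every call, so the history updates below must tolerate unordered results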

        # 10 unconfirmed txs, all from blockchain wallet
        sends = [self.blockchain.send_to_address(address, 10) for _ in range(10)]
        # use batching to reduce issues with send_to_address on cli
        for batch in range(0, len(sends), 10):
            txids = await asyncio.gather(*sends[batch:batch + 10])
            await asyncio.wait([self.on_transaction_id(txid) for txid in txids])
        remote_status = await self.ledger.network.subscribe_address(address)
        self.assertTrue(await self.ledger.update_history(address, remote_status))
        # 20 unconfirmed txs, 10 from blockchain, 10 from local to local
        utxos = await self.account.get_utxos()
        txs = []
        for utxo in utxos:
            tx = await Transaction.create(
                [Input.spend(utxo)],
                [],
                [self.account], self.account
            )
            await self.broadcast(tx)
            txs.append(tx)
        await asyncio.wait([self.on_transaction_address(tx, address) for tx in txs], timeout=1)
        remote_status = await self.ledger.network.subscribe_address(address)
        self.assertTrue(await self.ledger.update_history(address, remote_status))
        # server history grows unordered
        txid = await self.blockchain.send_to_address(address, 1)
        await self.on_transaction_id(txid)
        self.assertTrue(await self.ledger.update_history(address, remote_status))
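        # local history now holds 10 deposits + 10 local spends + 1 final deposit = 21 entries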
        self.assertEqual(21, len((await self.ledger.get_local_status_and_history(address))[1]))
        self.assertEqual(0, len(self.ledger._known_addresses_out_of_sync))