import asyncio

import lbry
import lbry.wallet
from lbry.error import ServerPaymentFeeAboveMaxAllowedError
from lbry.wallet.network import ClientSession
from lbry.wallet.rpc import RPCError
from lbry.wallet.server.session import LBRYElectrumX
from lbry.testcase import IntegrationTestCase, CommandTestCase
from lbry.wallet.orchstr8.node import SPVNode


class TestSessions(IntegrationTestCase):
    """
    Tests that the server cleans up stale connections after the session timeout and that the client times out too.
    """

    async def test_session_bloat_from_socket_timeout(self):
        await self.conductor.stop_spv()
        await self.ledger.stop()
        self.conductor.spv_node.session_timeout = 1
        await self.conductor.start_spv()
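        # open a raw client session (no network wrapper) with a short 0.2s request timeout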
        session = ClientSession(
            network=None, server=(self.conductor.spv_node.hostname, self.conductor.spv_node.port), timeout=0.2
        )
        await session.create_connection()
        await session.send_request('server.banner', ())
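        # the server should be tracking exactly one live session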
        self.assertEqual(len(self.conductor.spv_node.server.session_manager.sessions), 1)
        self.assertFalse(session.is_closing())
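        # sleep past the 1 second server-side session timeout so the server drops the idle connection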
        await asyncio.sleep(1.1)
        with self.assertRaises(asyncio.TimeoutError):
            await session.send_request('server.banner', ())
        self.assertTrue(session.is_closing())
        self.assertEqual(len(self.conductor.spv_node.server.session_manager.sessions), 0)

    async def test_proper_version(self):
        info = await self.ledger.network.get_server_features()
        self.assertEqual(lbry.__version__, info['server_version'])

    async def test_client_errors(self):
        # goal is ensuring these errors are raised and not accidentally trapped
        with self.assertRaisesRegex(Exception, 'not a valid address'):
            await self.ledger.network.get_history('of the world')
        with self.assertRaisesRegex(Exception, 'rejected by network rules.*TX decode failed'):
            await self.ledger.network.broadcast('13370042004200')


class TestUsagePayment(CommandTestCase):
    async def test_single_server_payment(self):
        wallet_pay_service = self.daemon.component_manager.get_component('wallet_server_payments')
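        # shorten the payment period (presumably seconds) so the test doesn't wait for the default interval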
        wallet_pay_service.payment_period = 1
        # the payments component only starts with a positive max fee
        wallet_pay_service.max_fee = "0.0"
        await wallet_pay_service.start(ledger=self.ledger, wallet=self.wallet)
        self.assertFalse(wallet_pay_service.running)
        wallet_pay_service.max_fee = "1.0"
        await wallet_pay_service.start(ledger=self.ledger, wallet=self.wallet)
        self.assertTrue(wallet_pay_service.running)
        await wallet_pay_service.stop()
        await wallet_pay_service.start(ledger=self.ledger, wallet=self.wallet)

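        # grab a fresh address with no history to use as the server's payment address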
        address = await self.blockchain.get_raw_change_address()
        _, history = await self.ledger.get_local_status_and_history(address)
        self.assertEqual(history, [])

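        # spin up a second SPV node whose advertised daily fee (1.1) exceeds the client's max_fee (1.0)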
        node = SPVNode(self.conductor.spv_module, node_number=2)
        await node.start(self.blockchain, extraconf={"payment_address": address, "daily_fee": "1.1"})
        self.addCleanup(node.stop)
        self.daemon.jsonrpc_settings_set('lbryum_servers', [f"{node.hostname}:{node.port}"])
        await self.daemon.jsonrpc_wallet_reconnect()
        LBRYElectrumX.set_server_features(node.server.env)
        features = await self.ledger.network.get_server_features()
        self.assertEqual(features["payment_address"], address)
        self.assertEqual(features["daily_fee"], "1.1")
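        # a daily fee above max_fee must raise instead of paying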
        with self.assertRaises(ServerPaymentFeeAboveMaxAllowedError):
            await asyncio.wait_for(wallet_pay_service.on_payment.first, timeout=30)
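        # lower the advertised fee to within the allowed maximum and re-announce the server features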
        node.server.env.daily_fee = "1.0"
        node.server.env.payment_address = address
        LBRYElectrumX.set_server_features(node.server.env)
        # self.daemon.jsonrpc_settings_set('lbryum_servers', [f"{node.hostname}:{node.port}"])
        await self.daemon.jsonrpc_wallet_reconnect()
        features = await self.ledger.network.get_server_features()
        self.assertEqual(features["payment_address"], address)
        self.assertEqual(features["daily_fee"], "1.0")
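        # now the payment should go through and be broadcast on chain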
        tx = await asyncio.wait_for(wallet_pay_service.on_payment.first, timeout=30)
        self.assertIsNotNone(await self.blockchain.get_raw_transaction(tx.id))  # verify it was broadcast
        self.assertEqual(tx.outputs[0].amount, 100000000)  # 1.0 LBC in dewies
        self.assertEqual(tx.outputs[0].get_address(self.ledger), address)


class TestESSync(CommandTestCase):
    async def test_es_sync_utility(self):
        es_writer = self.conductor.spv_node.es_writer
        server_search_client = self.conductor.spv_node.server.session_manager.search_index

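        # create ten claims so the ES index has something to sync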
        for i in range(10):
            await self.stream_create(f"stream{i}", bid='0.001')
        await self.generate(1)
        self.assertEqual(10, len(await self.claim_search(order_by=['height'])))

        # delete the index and verify nothing is returned by claim search
        await es_writer.delete_index()
        server_search_client.clear_caches()
        self.assertEqual(0, len(await self.claim_search(order_by=['height'])))

        # reindex, 10 claims should be returned
        await es_writer.reindex()
        self.assertEqual(10, len(await self.claim_search(order_by=['height'])))
        server_search_client.clear_caches()
        self.assertEqual(10, len(await self.claim_search(order_by=['height'])))

        # reindex again; this appears to do nothing, but it deletes and reinserts the same 10 claims
        await es_writer.reindex()
        self.assertEqual(10, len(await self.claim_search(order_by=['height'])))
        server_search_client.clear_caches()
        self.assertEqual(10, len(await self.claim_search(order_by=['height'])))

        # delete the index again and stop the writer; upon starting it, the writer should reindex automatically
        await es_writer.stop(delete_index=True)
        server_search_client.clear_caches()
        self.assertEqual(0, len(await self.claim_search(order_by=['height'])))

        await es_writer.start(reindex=True)
        self.assertEqual(10, len(await self.claim_search(order_by=['height'])))

        # stop the es writer and advance the chain by 1, adding a new claim; upon resuming, the es writer
        # should add the new claim
        await es_writer.stop()
        await self.stream_create("stream11", bid='0.001', confirm=False)
        generate_block_task = asyncio.create_task(self.generate(1))
        await es_writer.start()
        await generate_block_task
        self.assertEqual(11, len(await self.claim_search(order_by=['height'])))

        # # this time we will test a migration from unversioned to v1
        # await db.search_index.sync_client.indices.delete_template(db.search_index.index)
        # await db.search_index.stop()
        #
        # await make_es_index_and_run_sync(env, db=db, index_name=db.search_index.index, force=True)
        # await db.search_index.start()
        #
        # await es_writer.reindex()
        # self.assertEqual(10, len(await self.claim_search(order_by=['height'])))


class TestHubDiscovery(CommandTestCase):

    async def test_hub_discovery(self):
        us_final_node = SPVNode(self.conductor.spv_module, node_number=2)
        await us_final_node.start(self.blockchain, extraconf={"country": "US"})
        self.addCleanup(us_final_node.stop)
        final_node_host = f"{us_final_node.hostname}:{us_final_node.port}"

        kp_final_node = SPVNode(self.conductor.spv_module, node_number=3)
        await kp_final_node.start(self.blockchain, extraconf={"country": "KP"})
        self.addCleanup(kp_final_node.stop)
        kp_final_node_host = f"{kp_final_node.hostname}:{kp_final_node.port}"

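        # a relay hub in FR that peers with both final hubs and can announce them to clients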
        relay_node = SPVNode(self.conductor.spv_module, node_number=4)
        await relay_node.start(self.blockchain, extraconf={
            "country": "FR",
            "peer_hubs": ",".join([kp_final_node_host, final_node_host])
        })
        relay_node_host = f"{relay_node.hostname}:{relay_node.port}"
        self.addCleanup(relay_node.stop)

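        # before connecting anywhere, no hubs are known and we're still on the conductor's default server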
        self.assertEqual(list(self.daemon.conf.known_hubs), [])
        self.assertEqual(
            self.daemon.ledger.network.client.server_address_and_port,
            ('127.0.0.1', 50002)
        )

        # connect to the relay hub, which will tell us about the final hubs
        self.daemon.jsonrpc_settings_set('lbryum_servers', [relay_node_host])
        await self.daemon.jsonrpc_wallet_reconnect()
        self.assertEqual(
            self.daemon.conf.known_hubs.filter(), {
                (relay_node.hostname, relay_node.port): {"country": "FR"},
                (us_final_node.hostname, us_final_node.port): {},  # discovered from relay but not contacted yet
                (kp_final_node.hostname, kp_final_node.port): {},  # discovered from relay but not contacted yet
            }
        )
        self.assertEqual(
            self.daemon.ledger.network.client.server_address_and_port, ('127.0.0.1', relay_node.port)
        )

        # use known_hubs to connect to final US hub
        self.daemon.jsonrpc_settings_clear('lbryum_servers')
        self.daemon.conf.jurisdiction = "US"
        await self.daemon.jsonrpc_wallet_reconnect()
        self.assertEqual(
            self.daemon.conf.known_hubs.filter(), {
                (relay_node.hostname, relay_node.port): {"country": "FR"},
                (us_final_node.hostname, us_final_node.port): {"country": "US"},
                (kp_final_node.hostname, kp_final_node.port): {"country": "KP"},
            }
        )
        self.assertEqual(
            self.daemon.ledger.network.client.server_address_and_port, ('127.0.0.1', us_final_node.port)
        )

        # connect to the KP jurisdiction
        self.daemon.conf.jurisdiction = "KP"
        await self.daemon.jsonrpc_wallet_reconnect()
        self.assertEqual(
            self.daemon.ledger.network.client.server_address_and_port, ('127.0.0.1', kp_final_node.port)
        )

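        # hubs can also push new peers; simulate one announcing itself and wait for the client to hear it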
        kp_final_node.server.session_manager._notify_peer('127.0.0.1:9988')
        await self.daemon.ledger.network.on_hub.first
        await asyncio.sleep(0.5)  # wait for above event to be processed by other listeners
        self.assertEqual(
            self.daemon.conf.known_hubs.filter(), {
                (relay_node.hostname, relay_node.port): {"country": "FR"},
                (us_final_node.hostname, us_final_node.port): {"country": "US"},
                (kp_final_node.hostname, kp_final_node.port): {"country": "KP"},
                ('127.0.0.1', 9988): {}
            }
        )


class TestStressFlush(CommandTestCase):
    # async def test_flush_over_66_thousand(self):
    #     history = self.conductor.spv_node.server.db.history
    #     history.flush_count = 66_000
    #     history.flush()
    #     self.assertEqual(history.flush_count, 66_001)
    #     await self.generate(1)
    #     self.assertEqual(history.flush_count, 66_002)

    async def test_thousands_claim_ids_on_search(self):
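        # searching with more claim ids than the server allows should raise a clean RPCError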
        await self.stream_create()
        with self.assertRaises(RPCError) as err:
            await self.claim_search(not_channel_ids=[("%040x" % i) for i in range(8196)])
        # in the go hub this doesn't have a `.` at the end, in python it does
        self.assertTrue(err.exception.message.startswith('not_channel_ids cant have more than 2048 items'))