lbry-sdk/tests/unit/blob_exchange/test_transfer_blob.py

364 lines
17 KiB
Python
Raw Permalink Normal View History

2019-01-22 18:52:32 +01:00
import asyncio
import tempfile
2019-01-30 16:11:05 +01:00
from io import BytesIO
from unittest import mock
2019-01-30 16:11:05 +01:00
2019-01-22 18:52:32 +01:00
import shutil
import os
import copy
2019-01-30 16:11:05 +01:00
from lbry.blob_exchange.serialization import BlobRequest
2019-12-31 21:30:13 +01:00
from lbry.testcase import AsyncioTestCase
from lbry.conf import Config
from lbry.extras.daemon.storage import SQLiteStorage
from lbry.extras.daemon.daemon import Daemon
from lbry.blob.blob_manager import BlobManager
from lbry.blob_exchange.server import BlobServer, BlobServerProtocol
from lbry.blob_exchange.client import request_blob
2019-10-01 02:00:10 +02:00
from lbry.dht.peer import PeerManager, make_kademlia_peer
from lbry.dht.node import Node
2019-01-22 18:52:32 +01:00
# import logging
# logging.getLogger("lbry").setLevel(logging.DEBUG)
2019-01-22 18:52:32 +01:00
2019-01-30 21:23:17 +01:00
def mock_config():
    """Return a test ``Config`` with file saving enabled.

    ``fixed_peer_delay`` is set to an absurdly large value so the fixed
    peer never kicks in during a test run.
    """
    conf = Config(save_files=True)
    conf.fixed_peer_delay = 10000  # keep the fixed peer out of the way
    return conf
2019-01-22 18:52:32 +01:00
class BlobExchangeTestBase(AsyncioTestCase):
    # Shared fixture for blob-exchange tests: stands up a real BlobServer on
    # localhost:33333 backed by its own SQLite storage/blob manager, plus an
    # independent client-side storage/blob manager and a kademlia peer record
    # (self.server_from_client) pointing at the server.
    async def asyncSetUp(self):
        """Create isolated temp dirs, storages and blob managers for a
        server and a client, then start the blob server and wait until it
        is actually listening.

        NOTE: the statement order matters — each SQLiteStorage must be
        opened before its BlobManager.setup() runs, and tests may connect
        as soon as this returns, so we block on started_listening last.
        """
        self.loop = asyncio.get_event_loop()
        # separate wallet dir for the client so wallet files don't mix with
        # downloaded data
        self.client_wallet_dir = tempfile.mkdtemp()
        self.client_dir = tempfile.mkdtemp()
        self.server_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self.client_wallet_dir)
        self.addCleanup(shutil.rmtree, self.client_dir)
        self.addCleanup(shutil.rmtree, self.server_dir)
        # server side: no fixed peers — it only serves, never downloads
        self.server_config = Config(
            data_dir=self.server_dir,
            download_dir=self.server_dir,
            wallet=self.server_dir,
            save_files=True,
            fixed_peers=[]
        )
        self.server_config.transaction_cache_size = 10000
        self.server_storage = SQLiteStorage(self.server_config, os.path.join(self.server_dir, "lbrynet.sqlite"))
        self.server_blob_manager = BlobManager(self.loop, self.server_dir, self.server_storage, self.server_config)
        self.server = BlobServer(self.loop, self.server_blob_manager, 'bQEaw42GXsgCAGio1nxFncJSyRmnztSCjP')
        # client side: no fixed peers or trackers so tests talk only to the
        # local server started below
        self.client_config = Config(
            data_dir=self.client_dir,
            download_dir=self.client_dir,
            wallet=self.client_wallet_dir,
            save_files=True,
            fixed_peers=[],
            tracker_servers=[]
        )
        self.client_config.transaction_cache_size = 10000
        self.client_storage = SQLiteStorage(self.client_config, os.path.join(self.client_dir, "lbrynet.sqlite"))
        self.client_blob_manager = BlobManager(self.loop, self.client_dir, self.client_storage, self.client_config)
        self.client_peer_manager = PeerManager(self.loop)
        # peer record the client uses to reach the server; node id is a dummy
        self.server_from_client = make_kademlia_peer(b'1' * 48, "127.0.0.1", tcp_port=33333, allow_localhost=True)
        # storages must be open before the blob managers set up
        await self.client_storage.open()
        await self.server_storage.open()
        await self.client_blob_manager.setup()
        await self.server_blob_manager.setup()
        self.server.start_server(33333, '127.0.0.1')
        self.addCleanup(self.server.stop_server)
        # don't return until the server socket is accepting connections
        await self.server.started_listening.wait()
class TestBlobExchange(BlobExchangeTestBase):
    """Tests of the blob exchange protocol: single and concurrent transfers,
    writer/reader concurrency on a blob, idle/transfer timeouts, and request
    validation on the server protocol.

    Fixed here: the removed ``loop=`` keyword argument to ``asyncio.gather``,
    ``asyncio.sleep`` and ``asyncio.Queue`` (deprecated in 3.8, removed in
    3.10) — dropping it is behavior-identical since the running loop is used
    either way.
    """

    async def _add_blob_to_server(self, blob_hash: str, blob_bytes: bytes):
        """Write ``blob_bytes`` into the server's blob manager and wait for
        the server to verify the blob on disk."""
        # add the blob on the server
        server_blob = self.server_blob_manager.get_blob(blob_hash, len(blob_bytes))
        writer = server_blob.get_blob_writer()
        writer.write(blob_bytes)
        await server_blob.verified.wait()
        self.assertTrue(os.path.isfile(server_blob.file_path))
        self.assertTrue(server_blob.get_is_verified())
        self.assertTrue(writer.closed())

    async def _test_transfer_blob(self, blob_hash: str):
        """Download ``blob_hash`` with the default client and assert the
        transfer succeeded and the blob verified."""
        client_blob = self.client_blob_manager.get_blob(blob_hash)
        # download the blob
        downloaded, transport = await request_blob(self.loop, client_blob, self.server_from_client.address,
                                                   self.server_from_client.tcp_port, 2, 3)
        self.assertIsNotNone(transport)
        self.addCleanup(transport.close)
        await client_blob.verified.wait()
        self.assertTrue(client_blob.get_is_verified())
        self.assertTrue(downloaded)
        client_blob.close()

    async def test_transfer_sd_blob(self):
        sd_hash = "3e2706157a59aaa47ef52bc264fce488078b4026c0b9bab649a8f2fe1ecc5e5cad7182a2bb7722460f856831a1ac0f02"
        mock_sd_blob_bytes = b"""{"blobs": [{"blob_hash": "6f53c72de100f6f007aa1b9720632e2d049cc6049e609ad790b556dba262159f739d5a14648d5701afc84b991254206a", "blob_num": 0, "iv": "3b6110c2d8e742bff66e4314863dee7e", "length": 2097152}, {"blob_hash": "18493bc7c5164b00596153859a0faffa45765e47a6c3f12198a4f7be4658111505b7f8a15ed0162306a0672c4a9b505d", "blob_num": 1, "iv": "df973fa64e73b4ff2677d682cdc32d3e", "length": 2097152}, {"blob_num": 2, "iv": "660d2dc2645da7c7d4540a466fcb0c60", "length": 0}], "key": "6465616462656566646561646265656664656164626565666465616462656566", "stream_hash": "22423c6786584974bd6b462af47ecb03e471da0ef372fe85a4e71a78bef7560c4afb0835c689f03916105404653b7bdf", "stream_name": "746573745f66696c65", "stream_type": "lbryfile", "suggested_file_name": "746573745f66696c65"}"""
        await self._add_blob_to_server(sd_hash, mock_sd_blob_bytes)
        return await self._test_transfer_blob(sd_hash)

    async def test_transfer_blob(self):
        blob_hash = "7f5ab2def99f0ddd008da71db3a3772135f4002b19b7605840ed1034c8955431bd7079549e65e6b2a3b9c17c773073ed"
        mock_blob_bytes = b'1' * ((2 * 2 ** 20) - 1)
        await self._add_blob_to_server(blob_hash, mock_blob_bytes)
        return await self._test_transfer_blob(blob_hash)

    async def test_host_same_blob_to_multiple_peers_at_once(self):
        """Two independent clients download the same blob concurrently."""
        blob_hash = "7f5ab2def99f0ddd008da71db3a3772135f4002b19b7605840ed1034c8955431bd7079549e65e6b2a3b9c17c773073ed"
        mock_blob_bytes = b'1' * ((2 * 2 ** 20) - 1)

        second_client_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, second_client_dir)
        second_client_conf = Config(save_files=True)

        second_client_storage = SQLiteStorage(second_client_conf, os.path.join(second_client_dir, "lbrynet.sqlite"))
        second_client_blob_manager = BlobManager(
            self.loop, second_client_dir, second_client_storage, second_client_conf
        )
        server_from_second_client = make_kademlia_peer(b'1' * 48, "127.0.0.1", tcp_port=33333, allow_localhost=True)

        await second_client_storage.open()
        await second_client_blob_manager.setup()
        await self._add_blob_to_server(blob_hash, mock_blob_bytes)
        second_client_blob = second_client_blob_manager.get_blob(blob_hash)

        # both clients download the blob at the same time
        await asyncio.gather(
            request_blob(
                self.loop, second_client_blob, server_from_second_client.address,
                server_from_second_client.tcp_port, 2, 3
            ),
            self._test_transfer_blob(blob_hash)
        )
        await second_client_blob.verified.wait()
        self.assertTrue(second_client_blob.get_is_verified())

    async def test_blob_writers_concurrency(self):
        """Two writers for the same blob may run concurrently, but the blob
        must only be written to disk once; readers opened before/after the
        write both see the full content."""
        blob_hash = "7f5ab2def99f0ddd008da71db3a3772135f4002b19b7605840ed1034c8955431bd7079549e65e6b2a3b9c17c773073ed"
        mock_blob_bytes = b'1' * ((2 * 2 ** 20) - 1)
        blob = self.server_blob_manager.get_blob(blob_hash)
        write_blob = blob._write_blob
        write_called_count = 0

        # wrap the private write hook to count how many times the blob is
        # actually flushed to disk
        async def _wrap_write_blob(blob_bytes):
            nonlocal write_called_count
            write_called_count += 1
            await write_blob(blob_bytes)

        def wrap_write_blob(blob_bytes):
            return asyncio.create_task(_wrap_write_blob(blob_bytes))

        blob._write_blob = wrap_write_blob
        writer1 = blob.get_blob_writer(peer_port=1)
        writer2 = blob.get_blob_writer(peer_port=2)
        reader1_ctx_before_write = blob.reader_context()
        # a second writer for the same peer port is refused
        with self.assertRaises(OSError):
            blob.get_blob_writer(peer_port=2)
        # reading is refused while the blob is unverified
        with self.assertRaises(OSError):
            with blob.reader_context():
                pass
        blob.set_length(len(mock_blob_bytes))
        results = {}

        def check_finished_callback(writer, num):
            def inner(writer_future: asyncio.Future):
                results[num] = writer_future.result()
            writer.finished.add_done_callback(inner)

        check_finished_callback(writer1, 1)
        check_finished_callback(writer2, 2)

        def write_task(writer):
            async def _inner():
                writer.write(mock_blob_bytes)
            return self.loop.create_task(_inner())

        await asyncio.gather(write_task(writer1), write_task(writer2))
        # both writers report the bytes, but only one disk write happened
        self.assertDictEqual({1: mock_blob_bytes, 2: mock_blob_bytes}, results)
        self.assertEqual(1, write_called_count)
        await blob.verified.wait()
        self.assertTrue(blob.get_is_verified())
        self.assertDictEqual({}, blob.writers)

        # a reader opened before the write still works once verified
        with reader1_ctx_before_write as f:
            self.assertEqual(mock_blob_bytes, f.read())
        with blob.reader_context() as f:
            self.assertEqual(mock_blob_bytes, f.read())
        # closing the blob invalidates open readers
        with blob.reader_context() as f:
            blob.close()
            with self.assertRaises(ValueError):
                f.read()
        self.assertListEqual([], blob.readers)

    async def test_host_different_blobs_to_multiple_peers_at_once(self):
        """A second client downloads one blob while the default client
        downloads another, concurrently from the same server."""
        blob_hash = "7f5ab2def99f0ddd008da71db3a3772135f4002b19b7605840ed1034c8955431bd7079549e65e6b2a3b9c17c773073ed"
        mock_blob_bytes = b'1' * ((2 * 2 ** 20) - 1)
        sd_hash = "3e2706157a59aaa47ef52bc264fce488078b4026c0b9bab649a8f2fe1ecc5e5cad7182a2bb7722460f856831a1ac0f02"
        mock_sd_blob_bytes = b"""{"blobs": [{"blob_hash": "6f53c72de100f6f007aa1b9720632e2d049cc6049e609ad790b556dba262159f739d5a14648d5701afc84b991254206a", "blob_num": 0, "iv": "3b6110c2d8e742bff66e4314863dee7e", "length": 2097152}, {"blob_hash": "18493bc7c5164b00596153859a0faffa45765e47a6c3f12198a4f7be4658111505b7f8a15ed0162306a0672c4a9b505d", "blob_num": 1, "iv": "df973fa64e73b4ff2677d682cdc32d3e", "length": 2097152}, {"blob_num": 2, "iv": "660d2dc2645da7c7d4540a466fcb0c60", "length": 0}], "key": "6465616462656566646561646265656664656164626565666465616462656566", "stream_hash": "22423c6786584974bd6b462af47ecb03e471da0ef372fe85a4e71a78bef7560c4afb0835c689f03916105404653b7bdf", "stream_name": "746573745f66696c65", "stream_type": "lbryfile", "suggested_file_name": "746573745f66696c65"}"""

        second_client_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, second_client_dir)
        second_client_conf = Config(save_files=True)
        second_client_storage = SQLiteStorage(second_client_conf, os.path.join(second_client_dir, "lbrynet.sqlite"))
        second_client_blob_manager = BlobManager(
            self.loop, second_client_dir, second_client_storage, second_client_conf
        )
        server_from_second_client = make_kademlia_peer(b'1' * 48, "127.0.0.1", tcp_port=33333, allow_localhost=True)

        await second_client_storage.open()
        await second_client_blob_manager.setup()
        await self._add_blob_to_server(blob_hash, mock_blob_bytes)
        await self._add_blob_to_server(sd_hash, mock_sd_blob_bytes)
        # FIX: the blob must come from the *second* client's manager —
        # previously self.client_blob_manager was used, so the second client
        # set up above never actually downloaded anything
        second_client_blob = second_client_blob_manager.get_blob(blob_hash)
        await asyncio.gather(
            request_blob(
                self.loop, second_client_blob, server_from_second_client.address,
                server_from_second_client.tcp_port, 2, 3
            ),
            self._test_transfer_blob(sd_hash),
            second_client_blob.verified.wait()
        )
        self.assertTrue(second_client_blob.get_is_verified())

    async def test_server_chunked_request(self):
        """The server protocol must handle a request delivered one byte at a
        time and still produce a response."""
        blob_hash = "7f5ab2def99f0ddd008da71db3a3772135f4002b19b7605840ed1034c8955431bd7079549e65e6b2a3b9c17c773073ed"
        server_protocol = BlobServerProtocol(self.loop, self.server_blob_manager, self.server.lbrycrd_address)
        transport = asyncio.Transport(extra={'peername': ('ip', 90)})
        received_data = BytesIO()
        transport.is_closing = lambda: received_data.closed
        transport.write = received_data.write
        server_protocol.connection_made(transport)
        blob_request = BlobRequest.make_request_for_blob_hash(blob_hash).serialize()
        for byte in blob_request:
            server_protocol.data_received(bytes([byte]))
        await asyncio.sleep(0.1)  # yield execution
        self.assertGreater(len(received_data.getvalue()), 0)

    async def test_idle_timeout(self):
        """An open connection may be reused within the idle timeout, and is
        closed by the server once the timeout elapses."""
        self.server.idle_timeout = 1
        blob_hash = "7f5ab2def99f0ddd008da71db3a3772135f4002b19b7605840ed1034c8955431bd7079549e65e6b2a3b9c17c773073ed"
        mock_blob_bytes = b'1' * ((2 * 2 ** 20) - 1)
        await self._add_blob_to_server(blob_hash, mock_blob_bytes)
        client_blob = self.client_blob_manager.get_blob(blob_hash)
        # download the blob
        downloaded, protocol = await request_blob(self.loop, client_blob, self.server_from_client.address,
                                                  self.server_from_client.tcp_port, 2, 3)
        self.assertIsNotNone(protocol)
        self.assertFalse(protocol.transport.is_closing())
        await client_blob.verified.wait()
        self.assertTrue(client_blob.get_is_verified())
        self.assertTrue(downloaded)
        client_blob.delete()
        # wait for less than the idle timeout
        await asyncio.sleep(0.5)
        # download the blob again, reusing the same connection
        downloaded, protocol2 = await request_blob(self.loop, client_blob, self.server_from_client.address,
                                                   self.server_from_client.tcp_port, 2, 3,
                                                   connected_protocol=protocol)
        self.assertIs(protocol, protocol2)
        self.assertFalse(protocol.transport.is_closing())
        await client_blob.verified.wait()
        self.assertTrue(client_blob.get_is_verified())
        self.assertTrue(downloaded)
        client_blob.delete()
        # check that the connection times out from the server side
        await asyncio.sleep(0.9)
        self.assertFalse(protocol.transport.is_closing())
        self.assertIsNotNone(protocol.transport._sock)
        await asyncio.sleep(0.1)
        self.assertIsNone(protocol.transport)

    def test_max_request_size(self):
        """The server closes the connection once a request exceeds 1200 bytes."""
        protocol = BlobServerProtocol(self.loop, self.server_blob_manager, 'bQEaw42GXsgCAGio1nxFncJSyRmnztSCjP')
        called = asyncio.Event()
        protocol.close = called.set
        protocol.data_received(b'0' * 1199)
        self.assertFalse(called.is_set())
        protocol.data_received(b'0')
        self.assertTrue(called.is_set())

    def test_bad_json(self):
        """Malformed JSON closes the connection immediately."""
        protocol = BlobServerProtocol(self.loop, self.server_blob_manager, 'bQEaw42GXsgCAGio1nxFncJSyRmnztSCjP')
        called = asyncio.Event()
        protocol.close = called.set
        protocol.data_received(b'{{0}')
        self.assertTrue(called.is_set())

    def test_no_request(self):
        """A syntactically valid but empty request closes the connection."""
        protocol = BlobServerProtocol(self.loop, self.server_blob_manager, 'bQEaw42GXsgCAGio1nxFncJSyRmnztSCjP')
        called = asyncio.Event()
        protocol.close = called.set
        protocol.data_received(b'{}')
        self.assertTrue(called.is_set())

    async def test_transfer_timeout(self):
        """A transfer that stalls past server.transfer_timeout is cancelled."""
        self.server.transfer_timeout = 1
        blob_hash = "7f5ab2def99f0ddd008da71db3a3772135f4002b19b7605840ed1034c8955431bd7079549e65e6b2a3b9c17c773073ed"
        mock_blob_bytes = b'1' * ((2 * 2 ** 20) - 1)
        await self._add_blob_to_server(blob_hash, mock_blob_bytes)
        client_blob = self.client_blob_manager.get_blob(blob_hash)
        server_blob = self.server_blob_manager.get_blob(blob_hash)

        # make the server-side send stall longer than the transfer timeout
        async def sendfile(writer):
            await asyncio.sleep(2)
            return 0

        server_blob.sendfile = sendfile
        with self.assertRaises(asyncio.CancelledError):
            await request_blob(self.loop, client_blob, self.server_from_client.address,
                               self.server_from_client.tcp_port, 2, 3)

    async def test_download_blob_using_jsonrpc_blob_get(self):
        """blob_get over the JSON-RPC daemon downloads the blob from the
        configured fixed peer (the test server)."""
        blob_hash = "7f5ab2def99f0ddd008da71db3a3772135f4002b19b7605840ed1034c8955431bd7079549e65e6b2a3b9c17c773073ed"
        mock_blob_bytes = b'1' * ((2 * 2 ** 20) - 1)
        await self._add_blob_to_server(blob_hash, mock_blob_bytes)
        # setup RPC Daemon
        daemon_config = copy.deepcopy(self.client_config)
        daemon_config.fixed_peers = [(self.server_from_client.address, self.server_from_client.tcp_port)]
        daemon = Daemon(daemon_config)
        mock_node = mock.Mock(spec=Node)

        # DHT is mocked out: peer accumulation yields nothing, so the daemon
        # must fall back to the fixed peer above
        def _mock_accumulate_peers(q1, q2=None):
            async def _task():
                pass
            q2 = q2 or asyncio.Queue()
            return q2, self.loop.create_task(_task())

        mock_node.accumulate_peers = _mock_accumulate_peers
        with mock.patch('lbry.extras.daemon.componentmanager.ComponentManager.all_components_running',
                        return_value=True):
            with mock.patch('lbry.extras.daemon.daemon.Daemon.dht_node', new_callable=mock.PropertyMock) \
                    as daemon_mock_dht:
                with mock.patch('lbry.extras.daemon.daemon.Daemon.blob_manager', new_callable=mock.PropertyMock) \
                        as daemon_mock_blob_manager:
                    daemon_mock_dht.return_value = mock_node
                    daemon_mock_blob_manager.return_value = self.client_blob_manager
                    result = await daemon.jsonrpc_blob_get(blob_hash, read=True)
                    self.assertIsNotNone(result)
                    self.assertEqual(mock_blob_bytes.decode(), result, "Downloaded blob is different than server blob")