import unittest
from unittest import skipIf
import asyncio
import os
from binascii import hexlify

from lbry.schema import Claim
from lbry.stream.background_downloader import BackgroundDownloader
from lbry.stream.descriptor import StreamDescriptor
from lbry.testcase import CommandTestCase
from lbry.extras.daemon.components import TorrentSession, BACKGROUND_DOWNLOADER_COMPONENT
from lbry.wallet import Transaction
from lbry.torrent.tracker import UDPTrackerServerProtocol


class FileCommands(CommandTestCase):

    def __init__(self, *a, **kw):
        super().__init__(*a, **kw)
        self.skip_libtorrent = False
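
    # Helper for the torrent tests: seed a fake torrent from a standalone
    # TorrentSession and publish (or update) a claim pointing at its infohash.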
    async def initialize_torrent(self, tx_to_update=None):
        if not hasattr(self, 'seeder_session'):
            self.seeder_session = TorrentSession(self.loop, None)
            self.addCleanup(self.seeder_session.stop)
            await self.seeder_session.bind(port=4040)
        btih = await self.seeder_session.add_fake_torrent()
        address = await self.account.receiving.get_or_create_usable_address()
        if not tx_to_update:
            claim = Claim()
            claim.stream.update(bt_infohash=btih)
            tx = await Transaction.claim_create(
                'torrent', claim, 1, address, [self.account], self.account
            )
        else:
            claim = tx_to_update.outputs[0].claim
            claim.stream.update(bt_infohash=btih)
            tx = await Transaction.claim_update(
                tx_to_update.outputs[0], claim, 1, address, [self.account], self.account
            )
        await tx.sign([self.account])
        await self.broadcast_and_confirm(tx)
        self.client_session = self.daemon.file_manager.source_managers['torrent'].torrent_session
        self.client_session._session.add_dht_node(('localhost', 4040))
        self.client_session.wait_start = False  # fixme: this is super slow on tests
        return tx, btih

    @skipIf(TorrentSession is None, "libtorrent not installed")
    async def test_download_torrent(self):
        tx, btih = await self.initialize_torrent()
        self.assertNotIn('error', await self.out(self.daemon.jsonrpc_get('torrent')))
        self.assertItemCount(await self.daemon.jsonrpc_file_list(), 1)
        # second call, see it's there and move on
        self.assertNotIn('error', await self.out(self.daemon.jsonrpc_get('torrent')))
        self.assertItemCount(await self.daemon.jsonrpc_file_list(), 1)
        self.assertEqual((await self.daemon.jsonrpc_file_list())['items'][0].identifier, btih)
        self.assertIn(btih, self.client_session._handles)
        tx, new_btih = await self.initialize_torrent(tx)
        self.assertNotEqual(btih, new_btih)
        # claim now points to another torrent, update to it
        self.assertNotIn('error', await self.out(self.daemon.jsonrpc_get('torrent')))
        self.assertEqual((await self.daemon.jsonrpc_file_list())['items'][0].identifier, new_btih)
        self.assertIn(new_btih, self.client_session._handles)
        self.assertNotIn(btih, self.client_session._handles)
        self.assertItemCount(await self.daemon.jsonrpc_file_list(), 1)
        await self.daemon.jsonrpc_file_delete(delete_all=True)
        self.assertItemCount(await self.daemon.jsonrpc_file_list(), 0)
        self.assertNotIn(new_btih, self.client_session._handles)

    async def create_streams_in_range(self, *args, **kwargs):
        self.stream_claim_ids = []
        for i in range(*args, **kwargs):
            t = await self.stream_create(f'Stream_{i}', '0.00001')
            self.stream_claim_ids.append(t['outputs'][0]['claim_id'])

    async def test_file_reflect(self):
        tx = await self.stream_create('mirror', '0.01')
        sd_hash = tx['outputs'][0]['value']['source']['sd_hash']
        self.assertEqual([], await self.daemon.jsonrpc_file_reflect(sd_hash=sd_hash))
        all_except_sd = [
            blob_hash for blob_hash in self.server.blob_manager.completed_blob_hashes if blob_hash != sd_hash
        ]
        await self.reflector.blob_manager.delete_blobs(all_except_sd)
        self.assertEqual(all_except_sd, await self.daemon.jsonrpc_file_reflect(sd_hash=sd_hash))

    async def test_file_management(self):
        await self.stream_create('foo', '0.01')
        await self.stream_create('foo2', '0.01')

        file1, file2 = await self.file_list('claim_name')
        self.assertEqual(file1['claim_name'], 'foo')
        self.assertEqual(file2['claim_name'], 'foo2')

        self.assertItemCount(await self.daemon.jsonrpc_file_list(claim_id=[file1['claim_id'], file2['claim_id']]), 2)
        self.assertItemCount(await self.daemon.jsonrpc_file_list(claim_id=file1['claim_id']), 1)
        self.assertItemCount(await self.daemon.jsonrpc_file_list(outpoint=[file1['outpoint'], file2['outpoint']]), 2)
        self.assertItemCount(await self.daemon.jsonrpc_file_list(outpoint=file1['outpoint']), 1)

        await self.daemon.jsonrpc_file_delete(claim_name='foo')
        self.assertItemCount(await self.daemon.jsonrpc_file_list(), 1)
        await self.daemon.jsonrpc_file_delete(claim_name='foo2')
        self.assertItemCount(await self.daemon.jsonrpc_file_list(), 0)

        await self.daemon.jsonrpc_get('lbry://foo')
        self.assertItemCount(await self.daemon.jsonrpc_file_list(), 1)

    async def test_tracker_discovery(self):
        port = 50990
        server = UDPTrackerServerProtocol()
        transport, _ = await self.loop.create_datagram_endpoint(lambda: server, local_addr=("127.0.0.1", port))
        self.addCleanup(transport.close)
        self.daemon.conf.fixed_peers = []
        self.daemon.conf.tracker_servers = [("127.0.0.1", port)]
        tx = await self.stream_create('foo', '0.01')
        sd_hash = tx['outputs'][0]['value']['source']['sd_hash']
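        # the tracker keys announces by a 20-byte infohash; here the first
        # 20 bytes of the stream's sd_hash are used as that key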
        self.assertNotIn(bytes.fromhex(sd_hash)[:20], server.peers)
        server.add_peer(bytes.fromhex(sd_hash)[:20], "127.0.0.1", 5567)
        self.assertEqual(1, len(server.peers[bytes.fromhex(sd_hash)[:20]]))
        self.assertTrue(await self.daemon.jsonrpc_file_delete(delete_all=True))
        stream = await self.daemon.jsonrpc_get('foo', save_file=True)
        await self.wait_files_to_complete()
        self.assertEqual(0, stream.blobs_remaining)
        self.assertEqual(2, len(server.peers[bytes.fromhex(sd_hash)[:20]]))
        self.assertEqual([{'address': '127.0.0.1',
                           'node_id': None,
                           'tcp_port': 5567,
                           'udp_port': None},
                          {'address': '127.0.0.1',
                           'node_id': None,
                           'tcp_port': 4444,
                           'udp_port': None}], (await self.daemon.jsonrpc_peer_list(sd_hash))['items'])

    async def test_announces(self):
        # announces on publish
        self.assertEqual(await self.daemon.storage.get_blobs_to_announce(), [])
        await self.stream_create('foo', '0.01')
        stream = (await self.daemon.jsonrpc_file_list())["items"][0]
        self.assertSetEqual(set(await self.daemon.storage.get_blobs_to_announce()), {stream.sd_hash})
        self.assertTrue(await self.daemon.jsonrpc_file_delete(delete_all=True))
        # announces on download
        self.assertEqual(await self.daemon.storage.get_blobs_to_announce(), [])
        stream = await self.daemon.jsonrpc_get('foo')
        self.assertSetEqual(set(await self.daemon.storage.get_blobs_to_announce()), {stream.sd_hash})
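
    # Helper: delete a claim's file (also from the download directory) and
    # verify nothing is left behind.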
    async def _purge_file(self, claim_name, full_path):
        self.assertTrue(
            await self.daemon.jsonrpc_file_delete(claim_name=claim_name, delete_from_download_dir=True)
        )
        self.assertItemCount(await self.daemon.jsonrpc_file_list(), 0)
        self.assertFalse(os.path.isfile(full_path))

    async def test_publish_with_illegal_chars(self):
        def check_prefix_suffix(name, prefix, suffix):
            self.assertTrue(name.startswith(prefix))
            self.assertTrue(name.endswith(suffix))

        # Stream a file with a file name containing invalid chars
        claim_name = 'lolwindows'
        prefix, suffix = 'derp?', '.ext.'
        san_prefix, san_suffix = 'derp', '.ext'
        tx = await self.stream_create(claim_name, '0.01', prefix=prefix, suffix=suffix)
        stream = (await self.daemon.jsonrpc_file_list())["items"][0]
        claim_id = self.get_claim_id(tx)

        # Assert that file list and source contain the local unsanitized name, but the suggested name is sanitized
        full_path = (await self.daemon.jsonrpc_get('lbry://' + claim_name)).full_path
        stream_file_name = os.path.basename(full_path)
        source_file_name = tx['outputs'][0]['value']['source']['name']
        file_list_name = stream.file_name
        suggested_file_name = stream.descriptor.suggested_file_name

        self.assertTrue(os.path.isfile(full_path))
        check_prefix_suffix(stream_file_name, prefix, suffix)
        self.assertEqual(stream_file_name, source_file_name)
        self.assertEqual(stream_file_name, file_list_name)
        check_prefix_suffix(suggested_file_name, san_prefix, san_suffix)
        await self._purge_file(claim_name, full_path)

        # Re-download the deleted file and assert that the file name is sanitized
        full_path = (await self.daemon.jsonrpc_get('lbry://' + claim_name, save_file=True)).full_path
        stream_file_name = os.path.basename(full_path)
        stream = (await self.daemon.jsonrpc_file_list())["items"][0]
        file_list_name = stream.file_name
        suggested_file_name = stream.descriptor.suggested_file_name

        self.assertTrue(os.path.isfile(full_path))
        check_prefix_suffix(stream_file_name, san_prefix, san_suffix)
        self.assertEqual(stream_file_name, file_list_name)
        self.assertEqual(stream_file_name, suggested_file_name)
        await self._purge_file(claim_name, full_path)

        # Assert that the downloaded file name is not sanitized when the user provides a custom file name
        custom_name = 'cust*m_name'
        full_path = (await self.daemon.jsonrpc_get(
            'lbry://' + claim_name, file_name=custom_name, save_file=True)).full_path
        file_name_on_disk = os.path.basename(full_path)
        self.assertTrue(os.path.isfile(full_path))
        self.assertEqual(custom_name, file_name_on_disk)

        # Update the stream and assert the file name is not sanitized, but the suggested file name is
        prefix, suffix = 'derpyderp?', '.ext.'
        san_prefix, san_suffix = 'derpyderp', '.ext'
        tx = await self.stream_update(claim_id, data=b'amazing content', prefix=prefix, suffix=suffix)
        full_path = (await self.daemon.jsonrpc_get('lbry://' + claim_name, save_file=True)).full_path
        updated_stream = (await self.daemon.jsonrpc_file_list())["items"][0]

        stream_file_name = os.path.basename(full_path)
        source_file_name = tx['outputs'][0]['value']['source']['name']
        file_list_name = updated_stream.file_name
        suggested_file_name = updated_stream.descriptor.suggested_file_name

        self.assertTrue(os.path.isfile(full_path))
        check_prefix_suffix(stream_file_name, prefix, suffix)
        self.assertEqual(stream_file_name, source_file_name)
        self.assertEqual(stream_file_name, file_list_name)
        check_prefix_suffix(suggested_file_name, san_prefix, san_suffix)

    async def test_file_list_fields(self):
        await self.stream_create('foo', '0.01')
        file_list = await self.file_list()
        self.assertEqual(
            file_list[0]['timestamp'],
            self.ledger.headers.estimated_timestamp(file_list[0]['height'])
        )
        self.assertEqual(file_list[0]['confirmations'], -1)
        await self.daemon.jsonrpc_resolve('foo')
        file_list = await self.file_list()
        self.assertEqual(
            file_list[0]['timestamp'],
            self.ledger.headers.estimated_timestamp(file_list[0]['height'])
        )
        self.assertEqual(file_list[0]['confirmations'], 1)

    async def test_get_doesnt_touch_user_written_files_between_calls(self):
        await self.stream_create('foo', '0.01', data=bytes([0] * (2 << 23)))
        self.assertTrue(await self.daemon.jsonrpc_file_delete(claim_name='foo'))
        first_path = (await self.daemon.jsonrpc_get('lbry://foo', save_file=True)).full_path
        await self.wait_files_to_complete()
        self.assertTrue(await self.daemon.jsonrpc_file_delete(claim_name='foo'))
        with open(first_path, 'wb') as f:
            f.write(b' ')
            f.flush()
        second_path = (await self.daemon.jsonrpc_get('lbry://foo', save_file=True)).full_path
        await self.wait_files_to_complete()
        self.assertNotEqual(first_path, second_path)

    @unittest.skip("FIXME: claimname/updateclaim is gone. #3480 wip, unblock #3479")
    async def test_file_list_updated_metadata_on_resolve(self):
        await self.stream_create('foo', '0.01')
        txo = (await self.daemon.resolve(self.wallet.accounts, ['lbry://foo']))['lbry://foo']
        claim = txo.claim
        await self.daemon.jsonrpc_file_delete(claim_name='foo')
        txid = await self.blockchain_claim_name('bar', hexlify(claim.to_bytes()).decode(), '0.01')
        await self.daemon.jsonrpc_get('lbry://bar')
        claim.stream.description = "fix typos, fix the world"
        await self.blockchain_update_name(txid, hexlify(claim.to_bytes()).decode(), '0.01')
        await self.daemon.jsonrpc_resolve('lbry://bar')
        file_list = (await self.daemon.jsonrpc_file_list())['items']
        self.assertEqual(file_list[0].stream_claim_info.claim.stream.description, claim.stream.description)

    async def test_sourceless_content(self):
        # claim has no source, then it has one
        tx = await self.stream_create('foo', '0.01', data=None)
        claim_id = self.get_claim_id(tx)
        await self.daemon.jsonrpc_file_delete(claim_name='foo')
        response = await self.out(self.daemon.jsonrpc_get('lbry://foo'))
        self.assertIn('error', response)
        self.assertIn('nothing to download', response['error'])
        # source is set (there isn't a way to clear the source field, so we stop here for now)
        await self.stream_update(claim_id, data=b'surpriiiiiiiise')
        response = await self.out(self.daemon.jsonrpc_get('lbry://foo'))
        self.assertNotIn('error', response)
        self.assertItemCount(await self.daemon.jsonrpc_file_list(), 1)

    async def test_file_list_paginated_output(self):
        await self.create_streams_in_range(0, 20)

        page = await self.file_list(page_size=20)
        page_claim_ids = [item['claim_id'] for item in page]
        self.assertListEqual(page_claim_ids, self.stream_claim_ids)

        page = await self.file_list(page_size=6)
        page_claim_ids = [item['claim_id'] for item in page]
        self.assertListEqual(page_claim_ids, self.stream_claim_ids[:6])

        page = await self.file_list(page_size=6, page=2)
        page_claim_ids = [item['claim_id'] for item in page]
        self.assertListEqual(page_claim_ids, self.stream_claim_ids[6:12])

        out_of_bounds = await self.file_list(page=5, page_size=6)
        self.assertEqual(out_of_bounds, [])

        complete = await self.daemon.jsonrpc_file_list()
        self.assertEqual(complete['total_pages'], 1)
        self.assertEqual(complete['total_items'], 20)

        page = await self.daemon.jsonrpc_file_list(page_size=10, page=1)
        self.assertEqual(page['total_pages'], 2)
        self.assertEqual(page['total_items'], 20)
        self.assertEqual(page['page'], 1)

        full = await self.out(self.daemon.jsonrpc_file_list(page_size=20, page=1))
        page1 = await self.file_list(page=1, page_size=10)
        page2 = await self.file_list(page=2, page_size=10)
        self.assertEqual(page1 + page2, full['items'])

    async def test_download_different_timeouts(self):
        tx = await self.stream_create('foo', '0.01')
        sd_hash = tx['outputs'][0]['value']['source']['sd_hash']
        await self.daemon.jsonrpc_file_delete(claim_name='foo')
        all_except_sd = [
            blob_hash for blob_hash in self.server.blob_manager.completed_blob_hashes if blob_hash != sd_hash
        ]
        await self.server.blob_manager.delete_blobs(all_except_sd)
        resp = await self.daemon.jsonrpc_get('lbry://foo', timeout=2, save_file=True)
        self.assertIn('error', resp)
        self.assertEqual('Failed to download data blobs for sd hash %s within timeout.' % sd_hash, resp['error'])
        self.assertTrue(await self.daemon.jsonrpc_file_delete(claim_name='foo'), "data timeout didn't create a file")
        await self.server.blob_manager.delete_blobs([sd_hash])
        resp = await self.daemon.jsonrpc_get('lbry://foo', timeout=2, save_file=True)
        self.assertIn('error', resp)
        self.assertEqual('Failed to download sd blob %s within timeout.' % sd_hash, resp['error'])
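
    # Helper: poll until no file in the list is still in the 'running' state.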
    async def wait_files_to_complete(self):
        while await self.file_list(status='running'):
            await asyncio.sleep(0.01)

    async def test_filename_conflicts_management_on_resume_download(self):
        await self.stream_create('foo', '0.01', data=bytes([0] * (1 << 23)))
        file_info = (await self.file_list())[0]
        original_path = os.path.join(self.daemon.conf.download_dir, file_info['file_name'])
        await self.daemon.jsonrpc_file_delete(claim_name='foo')
        await self.daemon.jsonrpc_get('lbry://foo')
        with open(original_path, 'wb') as handle:
            handle.write(b'some other stuff was there instead')
        self.daemon.file_manager.stop()
        await self.daemon.file_manager.start()
        await asyncio.wait_for(self.wait_files_to_complete(), timeout=5)  # if this hangs, file didn't get set completed
        # check that internal state got through up to the file list API
        stream = self.daemon.file_manager.get_filtered(stream_hash=file_info['stream_hash'])[0]
        file_info = (await self.file_list())[0]
        self.assertEqual(stream.file_name, file_info['file_name'])
        # checks that what the API shows is what we have at the very internal level
        self.assertEqual(stream.full_path, file_info['download_path'])
|
2019-02-14 01:30:59 +01:00
|
|
|
|
|
|
|
async def test_incomplete_downloads_erases_output_file_on_stop(self):
|
2019-04-05 05:10:18 +02:00
|
|
|
tx = await self.stream_create('foo', '0.01', data=b'deadbeef' * 1000000)
|
2019-04-20 08:11:19 +02:00
|
|
|
sd_hash = tx['outputs'][0]['value']['source']['sd_hash']
|
2019-10-30 03:56:28 +01:00
|
|
|
file_info = (await self.file_list())[0]
|
2021-11-07 08:21:58 +01:00
|
|
|
blobs = await self.daemon.storage.get_blobs_for_stream(
|
|
|
|
await self.daemon.storage.get_stream_hash_for_sd_hash(sd_hash)
|
2019-03-11 23:43:00 +01:00
|
|
|
)
|
2021-11-07 08:21:58 +01:00
|
|
|
await self.daemon.jsonrpc_file_delete(claim_name='foo')
|
|
|
|
self.assertEqual(5, len(blobs))
|
2019-03-11 23:43:00 +01:00
|
|
|
all_except_sd_and_head = [
|
2019-04-05 05:10:18 +02:00
|
|
|
blob.blob_hash for blob in blobs[1:-1]
|
2019-02-14 01:30:59 +01:00
|
|
|
]
|
2019-03-11 23:43:00 +01:00
|
|
|
await self.server.blob_manager.delete_blobs(all_except_sd_and_head)
|
2019-04-10 15:36:02 +02:00
|
|
|
path = os.path.join(self.daemon.conf.download_dir, file_info['file_name'])
|
|
|
|
self.assertFalse(os.path.isfile(path))
|
2019-04-06 22:11:56 +02:00
|
|
|
resp = await self.out(self.daemon.jsonrpc_get('lbry://foo', timeout=2))
|
2019-03-11 23:43:00 +01:00
|
|
|
self.assertNotIn('error', resp)
|
2019-04-10 15:36:02 +02:00
|
|
|
self.assertTrue(os.path.isfile(path))
|
2020-01-27 23:02:31 +01:00
|
|
|
self.daemon.file_manager.stop()
|
2019-04-24 15:46:46 +02:00
|
|
|
await asyncio.sleep(0.01, loop=self.loop) # FIXME: this sleep should not be needed
|
2019-04-10 15:36:02 +02:00
|
|
|
self.assertFalse(os.path.isfile(path))

    async def test_incomplete_downloads_retry(self):
        tx = await self.stream_create('foo', '0.01', data=b'deadbeef' * 1000000)
        sd_hash = tx['outputs'][0]['value']['source']['sd_hash']
        blobs = await self.daemon.storage.get_blobs_for_stream(
            await self.daemon.storage.get_stream_hash_for_sd_hash(sd_hash)
        )
        self.assertEqual(5, len(blobs))
        await self.daemon.jsonrpc_file_delete(claim_name='foo')
        all_except_sd_and_head = [
            blob.blob_hash for blob in blobs[1:-1]
        ]

        # backup server blobs
        for blob_hash in all_except_sd_and_head:
            blob = self.server_blob_manager.get_blob(blob_hash)
            os.rename(blob.file_path, blob.file_path + '__')

        # erase all except sd blob
        await self.server.blob_manager.delete_blobs(all_except_sd_and_head)

        # start the download
        resp = await self.out(self.daemon.jsonrpc_get('lbry://foo', timeout=2))
        self.assertNotIn('error', resp)
        self.assertItemCount(await self.daemon.jsonrpc_file_list(), 1)
        self.assertEqual('running', (await self.file_list())[0]['status'])

        # recover blobs
        for blob_hash in all_except_sd_and_head:
            blob = self.server_blob_manager.get_blob(blob_hash)
            os.rename(blob.file_path + '__', blob.file_path)
            self.server_blob_manager.blobs.clear()
            await self.server_blob_manager.blob_completed(self.server_blob_manager.get_blob(blob_hash))

        await asyncio.wait_for(self.wait_files_to_complete(), timeout=5)
        file_info = (await self.file_list())[0]
        self.assertEqual(file_info['blobs_completed'], file_info['blobs_in_stream'])
        self.assertEqual('finished', file_info['status'])
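
    # Exercises paid downloads end to end: insufficient funds, a purchase above
    # the configured max key fee, and finally a successful purchase.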
    async def test_paid_download(self):
        target_address = await self.blockchain.get_raw_change_address()

        # FAIL: beyond available balance
        await self.stream_create(
            'expensive', '0.01', data=b'pay me if you can',
            fee_currency='LBC', fee_amount='11.0',
            fee_address=target_address, claim_address=target_address
        )
        await self.daemon.jsonrpc_file_delete(claim_name='expensive')
        response = await self.out(self.daemon.jsonrpc_get('lbry://expensive'))
        self.assertEqual(response['error'], 'Not enough funds to cover this transaction.')
        self.assertItemCount(await self.daemon.jsonrpc_file_list(), 0)

        # FAIL: beyond maximum key fee
        await self.stream_create(
            'maxkey', '0.01', data=b'no pay me, no',
            fee_currency='LBC', fee_amount='111.0',
            fee_address=target_address, claim_address=target_address
        )
        await self.daemon.jsonrpc_file_delete(claim_name='maxkey')
        response = await self.out(self.daemon.jsonrpc_get('lbry://maxkey'))
        self.assertItemCount(await self.daemon.jsonrpc_file_list(), 0)
        self.assertEqual(
            response['error'], 'Purchase price of 111.0 LBC exceeds maximum configured price of 100.0 LBC (50.0 USD).'
        )

        # PASS: purchase is successful
        await self.stream_create(
            'icanpay', '0.01', data=b'I got the power!',
            fee_currency='LBC', fee_amount='1.0',
            fee_address=target_address, claim_address=target_address
        )
        await self.daemon.jsonrpc_file_delete(claim_name='icanpay')
        await self.assertBalance(self.account, '9.925679')
        response = await self.daemon.jsonrpc_get('lbry://icanpay')
        raw_content_fee = response.content_fee.raw
        await self.ledger.wait(response.content_fee)
        await self.assertBalance(self.account, '8.925538')
        self.assertItemCount(await self.daemon.jsonrpc_file_list(), 1)

        await asyncio.wait_for(self.wait_files_to_complete(), timeout=1)

        # check that the fee was received
        starting_balance = float(await self.blockchain.get_balance())
        await self.generate(1)
        block_reward_and_claim_fee = 2.0
        self.assertEqual(
            float(await self.blockchain.get_balance()),
            starting_balance + block_reward_and_claim_fee
        )

        # restart the daemon and make sure the fee is still there

        self.daemon.file_manager.stop()
        await self.daemon.file_manager.start()
        self.assertItemCount(await self.daemon.jsonrpc_file_list(), 1)
        self.assertEqual((await self.daemon.jsonrpc_file_list())['items'][0].content_fee.raw, raw_content_fee)
        await self.daemon.jsonrpc_file_delete(claim_name='icanpay')

        # PASS: no fee address --> use the claim address to pay
        tx = await self.stream_create(
            'nofeeaddress', '0.01', data=b'free stuff?',
        )
        await self.__raw_value_update_no_fee_address(
            tx, fee_amount='2.0', fee_currency='LBC', claim_address=target_address
        )
        await self.daemon.jsonrpc_file_delete(claim_name='nofeeaddress')
        self.assertItemCount(await self.daemon.jsonrpc_file_list(), 0)

        response = await self.out(self.daemon.jsonrpc_get('lbry://nofeeaddress'))
        self.assertIsNone((await self.daemon.jsonrpc_file_list())['items'][0].stream_claim_info.claim.stream.fee.address)
        self.assertIsNotNone(response['content_fee'])
        self.assertItemCount(await self.daemon.jsonrpc_file_list(), 1)
        self.assertEqual(response['content_fee']['outputs'][0]['amount'], '2.0')
        self.assertEqual(response['content_fee']['outputs'][0]['address'], target_address)
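
    # With max_key_fee set to None there is no cap on the purchase price.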
    async def test_null_max_key_fee(self):
        target_address = await self.blockchain.get_raw_change_address()
        self.daemon.conf.max_key_fee = None

        await self.stream_create(
            'somename', '0.5', data=b'Yes, please',
            fee_currency='LBC', fee_amount='1.0',
            fee_address=target_address, claim_address=target_address
        )
        self.assertTrue(await self.daemon.jsonrpc_file_delete(claim_name='somename'))
        # Assert the fee and bid are subtracted
        await self.assertBalance(self.account, '9.483893')
        response = await self.daemon.jsonrpc_get('lbry://somename')
        await self.ledger.wait(response.content_fee)
        await self.assertBalance(self.account, '8.483752')

        # Assert the file downloads
        await asyncio.wait_for(self.wait_files_to_complete(), timeout=1)
        self.assertItemCount(await self.daemon.jsonrpc_file_list(), 1)

        # Assert the transaction is recorded to the blockchain
        starting_balance = float(await self.blockchain.get_balance())
        await self.generate(1)
        block_reward_and_claim_fee = 2.0
        self.assertEqual(
            float(await self.blockchain.get_balance()), starting_balance + block_reward_and_claim_fee
        )

    async def test_null_fee(self):
        target_address = await self.blockchain.get_raw_change_address()
        tx = await self.stream_create(
            'nullfee', '0.01', data=b'no pay me, no',
            fee_currency='LBC', fee_address=target_address, fee_amount='1.0'
        )
        await self.__raw_value_update_no_fee_amount(tx, target_address)
        await self.daemon.jsonrpc_file_delete(claim_name='nullfee')
        response = await self.daemon.jsonrpc_get('lbry://nullfee')
        self.assertItemCount(await self.daemon.jsonrpc_file_list(), 1)
        self.assertIsNone(response.content_fee)
        self.assertTrue(response.stream_claim_info.claim.stream.has_fee)
        self.assertDictEqual(
            response.stream_claim_info.claim.stream.to_dict()['fee'],
            {'currency': 'LBC', 'address': target_address}
        )
        await self.daemon.jsonrpc_file_delete(claim_name='nullfee')
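
    # The two helpers below craft raw claim updates that clear a fee field
    # directly on the protobuf (bypassing jsonrpc validation), so the tests
    # above can exercise claims with a missing fee address or fee amount.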
    async def __raw_value_update_no_fee_address(self, tx, claim_address, **kwargs):
        tx = await self.daemon.jsonrpc_stream_update(
            self.get_claim_id(tx), preview=True, claim_address=claim_address, **kwargs
        )
        tx.outputs[0].claim.stream.fee.address_bytes = b''
        tx.outputs[0].script.generate()
        await tx.sign([self.account])
        await self.broadcast_and_confirm(tx)

    async def __raw_value_update_no_fee_amount(self, tx, claim_address):
        tx = await self.daemon.jsonrpc_stream_update(
            self.get_claim_id(tx), preview=True, fee_currency='LBC', fee_amount='1.0', fee_address=claim_address,
            claim_address=claim_address
        )
        tx.outputs[0].claim.stream.fee.message.ClearField('amount')
        tx.outputs[0].script.generate()
        await tx.sign([self.account])
        await self.broadcast_and_confirm(tx)


class DiskSpaceManagement(CommandTestCase):

    async def get_referenced_blobs(self, tx):
        sd_hash = tx['outputs'][0]['value']['source']['sd_hash']
        stream_hash = await self.daemon.storage.get_stream_hash_for_sd_hash(sd_hash)
        return sd_hash, set(await self.blob_list(
            stream_hash=stream_hash
        ))

    async def test_file_management(self):
        status = await self.status()
        self.assertIn('disk_space', status)
        self.assertEqual(0, status['disk_space']['total_used_mb'])
        self.assertEqual(True, status['disk_space']['running'])
        sd_hash1, blobs1 = await self.get_referenced_blobs(
            await self.stream_create('foo1', '0.01', data=('0' * 2 * 1024 * 1024).encode())
        )
        sd_hash2, blobs2 = await self.get_referenced_blobs(
            await self.stream_create('foo2', '0.01', data=('0' * 3 * 1024 * 1024).encode())
        )
        sd_hash3, blobs3 = await self.get_referenced_blobs(
            await self.stream_create('foo3', '0.01', data=('0' * 3 * 1024 * 1024).encode())
        )
        sd_hash4, blobs4 = await self.get_referenced_blobs(
            await self.stream_create('foo4', '0.01', data=('0' * 2 * 1024 * 1024).encode())
        )
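
        # mark foo1, foo3 and foo4 as not locally published, so their blobs
        # count as downloaded content; foo2 (3 MB) stays 'published'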
        await self.daemon.storage.update_blob_ownership(sd_hash1, False)
        await self.daemon.storage.update_blob_ownership(sd_hash3, False)
        await self.daemon.storage.update_blob_ownership(sd_hash4, False)
        await self.blob_clean()  # just to refresh caches, has no effect

        self.assertEqual(7, (await self.status())['disk_space']['content_blobs_storage_used_mb'])
        self.assertEqual(10, (await self.status())['disk_space']['total_used_mb'])
        self.assertEqual(blobs1 | blobs2 | blobs3 | blobs4, set(await self.blob_list()))

        await self.blob_clean()

        self.assertEqual(10, (await self.status())['disk_space']['total_used_mb'])
        self.assertEqual(7, (await self.status())['disk_space']['content_blobs_storage_used_mb'])
        self.assertEqual(3, (await self.status())['disk_space']['published_blobs_storage_used_mb'])
        self.assertEqual(blobs1 | blobs2 | blobs3 | blobs4, set(await self.blob_list()))

        self.daemon.conf.blob_storage_limit = 6
        await self.blob_clean()

        self.assertEqual(5, (await self.status())['disk_space']['total_used_mb'])
        self.assertEqual(2, (await self.status())['disk_space']['content_blobs_storage_used_mb'])
        self.assertEqual(3, (await self.status())['disk_space']['published_blobs_storage_used_mb'])
        blobs = set(await self.blob_list())
        self.assertFalse(blobs1.issubset(blobs))
        self.assertTrue(blobs2.issubset(blobs))
        self.assertFalse(blobs3.issubset(blobs))
        self.assertTrue(blobs4.issubset(blobs))
        # check that added_on gets set on downloads (was a bug)
        self.assertLess(0, await self.daemon.storage.run_and_return_one_or_none("select min(added_on) from blob"))
        await self.daemon.jsonrpc_file_delete(delete_all=True)
        await self.daemon.jsonrpc_get("foo4", save_file=False)
        self.assertLess(0, await self.daemon.storage.run_and_return_one_or_none("select min(added_on) from blob"))


class TestBackgroundDownloaderComponent(CommandTestCase):

    async def get_blobs_from_sd_blob(self, sd_blob):
        descriptor = await StreamDescriptor.from_stream_descriptor_blob(
            asyncio.get_running_loop(), self.daemon.blob_manager.blob_dir, sd_blob
        )
        return descriptor.blobs

    async def assertBlobs(self, *sd_hashes, no_files=True):
        # checks that we have only the finished blobs needed for the referenced streams
        seen = set(sd_hashes)
        for sd_hash in sd_hashes:
            sd_blob = self.daemon.blob_manager.get_blob(sd_hash)
            self.assertTrue(sd_blob.get_is_verified())
            blobs = await self.get_blobs_from_sd_blob(sd_blob)
            for blob in blobs[:-1]:
                self.assertTrue(self.daemon.blob_manager.get_blob(blob.blob_hash).get_is_verified())
            seen.update(blob.blob_hash for blob in blobs if blob.blob_hash)
        if no_files:
            self.assertEqual(seen, self.daemon.blob_manager.completed_blob_hashes)
            self.assertEqual(0, len(await self.file_list()))
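
    # Helper: wipe all files and blobs so each scenario starts from scratch.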
|
2021-10-15 08:09:38 +02:00
|
|
|
|
2021-10-18 09:33:37 +02:00
|
|
|
async def clear(self):
|
2021-10-15 08:09:38 +02:00
|
|
|
await self.daemon.jsonrpc_file_delete(delete_all=True)
|
|
|
|
self.assertEqual(0, len(await self.file_list()))
|
2021-10-18 09:33:37 +02:00
|
|
|
await self.daemon.blob_manager.delete_blobs(list(self.daemon.blob_manager.completed_blob_hashes), True)
|
2021-10-18 08:10:39 +02:00
|
|
|
self.assertEqual(0, len((await self.daemon.jsonrpc_blob_list())['items']))

    async def test_download(self):
        content1 = await self.stream_create('content1', '0.01', data=bytes([0] * 32 * 1024 * 1024))
        content1 = content1['outputs'][0]['value']['source']['sd_hash']
        content2 = await self.stream_create('content2', '0.01', data=bytes([0] * 16 * 1024 * 1024))
        content2 = content2['outputs'][0]['value']['source']['sd_hash']
        self.assertEqual(48, (await self.status())['disk_space']['published_blobs_storage_used_mb'])
        self.assertEqual(0, (await self.status())['disk_space']['content_blobs_storage_used_mb'])
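
        # drive the background downloader directly; network_storage_limit (MB)
        # caps how many seed blobs blob_clean will keep around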
        background_downloader = BackgroundDownloader(self.daemon.conf, self.daemon.storage, self.daemon.blob_manager)
        self.daemon.conf.network_storage_limit = 32
        await self.clear()
        await self.blob_clean()
        self.assertEqual(0, (await self.status())['disk_space']['total_used_mb'])
        await background_downloader.download_blobs(content1)
        await self.assertBlobs(content1)
        await self.blob_clean()
        self.assertEqual(0, (await self.status())['disk_space']['content_blobs_storage_used_mb'])
        self.assertEqual(32, (await self.status())['disk_space']['seed_blobs_storage_used_mb'])
        self.daemon.conf.network_storage_limit = 48
        await background_downloader.download_blobs(content2)
        await self.assertBlobs(content1, content2)
        await self.blob_clean()
        self.assertEqual(0, (await self.status())['disk_space']['content_blobs_storage_used_mb'])
        self.assertEqual(48, (await self.status())['disk_space']['seed_blobs_storage_used_mb'])
        await self.clear()
        await background_downloader.download_blobs(content2)
        await self.assertBlobs(content2)
        await self.blob_clean()
        self.assertEqual(0, (await self.status())['disk_space']['content_blobs_storage_used_mb'])
        self.assertEqual(16, (await self.status())['disk_space']['seed_blobs_storage_used_mb'])

        # tests that an attempt to download something that isn't an sd blob will download the single blob and stop
        blobs = await self.get_blobs_from_sd_blob(self.reflector.blob_manager.get_blob(content1))
        await self.clear()
        await background_downloader.download_blobs(blobs[0].blob_hash)
        self.assertEqual({blobs[0].blob_hash}, self.daemon.blob_manager.completed_blob_hashes)

        # test that the disk space manager doesn't delete orphan network blobs
        await background_downloader.download_blobs(content1)
        await self.daemon.storage.db.execute_fetchall("update blob set added_on=0")  # so it is preferred for cleaning
        await self.daemon.jsonrpc_get("content2", save_file=False)
        while (await self.file_list())[0]['status'] != 'stopped':
            await asyncio.sleep(0.5)
        await self.assertBlobs(content1, no_files=False)

        self.daemon.conf.blob_storage_limit = 1
        await self.blob_clean()
        await self.assertBlobs(content1, no_files=False)

        self.daemon.conf.network_storage_limit = 0
        await self.blob_clean()
        self.assertEqual(0, (await self.status())['disk_space']['seed_blobs_storage_used_mb'])