2019-02-14 00:40:14 +01:00
|
|
|
import asyncio
|
2019-02-12 05:54:24 +01:00
|
|
|
import logging
|
2019-02-14 00:40:14 +01:00
|
|
|
import os
|
2019-04-29 00:19:58 +02:00
|
|
|
from binascii import unhexlify, hexlify
|
2019-02-14 00:40:14 +01:00
|
|
|
|
2019-04-29 00:19:58 +02:00
|
|
|
from lbrynet.schema import Claim
|
2019-02-25 03:48:29 +01:00
|
|
|
from lbrynet.testcase import CommandTestCase
|
2019-02-18 21:08:37 +01:00
|
|
|
from lbrynet.blob_exchange.downloader import BlobDownloader
|
2019-02-12 05:54:24 +01:00
|
|
|
|
|
|
|
|
|
|
|
class FileCommands(CommandTestCase):
    """Integration tests for the daemon's file_* and get JSON-RPC commands:
    listing, deleting, re-downloading, fee handling and download recovery."""

    # Keep test output quiet; raise to DEBUG when investigating failures.
    VERBOSITY = logging.WARN
async def test_file_management(self):
|
2019-03-26 03:06:36 +01:00
|
|
|
await self.stream_create('foo', '0.01')
|
|
|
|
await self.stream_create('foo2', '0.01')
|
2019-02-12 05:54:24 +01:00
|
|
|
|
2019-04-06 21:55:08 +02:00
|
|
|
file1, file2 = self.sout(self.daemon.jsonrpc_file_list('claim_name'))
|
2019-02-12 05:54:24 +01:00
|
|
|
self.assertEqual(file1['claim_name'], 'foo')
|
|
|
|
self.assertEqual(file2['claim_name'], 'foo2')
|
|
|
|
|
|
|
|
await self.daemon.jsonrpc_file_delete(claim_name='foo')
|
|
|
|
self.assertEqual(len(self.daemon.jsonrpc_file_list()), 1)
|
|
|
|
await self.daemon.jsonrpc_file_delete(claim_name='foo2')
|
|
|
|
self.assertEqual(len(self.daemon.jsonrpc_file_list()), 0)
|
|
|
|
|
|
|
|
await self.daemon.jsonrpc_get('lbry://foo')
|
|
|
|
self.assertEqual(len(self.daemon.jsonrpc_file_list()), 1)
|
2019-02-13 00:42:01 +01:00
|
|
|
|
2019-05-06 22:27:12 +02:00
|
|
|
async def test_announces(self):
|
|
|
|
# announces on publish
|
|
|
|
self.assertEqual(await self.daemon.storage.get_blobs_to_announce(), [])
|
|
|
|
await self.stream_create('foo', '0.01')
|
|
|
|
stream = self.daemon.jsonrpc_file_list()[0]
|
|
|
|
self.assertSetEqual(
|
|
|
|
set(await self.daemon.storage.get_blobs_to_announce()),
|
|
|
|
{stream.sd_hash, stream.descriptor.blobs[0].blob_hash}
|
|
|
|
)
|
|
|
|
self.assertTrue(await self.daemon.jsonrpc_file_delete(delete_all=True))
|
|
|
|
# announces on download
|
|
|
|
self.assertEqual(await self.daemon.storage.get_blobs_to_announce(), [])
|
|
|
|
stream = await self.daemon.jsonrpc_get('foo')
|
|
|
|
self.assertSetEqual(
|
|
|
|
set(await self.daemon.storage.get_blobs_to_announce()),
|
|
|
|
{stream.sd_hash, stream.descriptor.blobs[0].blob_hash}
|
|
|
|
)
|
|
|
|
|
2019-04-29 05:37:01 +02:00
|
|
|
async def test_file_list_fields(self):
|
|
|
|
await self.stream_create('foo', '0.01')
|
|
|
|
file_list = self.sout(self.daemon.jsonrpc_file_list())
|
|
|
|
self.assertEqual(
|
|
|
|
file_list[0]['timestamp'],
|
|
|
|
None
|
|
|
|
)
|
|
|
|
self.assertEqual(file_list[0]['confirmations'], -1)
|
|
|
|
await self.daemon.jsonrpc_resolve('foo')
|
|
|
|
file_list = self.sout(self.daemon.jsonrpc_file_list())
|
|
|
|
self.assertEqual(
|
|
|
|
file_list[0]['timestamp'],
|
|
|
|
self.ledger.headers[file_list[0]['height']]['timestamp']
|
|
|
|
)
|
|
|
|
self.assertEqual(file_list[0]['confirmations'], 1)
|
|
|
|
|
2019-05-01 18:16:32 +02:00
|
|
|
async def test_get_doesnt_touch_user_written_files_between_calls(self):
|
|
|
|
await self.stream_create('foo', '0.01', data=bytes([0] * (2 << 23)))
|
|
|
|
self.assertTrue(await self.daemon.jsonrpc_file_delete(claim_name='foo'))
|
|
|
|
first_path = (await self.daemon.jsonrpc_get('lbry://foo', save_file=True)).full_path
|
|
|
|
await self.wait_files_to_complete()
|
|
|
|
self.assertTrue(await self.daemon.jsonrpc_file_delete(claim_name='foo'))
|
|
|
|
with open(first_path, 'wb') as f:
|
|
|
|
f.write(b' ')
|
|
|
|
f.flush()
|
|
|
|
second_path = await self.daemon.jsonrpc_get('lbry://foo', save_file=True)
|
|
|
|
await self.wait_files_to_complete()
|
|
|
|
self.assertNotEquals(first_path, second_path)
|
2019-04-29 05:37:01 +02:00
|
|
|
|
2019-04-29 00:19:58 +02:00
|
|
|
async def test_file_list_updated_metadata_on_resolve(self):
|
|
|
|
await self.stream_create('foo', '0.01')
|
|
|
|
claim = await self.daemon.resolve('lbry://foo')
|
|
|
|
claim = claim['lbry://foo']['claim']['protobuf'].decode()
|
|
|
|
await self.daemon.jsonrpc_file_delete(claim_name='foo')
|
|
|
|
txid = await self.blockchain_claim_name('bar', claim, '0.01')
|
|
|
|
await self.daemon.jsonrpc_get('lbry://bar')
|
|
|
|
claim = Claim.from_bytes(unhexlify(claim))
|
|
|
|
claim.stream.description = "fix typos, fix the world"
|
|
|
|
await self.blockchain_update_name(txid, hexlify(claim.to_bytes()).decode(), '0.01')
|
|
|
|
await self.daemon.jsonrpc_resolve('lbry://bar')
|
|
|
|
file_list = self.daemon.jsonrpc_file_list()
|
|
|
|
self.assertEqual(file_list[0].stream_claim_info.claim.stream.description, claim.stream.description)
|
|
|
|
|
2019-02-13 00:42:01 +01:00
|
|
|
async def test_download_different_timeouts(self):
|
2019-03-26 03:06:36 +01:00
|
|
|
tx = await self.stream_create('foo', '0.01')
|
2019-04-20 08:11:19 +02:00
|
|
|
sd_hash = tx['outputs'][0]['value']['source']['sd_hash']
|
2019-02-13 00:42:01 +01:00
|
|
|
await self.daemon.jsonrpc_file_delete(claim_name='foo')
|
|
|
|
all_except_sd = [
|
|
|
|
blob_hash for blob_hash in self.server.blob_manager.completed_blob_hashes if blob_hash != sd_hash
|
|
|
|
]
|
|
|
|
await self.server.blob_manager.delete_blobs(all_except_sd)
|
2019-05-02 21:41:48 +02:00
|
|
|
resp = await self.daemon.jsonrpc_get('lbry://foo', timeout=2, save_file=True)
|
2019-02-13 00:42:01 +01:00
|
|
|
self.assertIn('error', resp)
|
2019-04-05 05:10:18 +02:00
|
|
|
self.assertEqual('Failed to download data blobs for sd hash %s within timeout' % sd_hash, resp['error'])
|
2019-05-02 21:41:48 +02:00
|
|
|
self.assertTrue(await self.daemon.jsonrpc_file_delete(claim_name='foo'), "data timeout didnt create a file")
|
2019-02-14 22:34:37 +01:00
|
|
|
await self.server.blob_manager.delete_blobs([sd_hash])
|
2019-05-02 21:41:48 +02:00
|
|
|
resp = await self.daemon.jsonrpc_get('lbry://foo', timeout=2, save_file=True)
|
2019-02-13 00:42:01 +01:00
|
|
|
self.assertIn('error', resp)
|
2019-04-05 05:10:18 +02:00
|
|
|
self.assertEqual('Failed to download sd blob %s within timeout' % sd_hash, resp['error'])
|
2019-02-14 00:40:14 +01:00
|
|
|
|
|
|
|
async def wait_files_to_complete(self):
|
2019-04-06 21:55:08 +02:00
|
|
|
while self.sout(self.daemon.jsonrpc_file_list(status='running')):
|
2019-02-14 00:40:14 +01:00
|
|
|
await asyncio.sleep(0.01)
|
|
|
|
|
|
|
|
async def test_filename_conflicts_management_on_resume_download(self):
|
2019-03-26 03:06:36 +01:00
|
|
|
await self.stream_create('foo', '0.01', data=bytes([0] * (1 << 23)))
|
2019-04-06 21:55:08 +02:00
|
|
|
file_info = self.sout(self.daemon.jsonrpc_file_list())[0]
|
2019-02-14 00:40:14 +01:00
|
|
|
original_path = os.path.join(self.daemon.conf.download_dir, file_info['file_name'])
|
|
|
|
await self.daemon.jsonrpc_file_delete(claim_name='foo')
|
|
|
|
await self.daemon.jsonrpc_get('lbry://foo')
|
|
|
|
with open(original_path, 'wb') as handle:
|
|
|
|
handle.write(b'some other stuff was there instead')
|
|
|
|
self.daemon.stream_manager.stop()
|
|
|
|
await self.daemon.stream_manager.start()
|
|
|
|
await asyncio.wait_for(self.wait_files_to_complete(), timeout=5) # if this hangs, file didnt get set completed
|
|
|
|
# check that internal state got through up to the file list API
|
2019-04-05 04:02:33 +02:00
|
|
|
stream = self.daemon.stream_manager.get_stream_by_stream_hash(file_info['stream_hash'])
|
2019-04-10 15:36:02 +02:00
|
|
|
file_info = self.sout(self.daemon.jsonrpc_file_list()[0])
|
2019-04-05 04:02:33 +02:00
|
|
|
self.assertEqual(stream.file_name, file_info['file_name'])
|
2019-02-14 00:40:14 +01:00
|
|
|
# checks if what the API shows is what he have at the very internal level.
|
2019-04-05 04:02:33 +02:00
|
|
|
self.assertEqual(stream.full_path, file_info['download_path'])
|
2019-02-14 01:30:59 +01:00
|
|
|
|
|
|
|
async def test_incomplete_downloads_erases_output_file_on_stop(self):
|
2019-04-05 05:10:18 +02:00
|
|
|
tx = await self.stream_create('foo', '0.01', data=b'deadbeef' * 1000000)
|
2019-04-20 08:11:19 +02:00
|
|
|
sd_hash = tx['outputs'][0]['value']['source']['sd_hash']
|
2019-04-06 21:55:08 +02:00
|
|
|
file_info = self.sout(self.daemon.jsonrpc_file_list())[0]
|
2019-02-14 01:30:59 +01:00
|
|
|
await self.daemon.jsonrpc_file_delete(claim_name='foo')
|
2019-03-11 23:43:00 +01:00
|
|
|
blobs = await self.server_storage.get_blobs_for_stream(
|
|
|
|
await self.server_storage.get_stream_hash_for_sd_hash(sd_hash)
|
|
|
|
)
|
|
|
|
all_except_sd_and_head = [
|
2019-04-05 05:10:18 +02:00
|
|
|
blob.blob_hash for blob in blobs[1:-1]
|
2019-02-14 01:30:59 +01:00
|
|
|
]
|
2019-03-11 23:43:00 +01:00
|
|
|
await self.server.blob_manager.delete_blobs(all_except_sd_and_head)
|
2019-04-10 15:36:02 +02:00
|
|
|
path = os.path.join(self.daemon.conf.download_dir, file_info['file_name'])
|
|
|
|
self.assertFalse(os.path.isfile(path))
|
2019-04-06 22:11:56 +02:00
|
|
|
resp = await self.out(self.daemon.jsonrpc_get('lbry://foo', timeout=2))
|
2019-03-11 23:43:00 +01:00
|
|
|
self.assertNotIn('error', resp)
|
2019-04-10 15:36:02 +02:00
|
|
|
self.assertTrue(os.path.isfile(path))
|
2019-03-11 23:43:00 +01:00
|
|
|
self.daemon.stream_manager.stop()
|
2019-04-24 15:46:46 +02:00
|
|
|
await asyncio.sleep(0.01, loop=self.loop) # FIXME: this sleep should not be needed
|
2019-04-10 15:36:02 +02:00
|
|
|
self.assertFalse(os.path.isfile(path))
|
2019-02-14 02:45:05 +01:00
|
|
|
|
|
|
|
    async def test_incomplete_downloads_retry(self):
        """A stopped, partially-complete download finishes once the missing
        blobs reappear on the server and the file is started again."""
        tx = await self.stream_create('foo', '0.01', data=b'deadbeef' * 1000000)
        sd_hash = tx['outputs'][0]['value']['source']['sd_hash']
        await self.daemon.jsonrpc_file_delete(claim_name='foo')
        blobs = await self.server_storage.get_blobs_for_stream(
            await self.server_storage.get_stream_hash_for_sd_hash(sd_hash)
        )
        # middle blobs only: index 0 (head) and the last entry stay available
        all_except_sd_and_head = [
            blob.blob_hash for blob in blobs[1:-1]
        ]

        # backup server blobs
        for blob_hash in all_except_sd_and_head:
            blob = self.server_blob_manager.get_blob(blob_hash)
            os.rename(blob.file_path, blob.file_path + '__')

        # erase all except sd blob
        await self.server.blob_manager.delete_blobs(all_except_sd_and_head)

        # start the download
        resp = await self.out(self.daemon.jsonrpc_get('lbry://foo', timeout=2))
        self.assertNotIn('error', resp)
        self.assertEqual(len(self.daemon.jsonrpc_file_list()), 1)
        self.assertEqual('running', self.sout(self.daemon.jsonrpc_file_list())[0]['status'])
        await self.daemon.jsonrpc_file_set_status('stop', claim_name='foo')

        # recover blobs
        for blob_hash in all_except_sd_and_head:
            blob = self.server_blob_manager.get_blob(blob_hash)
            os.rename(blob.file_path + '__', blob.file_path)
            # NOTE(review): clearing the whole blobs cache inside the loop looks
            # intentional (forces re-reading from disk) — confirm
            self.server_blob_manager.blobs.clear()
            await self.server_blob_manager.blob_completed(self.server_blob_manager.get_blob(blob_hash))

        await self.daemon.jsonrpc_file_set_status('start', claim_name='foo')
        await asyncio.wait_for(self.wait_files_to_complete(), timeout=5)
        file_info = self.sout(self.daemon.jsonrpc_file_list())[0]
        self.assertEqual(file_info['blobs_completed'], file_info['blobs_in_stream'])
        self.assertEqual('finished', file_info['status'])
async def test_unban_recovers_stream(self):
|
2019-04-30 22:56:29 +02:00
|
|
|
BlobDownloader.BAN_FACTOR = .5 # fixme: temporary field, will move to connection manager or a conf
|
2019-03-26 03:06:36 +01:00
|
|
|
tx = await self.stream_create('foo', '0.01', data=bytes([0] * (1 << 23)))
|
2019-04-20 08:11:19 +02:00
|
|
|
sd_hash = tx['outputs'][0]['value']['source']['sd_hash']
|
2019-02-18 21:08:37 +01:00
|
|
|
missing_blob_hash = (await self.daemon.jsonrpc_blob_list(sd_hash=sd_hash))[-2]
|
|
|
|
await self.daemon.jsonrpc_file_delete(claim_name='foo')
|
|
|
|
# backup blob
|
|
|
|
missing_blob = self.server_blob_manager.get_blob(missing_blob_hash)
|
|
|
|
os.rename(missing_blob.file_path, missing_blob.file_path + '__')
|
|
|
|
self.server_blob_manager.delete_blob(missing_blob_hash)
|
|
|
|
await self.daemon.jsonrpc_get('lbry://foo')
|
|
|
|
with self.assertRaises(asyncio.TimeoutError):
|
|
|
|
await asyncio.wait_for(self.wait_files_to_complete(), timeout=1)
|
|
|
|
# restore blob
|
|
|
|
os.rename(missing_blob.file_path + '__', missing_blob.file_path)
|
|
|
|
self.server_blob_manager.blobs.clear()
|
2019-02-22 01:14:51 +01:00
|
|
|
missing_blob = self.server_blob_manager.get_blob(missing_blob_hash)
|
2019-04-15 22:14:19 +02:00
|
|
|
self.server_blob_manager.blob_completed(missing_blob)
|
2019-02-18 21:08:37 +01:00
|
|
|
await asyncio.wait_for(self.wait_files_to_complete(), timeout=1)
|
2019-02-19 00:23:11 +01:00
|
|
|
|
|
|
|
    async def test_paid_download(self):
        """Fee-gated downloads: a fee above the available balance fails, a fee
        above the configured max key fee fails, and an affordable fee is paid
        to the publisher and survives a stream manager restart."""
        target_address = await self.blockchain.get_raw_change_address()

        # FAIL: beyond available balance
        await self.stream_create(
            'expensive', '0.01', data=b'pay me if you can',
            fee_currency='LBC', fee_amount='11.0', fee_address=target_address
        )
        await self.daemon.jsonrpc_file_delete(claim_name='expensive')
        response = await self.out(self.daemon.jsonrpc_get('lbry://expensive'))
        self.assertEqual(response['error'], 'fee of 11.00000 exceeds max available balance')
        self.assertEqual(len(self.daemon.jsonrpc_file_list()), 0)

        # FAIL: beyond maximum key fee
        await self.stream_create(
            'maxkey', '0.01', data=b'no pay me, no',
            fee_currency='LBC', fee_amount='111.0', fee_address=target_address
        )
        await self.daemon.jsonrpc_file_delete(claim_name='maxkey')
        response = await self.out(self.daemon.jsonrpc_get('lbry://maxkey'))
        self.assertEqual(len(self.daemon.jsonrpc_file_list()), 0)
        self.assertEqual(response['error'], 'fee of 111.00000 exceeds max configured to allow of 50.00000')

        # PASS: purchase is successful
        await self.stream_create(
            'icanpay', '0.01', data=b'I got the power!',
            fee_currency='LBC', fee_amount='1.0', fee_address=target_address
        )
        await self.daemon.jsonrpc_file_delete(claim_name='icanpay')
        # NOTE(review): exact balances depend on fixture funding and prior
        # publish fees in this test — confirm against the test harness setup
        await self.assertBalance(self.account, '9.925679')
        response = await self.daemon.jsonrpc_get('lbry://icanpay')
        raw_content_fee = response.content_fee.raw
        await self.ledger.wait(response.content_fee)
        await self.assertBalance(self.account, '8.925555')
        self.assertEqual(len(self.daemon.jsonrpc_file_list()), 1)

        await asyncio.wait_for(self.wait_files_to_complete(), timeout=1)

        # check that the fee was received
        starting_balance = await self.blockchain.get_balance()
        await self.generate(1)
        # one block reward (presumably 1.0) plus the 1.0 LBC key fee — confirm
        block_reward_and_claim_fee = 2.0
        self.assertEqual(
            await self.blockchain.get_balance(), starting_balance + block_reward_and_claim_fee
        )

        # restart the daemon and make sure the fee is still there

        self.daemon.stream_manager.stop()
        await self.daemon.stream_manager.start()
        self.assertEqual(len(self.daemon.jsonrpc_file_list()), 1)
        self.assertEqual(self.daemon.jsonrpc_file_list()[0].content_fee.raw, raw_content_fee)