import shutil
import tempfile
import unittest
import asyncio
import logging
from copy import deepcopy
from torba.testcase import AsyncioTestCase

from lbrynet.conf import Config
from lbrynet.extras.daemon.storage import SQLiteStorage
from lbrynet.blob.blob_info import BlobInfo
from lbrynet.blob.blob_manager import BlobFileManager
from lbrynet.stream.descriptor import StreamDescriptor

from tests.test_utils import random_lbry_hash

log = logging.getLogger()


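# Flatten a BlobInfo into the plain dict shape used by the storage layer; the
# blob hash is only included when the length is non-zero (the stream
# terminator blob has no hash).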
def blob_info_dict(blob_info):
    info = {
        "length": blob_info.length,
        "blob_num": blob_info.blob_num,
        "iv": blob_info.iv
    }
    if blob_info.length:
        info['blob_hash'] = blob_info.blob_hash
    return info


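# A minimal resolved-claim dict shaped like what the wallet returns for a
# stream claim; the content claim tests below pass it to storage.save_claims()
# and read the sd hash from value -> stream -> source -> source.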
fake_claim_info = {
    'name': "test",
    'claim_id': 'deadbeef' * 5,
    'address': "bT6wc54qiUUYt34HQF9wnW8b2o2yQTXf2S",
    'claim_sequence': 1,
    'value': {
        "version": "_0_0_1",
        "claimType": "streamType",
        "stream": {
            "source": {
                "source": 'deadbeef' * 12,
                "version": "_0_0_1",
                "contentType": "video/mp4",
                "sourceType": "lbry_sd_hash"
            },
            "version": "_0_0_1",
            "metadata": {
                "license": "LBRY inc",
                "description": "What is LBRY? An introduction with Alex Tabarrok",
                "language": "en",
                "title": "What is LBRY?",
                "author": "Samuel Bryan",
                "version": "_0_1_0",
                "nsfw": False,
                "licenseUrl": "",
                "preview": "",
                "thumbnail": "https://s3.amazonaws.com/files.lbry.io/logo.png"
            }
        }
    },
    'height': 10000,
    'amount': '1.0',
    'effective_amount': '1.0',
    'nout': 0,
    'txid': "deadbeef" * 8,
    'supports': [],
    'channel_claim_id': None,
    'channel_name': None
}


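# Shared fixture: an in-memory SQLiteStorage and a temporary blob directory,
# plus helpers for storing fake blobs and streams.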
class StorageTest(AsyncioTestCase):
    async def asyncSetUp(self):
        self.storage = SQLiteStorage(Config(), ':memory:')
        self.blob_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self.blob_dir)
        self.blob_manager = BlobFileManager(asyncio.get_event_loop(), self.blob_dir, self.storage)
        await self.storage.open()

    async def asyncTearDown(self):
        await self.storage.close()

    async def store_fake_blob(self, blob_hash, length=100):
        await self.storage.add_completed_blob(blob_hash, length)

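    # Build a real StreamDescriptor for the given blob infos, generate its sd
    # blob, and record the stream in storage.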
    async def store_fake_stream(self, stream_hash, blobs=None, file_name="fake_file", key="DEADBEEF"):
        blobs = blobs or [BlobInfo(1, 100, "DEADBEEF", random_lbry_hash())]
        descriptor = StreamDescriptor(
            asyncio.get_event_loop(), self.blob_dir, file_name, key, file_name, blobs, stream_hash
        )
        sd_blob = await descriptor.make_sd_blob()
        await self.storage.store_stream(sd_blob, descriptor)
        return descriptor

    async def make_and_store_fake_stream(self, blob_count=2, stream_hash=None):
        stream_hash = stream_hash or random_lbry_hash()
        blobs = [
            BlobInfo(i + 1, 100, "DEADBEEF", random_lbry_hash())
            for i in range(blob_count)
        ]
        await self.store_fake_stream(stream_hash, blobs)


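# Blob and support bookkeeping against the in-memory database.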
class TestSQLiteStorage(StorageTest):
    async def test_setup(self):
        files = await self.storage.get_all_lbry_files()
        self.assertEqual(len(files), 0)
        blobs = await self.storage.get_all_blob_hashes()
        self.assertEqual(len(blobs), 0)

    async def test_store_blob(self):
        blob_hash = random_lbry_hash()
        await self.store_fake_blob(blob_hash)
        blob_hashes = await self.storage.get_all_blob_hashes()
        self.assertEqual(blob_hashes, [blob_hash])

    async def test_delete_blob(self):
        blob_hash = random_lbry_hash()
        await self.store_fake_blob(blob_hash)
        blob_hashes = await self.storage.get_all_blob_hashes()
        self.assertEqual(blob_hashes, [blob_hash])
        await self.storage.delete_blobs_from_db(blob_hashes)
        blob_hashes = await self.storage.get_all_blob_hashes()
        self.assertEqual(blob_hashes, [])

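    # Each of the ten claims is saved with two of the twenty random supports;
    # get_supports() should return exactly those supports for each claim id.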
    async def test_supports_storage(self):
        claim_ids = [random_lbry_hash() for _ in range(10)]
        random_supports = [{
            "txid": random_lbry_hash(),
            "nout": i,
            "address": f"addr{i}",
            "amount": f"{i}.0"
        } for i in range(20)]
        expected_supports = {}
        for idx, claim_id in enumerate(claim_ids):
            await self.storage.save_supports({claim_id: random_supports[idx*2:idx*2+2]})
            for random_support in random_supports[idx*2:idx*2+2]:
                random_support['claim_id'] = claim_id
                expected_supports.setdefault(claim_id, []).append(random_support)

        supports = await self.storage.get_supports(claim_ids[0])
        self.assertEqual(supports, expected_supports[claim_ids[0]])
        all_supports = await self.storage.get_supports(*claim_ids)
        for support in all_supports:
            self.assertIn(support, expected_supports[support['claim_id']])


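# Streams are stored and deleted independently of file rows, so
# get_all_lbry_files() stays empty throughout this test.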
class StreamStorageTests(StorageTest):
    async def test_store_and_delete_stream(self):
        stream_hash = random_lbry_hash()
        descriptor = await self.store_fake_stream(stream_hash)
        files = await self.storage.get_all_lbry_files()
        self.assertListEqual(files, [])
        stream_hashes = await self.storage.get_all_stream_hashes()
        self.assertListEqual(stream_hashes, [stream_hash])
        await self.storage.delete_stream(descriptor)
        files = await self.storage.get_all_lbry_files()
        self.assertListEqual(files, [])
        stream_hashes = await self.storage.get_all_stream_hashes()
        self.assertListEqual(stream_hashes, [])


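# The remaining test cases reference fixture helpers (self.db_dir,
# store_fake_stream_blob, an sd_hash argument to make_and_store_fake_stream)
# that StorageTest does not provide, so they are skipped.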
@unittest.skip("relies on fixture helpers that StorageTest does not provide")
class FileStorageTests(StorageTest):
    async def test_store_file(self):
        download_directory = self.db_dir
        out = await self.storage.get_all_lbry_files()
        self.assertEqual(len(out), 0)

        stream_hash = random_lbry_hash()
        sd_hash = random_lbry_hash()
        blob1 = random_lbry_hash()
        blob2 = random_lbry_hash()

        await self.store_fake_blob(sd_hash)
        await self.store_fake_blob(blob1)
        await self.store_fake_blob(blob2)

        await self.store_fake_stream(stream_hash, sd_hash)
        await self.store_fake_stream_blob(stream_hash, blob1, 1)
        await self.store_fake_stream_blob(stream_hash, blob2, 2)

        blob_data_rate = 0
        file_name = "test file"
        await self.storage.save_published_file(
            stream_hash, file_name, download_directory, blob_data_rate
        )

        files = await self.storage.get_all_lbry_files()
        self.assertEqual(1, len(files))


@unittest.skip("relies on fixture helpers that StorageTest does not provide")
class ContentClaimStorageTests(StorageTest):
    async def test_store_content_claim(self):
        download_directory = self.db_dir
        out = await self.storage.get_all_lbry_files()
        self.assertEqual(len(out), 0)

        stream_hash = random_lbry_hash()
        sd_hash = fake_claim_info['value']['stream']['source']['source']

        # test that we can associate a content claim to a file
        # use the generated sd hash in the fake claim
        fake_outpoint = "%s:%i" % (fake_claim_info['txid'], fake_claim_info['nout'])

        await self.make_and_store_fake_stream(blob_count=2, stream_hash=stream_hash, sd_hash=sd_hash)
        blob_data_rate = 0
        file_name = "test file"
        await self.storage.save_published_file(
            stream_hash, file_name, download_directory, blob_data_rate
        )
        await self.storage.save_claims([fake_claim_info])
        await self.storage.save_content_claim(stream_hash, fake_outpoint)
        stored_content_claim = await self.storage.get_content_claim(stream_hash)
        self.assertDictEqual(stored_content_claim, fake_claim_info)

        stream_hashes = await self.storage.get_old_stream_hashes_for_claim_id(fake_claim_info['claim_id'],
                                                                              stream_hash)
        self.assertListEqual(stream_hashes, [])

        # test that we can't associate a claim update with a new stream to the file
        second_stream_hash, second_sd_hash = random_lbry_hash(), random_lbry_hash()
        await self.make_and_store_fake_stream(blob_count=2, stream_hash=second_stream_hash, sd_hash=second_sd_hash)
        with self.assertRaisesRegex(Exception, "stream mismatch"):
            await self.storage.save_content_claim(second_stream_hash, fake_outpoint)

        # test that we can associate a new claim update containing the same stream to the file
        update_info = deepcopy(fake_claim_info)
        update_info['txid'] = "beef0000" * 12
        update_info['nout'] = 0
        second_outpoint = "%s:%i" % (update_info['txid'], update_info['nout'])
        await self.storage.save_claims([update_info])
        await self.storage.save_content_claim(stream_hash, second_outpoint)
        update_info_result = await self.storage.get_content_claim(stream_hash)
        self.assertDictEqual(update_info_result, update_info)

        # test that we can't associate an update with a mismatching claim id
        invalid_update_info = deepcopy(fake_claim_info)
        invalid_update_info['txid'] = "beef0001" * 12
        invalid_update_info['nout'] = 0
        invalid_update_info['claim_id'] = "beef0002" * 5
        invalid_update_outpoint = "%s:%i" % (invalid_update_info['txid'], invalid_update_info['nout'])
        with self.assertRaisesRegex(Exception, "mismatching claim ids when updating stream "
                                               "deadbeefdeadbeefdeadbeefdeadbeefdeadbeef "
                                               "vs beef0002beef0002beef0002beef0002beef0002"):
            await self.storage.save_claims([invalid_update_info])
            await self.storage.save_content_claim(stream_hash, invalid_update_outpoint)
        current_claim_info = await self.storage.get_content_claim(stream_hash)
        # this should still be the previous update
        self.assertDictEqual(current_claim_info, update_info)