2017-06-16 19:13:41 +02:00
|
|
|
import tempfile
|
|
|
|
import shutil
|
|
|
|
import os
|
|
|
|
import random
|
|
|
|
import string
|
|
|
|
|
2017-09-28 19:21:26 +02:00
|
|
|
from lbrynet.tests.util import random_lbry_hash
|
2017-06-16 19:13:41 +02:00
|
|
|
from lbrynet.core.BlobManager import DiskBlobManager
|
|
|
|
from lbrynet.core.HashAnnouncer import DummyHashAnnouncer
|
2018-02-12 20:19:15 +01:00
|
|
|
from lbrynet.database.storage import SQLiteStorage
|
2017-06-16 19:13:41 +02:00
|
|
|
from lbrynet.core.Peer import Peer
|
2017-08-02 18:11:41 +02:00
|
|
|
from lbrynet import conf
|
2017-06-16 19:13:41 +02:00
|
|
|
from lbrynet.core.cryptoutils import get_lbry_hash_obj
|
|
|
|
from twisted.trial import unittest
|
|
|
|
|
|
|
|
from twisted.internet import defer
|
|
|
|
|
2018-02-12 20:19:15 +01:00
|
|
|
|
2017-06-16 19:13:41 +02:00
|
|
|
class BlobManagerTest(unittest.TestCase):
    """Integration tests for DiskBlobManager backed by SQLiteStorage.

    Each test gets fresh temporary blob and database directories; blobs are
    created with random lowercase-ASCII payloads and verified through the
    manager's public API.
    """

    def setUp(self):
        conf.initialize_settings()
        self.blob_dir = tempfile.mkdtemp()
        self.db_dir = tempfile.mkdtemp()
        hash_announcer = DummyHashAnnouncer()
        self.bm = DiskBlobManager(hash_announcer, self.blob_dir, SQLiteStorage(self.db_dir))
        self.peer = Peer('somehost', 22)

    def tearDown(self):
        self.bm.stop()
        # BlobFile will try to delete itself in _close_writer
        # thus when calling rmtree we may get a FileNotFoundError
        # for the blob file
        shutil.rmtree(self.blob_dir, ignore_errors=True)
        shutil.rmtree(self.db_dir)

    @defer.inlineCallbacks
    def _create_and_add_blob(self, should_announce=False):
        """Create a random blob, write it through the manager, and return its hash.

        Args:
            should_announce: forwarded to blob_completed() so tests can mark
                the blob for DHT announcement.

        Returns (via returnValue): the hex blob hash of the created blob.
        """
        # create and add blob to blob manager
        data_len = random.randint(1, 1000)
        # NOTE: ascii_lowercase (not the py2-only, locale-dependent `lowercase`);
        # `_` avoids shadowing data_len inside the generator expression
        data = ''.join(random.choice(string.ascii_lowercase) for _ in range(data_len))

        hashobj = get_lbry_hash_obj()
        hashobj.update(data)
        blob_hash = hashobj.hexdigest()

        # create new blob
        yield self.bm.storage.setup()
        yield self.bm.setup()
        blob = yield self.bm.get_blob(blob_hash, len(data))

        writer, finished_d = yield blob.open_for_writing(self.peer)
        yield writer.write(data)
        yield self.bm.blob_completed(blob, should_announce)

        # check to see if blob is there
        self.assertTrue(os.path.isfile(os.path.join(self.blob_dir, blob_hash)))
        blobs = yield self.bm.get_all_verified_blobs()
        self.assertIn(blob_hash, blobs)
        defer.returnValue(blob_hash)

    @defer.inlineCallbacks
    def test_create_blob(self):
        """Creating blobs registers each one as verified."""
        blob_hashes = []

        # create a bunch of blobs
        for i in range(0, 10):
            blob_hash = yield self._create_and_add_blob()
            blob_hashes.append(blob_hash)
        blobs = yield self.bm.get_all_verified_blobs()
        self.assertEqual(10, len(blobs))

    @defer.inlineCallbacks
    def test_delete_blob(self):
        """Deleting a blob removes it from disk, storage, and the cache."""
        # create blob
        blob_hash = yield self._create_and_add_blob()
        blobs = yield self.bm.get_all_verified_blobs()
        self.assertEqual(len(blobs), 1)

        # delete blob
        yield self.bm.delete_blobs([blob_hash])
        self.assertFalse(os.path.isfile(os.path.join(self.blob_dir, blob_hash)))
        blobs = yield self.bm.get_all_verified_blobs()
        self.assertEqual(len(blobs), 0)
        blobs = yield self.bm.storage.get_all_blob_hashes()
        self.assertEqual(len(blobs), 0)
        self.assertNotIn(blob_hash, self.bm.blobs)

        # delete blob that was already deleted once
        yield self.bm.delete_blobs([blob_hash])

        # delete blob that does not exist, nothing will
        # happen
        blob_hash = random_lbry_hash()
        yield self.bm.delete_blobs([blob_hash])

    @defer.inlineCallbacks
    def test_delete_open_blob(self):
        """A blob that is open for writing must survive a delete request."""
        # Test that a blob that is opened for writing will not be deleted

        # create blobs
        blob_hashes = []
        for i in range(0, 10):
            blob_hash = yield self._create_and_add_blob()
            blob_hashes.append(blob_hash)
        blobs = yield self.bm.get_all_verified_blobs()
        self.assertEqual(len(blobs), 10)

        # open the last blob
        blob = yield self.bm.get_blob(blob_hashes[-1])
        writer, finished_d = yield blob.open_for_writing(self.peer)

        # delete the last blob and check if it still exists
        # (explicitly target blob_hashes[-1] rather than relying on the
        # for-loop variable leaking out of the loop above)
        yield self.bm.delete_blobs([blob_hashes[-1]])
        blobs = yield self.bm.get_all_verified_blobs()
        self.assertEqual(len(blobs), 10)
        self.assertIn(blob_hashes[-1], blobs)
        self.assertTrue(os.path.isfile(os.path.join(self.blob_dir, blob_hashes[-1])))

    @defer.inlineCallbacks
    def test_should_announce(self):
        """The should-announce flag can be set at creation and cleared later."""
        # create blob with should announce
        blob_hash = yield self._create_and_add_blob(should_announce=True)
        out = yield self.bm.get_should_announce(blob_hash)
        self.assertTrue(out)
        count = yield self.bm.count_should_announce_blobs()
        self.assertEqual(1, count)

        # set should announce to False
        yield self.bm.set_should_announce(blob_hash, should_announce=False)
        out = yield self.bm.get_should_announce(blob_hash)
        self.assertFalse(out)
        count = yield self.bm.count_should_announce_blobs()
        self.assertEqual(0, count)