2019-01-07 23:42:50 +01:00
|
|
|
import asyncio
|
2015-08-20 17:27:15 +02:00
|
|
|
import logging
|
|
|
|
import os
|
2018-07-28 02:31:15 +02:00
|
|
|
from binascii import unhexlify
|
2018-02-12 19:43:36 +01:00
|
|
|
from sqlite3 import IntegrityError
|
2019-01-07 08:52:53 +01:00
|
|
|
from twisted.internet import defer
|
2018-12-15 21:29:25 +01:00
|
|
|
from lbrynet.extras.compat import f2d
|
2017-09-13 21:46:39 +02:00
|
|
|
from lbrynet.blob.blob_file import BlobFile
|
|
|
|
from lbrynet.blob.creator import BlobFileCreator
|
2015-08-20 17:27:15 +02:00
|
|
|
|
2015-09-08 21:42:56 +02:00
|
|
|
# Module-level logger, named after this module per standard logging convention.
log = logging.getLogger(__name__)
|
|
|
|
|
2017-08-10 19:49:43 +02:00
|
|
|
|
2018-07-22 00:34:59 +02:00
|
|
|
class DiskBlobManager:
    """Manage blobs stored as individual files on the local disk.

    Keeps an in-memory cache of ``BlobFile`` objects keyed by blob hash,
    persists blob metadata through a ``SQLiteStorage`` object, and — when a
    DHT node datastore is supplied — mirrors the raw (binary) hashes of
    completed blobs into its ``completed_blobs`` set so they can be
    announced to the network.
    """

    def __init__(self, blob_dir, storage, node_datastore=None):
        """
        This class stores blobs on the hard disk

        blob_dir - directory where blobs are stored
        storage - SQLiteStorage object
        node_datastore - optional DHT datastore; when provided, its
            ``completed_blobs`` set is kept in sync with the blobs
            finished on disk (raw binary hashes)
        """
        self.storage = storage
        self.blob_dir = blob_dir
        self._node_datastore = node_datastore
        self.blob_creator_type = BlobFileCreator
        # TODO: consider using an LRU for blobs as there could potentially
        # be thousands of blobs loaded up, many stale
        self.blobs = {}
        self.blob_hashes_to_delete = {}  # {blob_hash: being_deleted (True/False)}

    async def setup(self):
        """Seed the DHT datastore with every blob already finished on disk."""
        if self._node_datastore is not None:
            raw_blob_hashes = await self.storage.get_all_finished_blobs()
            self._node_datastore.completed_blobs.update(raw_blob_hashes)

    async def stop(self):
        """No teardown required; present for interface symmetry with setup()."""
        pass

    def get_blob(self, blob_hash, length=None):
        """Return a blob identified by blob_hash, which may be a new blob or a
        blob that is already on the hard disk
        """
        if length is not None and not isinstance(length, int):
            raise Exception("invalid length type: {} ({})".format(length, str(type(length))))
        if blob_hash in self.blobs:
            return self.blobs[blob_hash]
        return self._make_new_blob(blob_hash, length)

    def get_blob_creator(self):
        """Return a fresh blob creator that writes into this manager's blob_dir."""
        return self.blob_creator_type(self.blob_dir)

    def _make_new_blob(self, blob_hash, length=None):
        """Instantiate, cache and return a BlobFile for blob_hash."""
        log.debug('Making a new blob for %s', blob_hash)
        blob = BlobFile(self.blob_dir, blob_hash, length)
        self.blobs[blob_hash] = blob
        return blob

    @defer.inlineCallbacks
    def blob_completed(self, blob, should_announce=False, next_announce_time=None):
        """Record a finished blob in storage and, if configured, in the DHT
        datastore.

        The datastore tracks raw binary hashes, hence the unhexlify of the
        hex ``blob.blob_hash``.
        """
        yield f2d(self.storage.add_completed_blob(
            blob.blob_hash, blob.length, next_announce_time, should_announce
        ))
        if self._node_datastore is not None:
            self._node_datastore.completed_blobs.add(unhexlify(blob.blob_hash))

    def completed_blobs(self, blobhashes_to_check):
        """Return the subset of blobhashes_to_check whose blobs are verified."""
        return self._completed_blobs(blobhashes_to_check)

    def count_should_announce_blobs(self):
        """Deferred -> number of blobs currently flagged for announcement."""
        return f2d(self.storage.count_should_announce_blobs())

    def set_should_announce(self, blob_hash, should_announce):
        """Flag (or unflag) blob_hash for announcement.

        Stamps the change with the current asyncio event-loop time, so this
        must be called while a loop is running.
        """
        return f2d(self.storage.set_should_announce(
            blob_hash, asyncio.get_running_loop().time(), should_announce
        ))

    def get_should_announce(self, blob_hash):
        """Deferred -> whether blob_hash is flagged for announcement."""
        return f2d(self.storage.should_announce(blob_hash))

    def creator_finished(self, blob_creator, should_announce):
        """Register the blob produced by blob_creator as completed.

        Raises Exception when the creator has no hash, no length, or when the
        hash is already cached as a completed blob.
        """
        log.debug("blob_creator.blob_hash: %s", blob_creator.blob_hash)
        if blob_creator.blob_hash is None:
            raise Exception("Blob hash is None")
        if blob_creator.blob_hash in self.blobs:
            raise Exception("Creator finished for blob that is already marked as completed")
        if blob_creator.length is None:
            # message previously claimed "a length of 0", which misstated the
            # actual condition (the length was never set)
            raise Exception("Blob length is None")
        new_blob = BlobFile(self.blob_dir, blob_creator.blob_hash, blob_creator.length)
        self.blobs[blob_creator.blob_hash] = new_blob
        return self.blob_completed(new_blob, should_announce)

    def get_all_verified_blobs(self):
        """Deferred -> hashes of all blobs that exist on disk and verify."""
        d = f2d(self._get_all_verified_blob_hashes())
        d.addCallback(self.completed_blobs)
        return d

    @defer.inlineCallbacks
    def delete_blobs(self, blob_hashes):
        """Delete the given blobs from disk, the cache, the DHT datastore and
        the database.

        Falsy entries are skipped. Per-blob failures are logged and do not
        abort deletion of the remaining blobs. A "FOREIGN KEY constraint
        failed" IntegrityError from the database delete is tolerated (the
        blob row is still referenced elsewhere); any other IntegrityError is
        re-raised.
        """
        bh_to_delete_from_db = []
        for blob_hash in blob_hashes:
            if not blob_hash:
                continue
            if self._node_datastore is not None:
                try:
                    self._node_datastore.completed_blobs.remove(unhexlify(blob_hash))
                except KeyError:
                    # hash was never mirrored into the datastore; nothing to do
                    pass
            try:
                blob = self.get_blob(blob_hash)
                blob.delete()
                bh_to_delete_from_db.append(blob_hash)
                del self.blobs[blob_hash]
            except Exception as e:
                log.warning("Failed to delete blob file. Reason: %s", e)
        try:
            yield f2d(self.storage.delete_blobs_from_db(bh_to_delete_from_db))
        except IntegrityError as err:
            if str(err) != "FOREIGN KEY constraint failed":
                raise err

    def _completed_blobs(self, blobhashes_to_check):
        """Return the hashes from blobhashes_to_check whose blobs verify."""
        blobs = [self.get_blob(b) for b in blobhashes_to_check]
        blob_hashes = [b.blob_hash for b in blobs if b.verified]
        return blob_hashes

    async def _get_all_verified_blob_hashes(self):
        """Return stored blob hashes whose backing file exists in blob_dir."""
        blobs = await self.storage.get_all_blob_hashes()
        verified_blobs = []
        for blob_hash in blobs:
            file_path = os.path.join(self.blob_dir, blob_hash)
            if os.path.isfile(file_path):
                verified_blobs.append(blob_hash)
        return verified_blobs
|