import os
import asyncio
import binascii
import logging
import random
import typing
from typing import Optional
from aiohttp.web import Request
from lbry.error import ResolveError, InvalidStreamDescriptorError, DownloadSDTimeoutError, InsufficientFundsError
from lbry.error import ResolveTimeoutError, DownloadDataTimeoutError, KeyFeeAboveMaxAllowedError
from lbry.utils import cache_concurrent
from lbry.stream.descriptor import StreamDescriptor
from lbry.stream.managed_stream import ManagedStream
from lbry.schema.claim import Claim
from lbry.schema.url import URL
from lbry.blockchain.dewies import dewies_to_lbc
from lbry.blockchain.transaction import Output

if typing.TYPE_CHECKING:
    from lbry.conf import Config
    from lbry.blob.blob_manager import BlobManager
    from lbry.dht.node import Node
    from lbry.service.exchange_rate_manager import ExchangeRateManager
    from lbry.service.base import Service
    # 'Transaction' and 'Wallet' appear in string annotations below but had no
    # imports; the paths here are assumed from the sibling imports above.
    from lbry.blockchain.transaction import Transaction
    from lbry.wallet import Wallet


# Placeholder stubs pending the refactor; the real implementations live
# elsewhere and these exist only to satisfy the annotations below.
class AnalyticsManager:
    pass


class SQLiteStorage:
    pass


class StoredContentClaim:
    pass


log = logging.getLogger(__name__)

FILTER_FIELDS = [
    'rowid',
    'status',
    'file_name',
    'added_on',
    'sd_hash',
    'stream_hash',
    'claim_name',
    'claim_height',
    'claim_id',
    'outpoint',
    'txid',
    'nout',
    'channel_claim_id',
    'channel_name',
    'full_status',  # TODO: remove
    'blobs_remaining',
    'blobs_in_stream'
]

COMPARISON_OPERATORS = {
    'eq': lambda a, b: a == b,
    'ne': lambda a, b: a != b,
    'g': lambda a, b: a > b,
    'l': lambda a, b: a < b,
    'ge': lambda a, b: a >= b,
    'le': lambda a, b: a <= b,
}
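
# A minimal sketch of how these operators are applied (hypothetical values;
# the real lookups happen in StreamManager.get_filtered_streams below):
#
#   COMPARISON_OPERATORS['ge'](stream.claim_height, 800_000)  # height >= 800000
#   COMPARISON_OPERATORS['eq'](stream.status, 'running')      # exact match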


def path_or_none(path) -> Optional[str]:
    if not path:
        return
    return binascii.unhexlify(path).decode()
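
# File names and download paths are stored hex-encoded in the database, so
# '666f6f2e747874' decodes back to 'foo.txt' and an empty or NULL column
# comes back as None:
#
#   assert path_or_none('666f6f2e747874') == 'foo.txt'
#   assert path_or_none(None) is None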


class StreamManager:
    def __init__(self, loop: asyncio.AbstractEventLoop, config: 'Config', blob_manager: 'BlobManager',
                 service: 'Service', storage: 'SQLiteStorage', node: Optional['Node'],
                 analytics_manager: Optional['AnalyticsManager'] = None):
        self.loop = loop
        self.config = config
        self.blob_manager = blob_manager
        self.service = service
        self.storage = storage
        self.node = node
        self.analytics_manager = analytics_manager
        self.streams: typing.Dict[str, ManagedStream] = {}
        self.resume_saving_task: Optional[asyncio.Task] = None
        self.re_reflect_task: Optional[asyncio.Task] = None
        self.update_stream_finished_futs: typing.List[asyncio.Future] = []
        self.running_reflector_uploads: typing.Dict[str, asyncio.Task] = {}
        self.started = asyncio.Event(loop=self.loop)

    async def _update_content_claim(self, stream: ManagedStream):
        claim_info = await self.storage.get_content_claim(stream.stream_hash)
        self.streams.setdefault(stream.sd_hash, stream).set_claim(claim_info, claim_info['value'])

    async def recover_streams(self, file_infos: typing.List[typing.Dict]):
        to_restore = []

        async def recover_stream(sd_hash: str, stream_hash: str, stream_name: str,
                                 suggested_file_name: str, key: str,
                                 content_fee: Optional['Transaction']) -> Optional[StreamDescriptor]:
            sd_blob = self.blob_manager.get_blob(sd_hash)
            blobs = await self.storage.get_blobs_for_stream(stream_hash)
            descriptor = await StreamDescriptor.recover(
                self.blob_manager.blob_dir, sd_blob, stream_hash, stream_name, suggested_file_name, key, blobs
            )
            if not descriptor:
                return
            to_restore.append((descriptor, sd_blob, content_fee))

        await asyncio.gather(*[
            recover_stream(
                file_info['sd_hash'], file_info['stream_hash'], binascii.unhexlify(file_info['stream_name']).decode(),
                binascii.unhexlify(file_info['suggested_file_name']).decode(), file_info['key'],
                file_info['content_fee']
            ) for file_info in file_infos
        ])

        if to_restore:
            await self.storage.recover_streams(to_restore, self.config.download_dir)

        # if self.blob_manager._save_blobs:
        #     log.info("Recovered %i/%i attempted streams", len(to_restore), len(file_infos))

    async def add_stream(self, rowid: int, sd_hash: str, file_name: Optional[str],
                         download_directory: Optional[str], status: str,
                         claim: Optional['StoredContentClaim'], content_fee: Optional['Transaction'],
                         added_on: Optional[int], fully_reflected: bool):
        try:
            descriptor = await self.blob_manager.get_stream_descriptor(sd_hash)
        except InvalidStreamDescriptorError as err:
            log.warning("Failed to start stream for sd %s - %s", sd_hash, str(err))
            return
        stream = ManagedStream(
            self.loop, self.config, self.blob_manager, descriptor.sd_hash, download_directory, file_name, status,
            claim, content_fee=content_fee, rowid=rowid, descriptor=descriptor,
            analytics_manager=self.analytics_manager, added_on=added_on
        )
        if fully_reflected:
            stream.fully_reflected.set()
        self.streams[sd_hash] = stream
        self.storage.content_claim_callbacks[stream.stream_hash] = lambda: self._update_content_claim(stream)

    async def load_and_resume_streams_from_database(self):
        to_recover = []
        to_start = []

        await self.storage.update_manually_removed_files_since_last_run()

        for file_info in await self.storage.get_all_lbry_files():
            # if the sd blob is not verified, try to reconstruct it from the database.
            # this could be because the blob files were deleted manually, or because
            # save_blobs was not set when the stream was downloaded
            if not self.blob_manager.is_blob_verified(file_info['sd_hash']):
                to_recover.append(file_info)
            to_start.append(file_info)
        if to_recover:
            log.info("Recovering %i files", len(to_recover))
            await self.recover_streams(to_recover)

        log.info("Initializing %i files", len(to_start))
        to_resume_saving = []
        add_stream_tasks = []
        for file_info in to_start:
            file_name = path_or_none(file_info['file_name'])
            download_directory = path_or_none(file_info['download_directory'])
            if file_name and download_directory and not file_info['saved_file'] and file_info['status'] == 'running':
                to_resume_saving.append((file_name, download_directory, file_info['sd_hash']))
            add_stream_tasks.append(self.loop.create_task(self.add_stream(
                file_info['rowid'], file_info['sd_hash'], file_name,
                download_directory, file_info['status'],
                file_info['claim'], file_info['content_fee'],
                file_info['added_on'], file_info['fully_reflected']
            )))
        if add_stream_tasks:
            await asyncio.gather(*add_stream_tasks, loop=self.loop)
        log.info("Started stream manager with %i files", len(self.streams))
        if not self.node:
            log.info("no DHT node given, resuming downloads trusting that we can contact reflector")
        if to_resume_saving:
            self.resume_saving_task = self.loop.create_task(self.resume(to_resume_saving))

    async def resume(self, to_resume_saving):
        log.info("Resuming saving %i files", len(to_resume_saving))
        await asyncio.gather(
            *(self.streams[sd_hash].save_file(file_name, download_directory, node=self.node)
              for (file_name, download_directory, sd_hash) in to_resume_saving),
            loop=self.loop
        )

    async def reflect_streams(self):
        while True:
            if self.config.reflect_streams and self.config.reflector_servers:
                sd_hashes = await self.storage.get_streams_to_re_reflect()
                sd_hashes = [sd for sd in sd_hashes if sd in self.streams]
                batch = []
                while sd_hashes:
                    stream = self.streams[sd_hashes.pop()]
                    if self.blob_manager.is_blob_verified(stream.sd_hash) and stream.blobs_completed and \
                            stream.sd_hash not in self.running_reflector_uploads and not \
                            stream.fully_reflected.is_set():
                        batch.append(self.reflect_stream(stream))
                    if len(batch) >= self.config.concurrent_reflector_uploads:
                        await asyncio.gather(*batch, loop=self.loop)
                        batch = []
                if batch:
                    await asyncio.gather(*batch, loop=self.loop)
            await asyncio.sleep(300, loop=self.loop)

    async def start(self):
        await self.load_and_resume_streams_from_database()
        self.re_reflect_task = self.loop.create_task(self.reflect_streams())
        self.started.set()

    def stop(self):
        if self.resume_saving_task and not self.resume_saving_task.done():
            self.resume_saving_task.cancel()
        if self.re_reflect_task and not self.re_reflect_task.done():
            self.re_reflect_task.cancel()
        while self.streams:
            _, stream = self.streams.popitem()
            stream.stop_tasks()
        while self.update_stream_finished_futs:
            self.update_stream_finished_futs.pop().cancel()
        while self.running_reflector_uploads:
            _, t = self.running_reflector_uploads.popitem()
            t.cancel()
        self.started.clear()
        log.info("finished stopping the stream manager")

    def reflect_stream(self, stream: ManagedStream, server: Optional[str] = None,
                       port: Optional[int] = None) -> asyncio.Task:
        if not server or not port:
            server, port = random.choice(self.config.reflector_servers)
        if stream.sd_hash in self.running_reflector_uploads:
            return self.running_reflector_uploads[stream.sd_hash]
        task = self.loop.create_task(stream.upload_to_reflector(server, port))
        self.running_reflector_uploads[stream.sd_hash] = task
        task.add_done_callback(
            lambda _: None if stream.sd_hash not in self.running_reflector_uploads else
            self.running_reflector_uploads.pop(stream.sd_hash)
        )
        return task
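
    # Uploads are deduplicated per sd_hash: calling this again for the same
    # stream while an upload is in flight returns the existing task, e.g.
    # (hypothetical usage):
    #
    #   task = manager.reflect_stream(stream)
    #   assert manager.reflect_stream(stream) is task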

    async def create_stream(self, file_path: str, key: Optional[bytes] = None,
                            iv_generator: Optional[typing.Generator[bytes, None, None]] = None) -> ManagedStream:
        stream = await ManagedStream.create(self.loop, self.config, self.blob_manager, file_path, key, iv_generator)
        self.streams[stream.sd_hash] = stream
        self.storage.content_claim_callbacks[stream.stream_hash] = lambda: self._update_content_claim(stream)
        if self.config.reflect_streams and self.config.reflector_servers:
            self.reflect_stream(stream)
        return stream
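
    # A minimal sketch of publishing a local file as a stream (hypothetical
    # setup; assumes an initialized StreamManager named `manager`):
    #
    #   stream = await manager.create_stream('/path/to/video.mp4')
    #   print(stream.sd_hash)  # reflected automatically if reflect_streams is set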

    async def delete_stream(self, stream: ManagedStream, delete_file: Optional[bool] = False):
        if stream.sd_hash in self.running_reflector_uploads:
            self.running_reflector_uploads[stream.sd_hash].cancel()
        stream.stop_tasks()
        if stream.sd_hash in self.streams:
            del self.streams[stream.sd_hash]
        blob_hashes = [stream.sd_hash] + [b.blob_hash for b in stream.descriptor.blobs[:-1]]
        await self.blob_manager.delete_blobs(blob_hashes, delete_from_db=False)
        await self.storage.delete_stream(stream.descriptor)
        if delete_file and stream.output_file_exists:
            os.remove(stream.full_path)

    def get_stream_by_stream_hash(self, stream_hash: str) -> Optional[ManagedStream]:
        streams = tuple(filter(lambda stream: stream.stream_hash == stream_hash, self.streams.values()))
        if streams:
            return streams[0]

    def get_filtered_streams(self, sort_by: Optional[str] = None, reverse: Optional[bool] = False,
                             comparison: Optional[str] = None,
                             **search_by) -> typing.List[ManagedStream]:
        """
        Get a list of filtered and sorted ManagedStream objects

        :param sort_by: field to sort by
        :param reverse: reverse sorting
        :param comparison: comparison operator used for filtering
        :param search_by: fields and values to filter by
        """
        if sort_by and sort_by not in FILTER_FIELDS:
            raise ValueError(f"'{sort_by}' is not a valid field to sort by")
        if comparison and comparison not in COMPARISON_OPERATORS:
            raise ValueError(f"'{comparison}' is not a valid comparison")
        if 'full_status' in search_by:
            del search_by['full_status']
        for search in search_by:
            if search not in FILTER_FIELDS:
                raise ValueError(f"'{search}' is not a valid search operation")
        if search_by:
            comparison = comparison or 'eq'
            streams = []
            for stream in self.streams.values():
                for search, val in search_by.items():
                    if COMPARISON_OPERATORS[comparison](getattr(stream, search), val):
                        streams.append(stream)
                        break
        else:
            streams = list(self.streams.values())
        if sort_by:
            streams.sort(key=lambda s: getattr(s, sort_by))
            if reverse:
                streams.reverse()
        return streams
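
    # Example query (hypothetical): the running downloads, newest first:
    #
    #   manager.get_filtered_streams(sort_by='added_on', reverse=True,
    #                                status='running')
    #
    # Note: multiple search_by fields are OR'ed — a stream is included as soon
    # as any one field matches — and the single `comparison` operator applies
    # to every field.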

    async def _check_update_or_replace(self, outpoint: str, claim_id: str, claim: Claim
                                       ) -> typing.Tuple[Optional[ManagedStream], Optional[ManagedStream]]:
        existing = self.get_filtered_streams(outpoint=outpoint)
        if existing:
            return existing[0], None
        existing = self.get_filtered_streams(sd_hash=claim.stream.source.sd_hash)
        if existing and existing[0].claim_id != claim_id:
            raise ResolveError(f"stream for {existing[0].claim_id} collides with existing download {claim_id}")
        if existing:
            log.info("claim contains a metadata only update to a stream we have")
            await self.storage.save_content_claim(
                existing[0].stream_hash, outpoint
            )
            await self._update_content_claim(existing[0])
            return existing[0], None
        else:
            existing_for_claim_id = self.get_filtered_streams(claim_id=claim_id)
            if existing_for_claim_id:
                log.info("claim contains an update to a stream we have, downloading it")
                return None, existing_for_claim_id[0]
        return None, None

    @staticmethod
    def _convert_to_old_resolve_output(service: 'Service', resolves):
        result = {}
        for url, txo in resolves.items():
            if isinstance(txo, Output):
                tx_height = txo.tx_ref.height
                # TODO: fix
                # best_height = wallet_manager.ledger.headers.height
                result[url] = {
                    'name': txo.claim_name,
                    'value': txo.claim,
                    'protobuf': binascii.hexlify(txo.claim.to_bytes()),
                    'claim_id': txo.claim_id,
                    'txid': txo.tx_ref.id,
                    'nout': txo.position,
                    'amount': dewies_to_lbc(txo.amount),
                    'effective_amount': txo.meta.get('effective_amount', 0),
                    'height': tx_height,
                    # TODO: fix
                    'confirmations': 0,  # (best_height+1) - tx_height if tx_height > 0 else tx_height,
                    'claim_sequence': -1,
                    'address': txo.get_address(service.ledger),
                    'valid_at_height': txo.meta.get('activation_height', None),
                    # TODO: fix
                    'timestamp': 0,  # wallet_manager.ledger.headers.estimated_timestamp(tx_height),
                    'supports': []
                }
            else:
                result[url] = txo
        return result

    @cache_concurrent
    async def download_stream_from_uri(self, uri, exchange_rate_manager: 'ExchangeRateManager',
                                       timeout: Optional[float] = None,
                                       file_name: Optional[str] = None,
                                       download_directory: Optional[str] = None,
                                       save_file: Optional[bool] = None,
                                       resolve_timeout: float = 3.0,
                                       wallet: Optional['Wallet'] = None) -> ManagedStream:
        service = self.service
        wallet = wallet or self.service.wallet_manager.default_wallet
        timeout = timeout or self.config.download_timeout
        start_time = self.loop.time()
        resolved_time = None
        stream = None
        txo: Optional[Output] = None
        error = None
        outpoint = None
        if save_file is None:
            save_file = self.config.save_files
        if file_name and not save_file:
            save_file = True
        if save_file:
            download_directory = download_directory or self.config.download_dir
        else:
            download_directory = None

        payment = None
        try:
            # resolve the claim
            if not URL.parse(uri).has_stream:
                raise ResolveError("cannot download a channel claim, specify a /path")
            try:
                response = await asyncio.wait_for(
                    service.resolve(wallet.accounts, [uri], include_purchase_receipt=True),
                    resolve_timeout
                )
                resolved_result = self._convert_to_old_resolve_output(service, response)
            except asyncio.TimeoutError:
                raise ResolveTimeoutError(uri)
            except Exception as err:
                if isinstance(err, asyncio.CancelledError):  # TODO: remove when updated to 3.8
                    raise
                log.exception("Unexpected error resolving stream:")
                raise ResolveError(f"Unexpected error resolving stream: {str(err)}")
            await self.storage.save_claims_for_resolve([
                value for value in resolved_result.values() if 'error' not in value
            ])
            resolved = resolved_result.get(uri, {})
            resolved = resolved if 'value' in resolved else resolved.get('claim')
            if not resolved:
                raise ResolveError(f"Failed to resolve stream at '{uri}'")
            if 'error' in resolved:
                raise ResolveError(f"error resolving stream: {resolved['error']}")
            txo = response[uri]

            claim = Claim.from_bytes(binascii.unhexlify(resolved['protobuf']))
            outpoint = f"{resolved['txid']}:{resolved['nout']}"
            resolved_time = self.loop.time() - start_time

            # resume or update an existing stream; if the stream changed, download the
            # new one and delete the old one after it has started
            updated_stream, to_replace = await self._check_update_or_replace(outpoint, resolved['claim_id'], claim)
            if updated_stream:
                log.info("already have stream for %s", uri)
                if save_file and updated_stream.output_file_exists:
                    save_file = False
                await updated_stream.start(node=self.node, timeout=timeout, save_now=save_file)
                if not updated_stream.output_file_exists and (save_file or file_name or download_directory):
                    await updated_stream.save_file(
                        file_name=file_name, download_directory=download_directory, node=self.node
                    )
                return updated_stream

            if not to_replace and txo.has_price and not txo.purchase_receipt:
                payment = await wallet.create_purchase_transaction(
                    wallet.accounts, txo, exchange_rate_manager
                )

            stream = ManagedStream(
                self.loop, self.config, self.blob_manager, claim.stream.source.sd_hash, download_directory,
                file_name, ManagedStream.STATUS_RUNNING, content_fee=payment,
                analytics_manager=self.analytics_manager
            )
            log.info("starting download for %s", uri)

            before_download = self.loop.time()
            await stream.start(self.node, timeout)
            stream.set_claim(resolved, claim)
            if to_replace:  # delete old stream now that the replacement has started downloading
                await self.delete_stream(to_replace)

            if payment is not None:
                await service.broadcast_or_release(payment)
                payment = None  # to avoid releasing in `finally` later
                log.info("paid fee of %s for %s", dewies_to_lbc(stream.content_fee.outputs[0].amount), uri)
                await self.storage.save_content_fee(stream.stream_hash, stream.content_fee)

            self.streams[stream.sd_hash] = stream
            self.storage.content_claim_callbacks[stream.stream_hash] = lambda: self._update_content_claim(stream)
            await self.storage.save_content_claim(stream.stream_hash, outpoint)
            if save_file:
                await asyncio.wait_for(stream.save_file(node=self.node),
                                       timeout - (self.loop.time() - before_download), loop=self.loop)
            return stream
        except asyncio.TimeoutError:
            error = DownloadDataTimeoutError(stream.sd_hash)
            raise error
        except Exception as err:  # forgive data timeout, don't delete stream
            expected = (DownloadSDTimeoutError, DownloadDataTimeoutError, InsufficientFundsError,
                        KeyFeeAboveMaxAllowedError)
            if isinstance(err, expected):
                log.warning("Failed to download %s: %s", uri, str(err))
            elif isinstance(err, asyncio.CancelledError):
                pass
            else:
                log.exception("Unexpected error downloading stream:")
            error = err
            raise
        finally:
            if payment is not None:
                # payment is set to None after broadcasting; if we got here, an exception probably happened
                await service.release_tx(payment)
            if self.analytics_manager and (error or (stream and (stream.downloader.time_to_descriptor or
                                                                 stream.downloader.time_to_first_bytes))):
                # TODO: fix
                # server = self.wallet_manager.ledger.network.client.server
                self.loop.create_task(
                    self.analytics_manager.send_time_to_first_bytes(
                        resolved_time, self.loop.time() - start_time, None if not stream else stream.download_id,
                        uri, outpoint,
                        None if not stream else len(stream.downloader.blob_downloader.active_connections),
                        None if not stream else len(stream.downloader.blob_downloader.scores),
                        None if not stream else len(stream.downloader.blob_downloader.connection_failures),
                        False if not stream else stream.downloader.added_fixed_peers,
                        self.config.fixed_peer_delay if not stream else stream.downloader.fixed_peers_delay,
                        None if not stream else stream.sd_hash,
                        None if not stream else stream.downloader.time_to_descriptor,
                        None if not (stream and stream.descriptor) else stream.descriptor.blobs[0].blob_hash,
                        None if not (stream and stream.descriptor) else stream.descriptor.blobs[0].length,
                        None if not stream else stream.downloader.time_to_first_bytes,
                        None if not error else error.__class__.__name__,
                        None if not error else str(error),
                        # TODO: fix
                        'fakespv.lbry.com:50001'  # None if not server else f"{server[0]}:{server[1]}"
                    )
                )
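
    # A minimal sketch of driving a download end to end (hypothetical setup;
    # assumes an initialized StreamManager `manager` and an exchange rate
    # manager; `finished_writing` is an assumed ManagedStream attribute):
    #
    #   stream = await manager.download_stream_from_uri(
    #       'lbry://@channel/some-video', exchange_rate_manager, save_file=True)
    #   await stream.finished_writing.wait()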

    async def stream_partial_content(self, request: Request, sd_hash: str):
        return await self.streams[sd_hash].stream_file(request, self.node)
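
# A minimal sketch of wiring stream_partial_content into an aiohttp route
# (hypothetical; the route path and app setup are not part of this module):
#
#   from aiohttp import web
#
#   async def handle_get(request: web.Request) -> web.StreamResponse:
#       return await stream_manager.stream_partial_content(
#           request, request.match_info['sd_hash'])
#
#   app.router.add_get('/stream/{sd_hash}', handle_get)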