import asyncio
import binascii
import logging
import os
import typing
from pathlib import Path
from typing import Optional
from aiohttp.web import Request, StreamResponse, HTTPRequestRangeNotSatisfiable

from lbry.file.source_manager import SourceManager
from lbry.file.source import ManagedDownloadSource
from lbry.schema.mime_types import guess_media_type

if typing.TYPE_CHECKING:
    from lbry.torrent.session import TorrentSession
    from lbry.conf import Config
    from lbry.wallet.transaction import Transaction
    from lbry.extras.daemon.analytics import AnalyticsManager
    from lbry.extras.daemon.storage import SQLiteStorage, StoredContentClaim

log = logging.getLogger(__name__)


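# Decode a hex-encoded file path back to a string; returns None when no path is set.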
def path_or_none(encoded_path) -> Optional[str]:
    if not encoded_path:
        return None
    return binascii.unhexlify(encoded_path).decode()


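# A single torrent-backed download: exposes the same interface as other ManagedDownloadSource
# implementations while delegating the actual torrent work to the shared TorrentSession.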
class TorrentSource(ManagedDownloadSource):
    STATUS_STOPPED = "stopped"
    # copy the parent set so adding torrent-only fields doesn't mutate SourceManager.filter_fields
    filter_fields = set(SourceManager.filter_fields)
    filter_fields.update({
        'bt_infohash'
    })

    def __init__(self, loop: asyncio.AbstractEventLoop, config: 'Config', storage: 'SQLiteStorage', identifier: str,
                 file_name: Optional[str] = None, download_directory: Optional[str] = None,
                 status: Optional[str] = STATUS_STOPPED, claim: Optional['StoredContentClaim'] = None,
                 download_id: Optional[str] = None, rowid: Optional[int] = None,
                 content_fee: Optional['Transaction'] = None,
                 analytics_manager: Optional['AnalyticsManager'] = None,
                 added_on: Optional[int] = None, torrent_session: Optional['TorrentSession'] = None):
        super().__init__(loop, config, storage, identifier, file_name, download_directory, status, claim, download_id,
                         rowid, content_fee, analytics_manager, added_on)
        self.torrent_session = torrent_session

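    # Note: reading full_path also refreshes download_directory, since the torrent session
    # controls where the torrent is actually saved on disk.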
    @property
    def full_path(self) -> Optional[str]:
        full_path = self.torrent_session.full_path(self.identifier)
        self.download_directory = self.torrent_session.save_path(self.identifier)
        return full_path

    @property
    def mime_type(self) -> Optional[str]:
        return guess_media_type(os.path.basename(self.full_path))[0]

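    # Register the torrent with the session, then persist its name, length and save location.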
    async def start(self, timeout: Optional[float] = None, save_now: Optional[bool] = False):
        await self.torrent_session.add_torrent(self.identifier, self.download_directory)
        self.download_directory = self.torrent_session.save_path(self.identifier)
        self._file_name = Path(self.torrent_session.full_path(self.identifier)).name
        await self.storage.add_torrent(self.identifier, self.torrent_length, self.torrent_name)
        self.rowid = await self.storage.save_downloaded_file(
            self.identifier, self.file_name, self.download_directory, 0.0, added_on=self._added_on
        )

    async def stop(self, finished: bool = False):
        await self.torrent_session.remove_torrent(self.identifier)

    async def save_file(self, file_name: Optional[str] = None, download_directory: Optional[str] = None):
        await self.torrent_session.save_file(self.identifier, download_directory)

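    # torrent_length: total size reported for the torrent; stream_length (below): on-disk size
    # of the file at full_path, which the HTTP range responses are computed against.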
    @property
    def torrent_length(self):
        return self.torrent_session.get_size(self.identifier)

    @property
    def stream_length(self):
        return os.path.getsize(self.full_path)

    @property
    def written_bytes(self):
        return self.torrent_session.get_downloaded(self.identifier)

    @property
    def torrent_name(self):
        return self.torrent_session.get_name(self.identifier)

    @property
    def bt_infohash(self):
        return self.identifier

    def stop_tasks(self):
        pass

    @property
    def completed(self):
        return self.torrent_session.is_completed(self.identifier)

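    # Serve the downloaded file as a 206 Partial Content response, writing data to the client
    # as the torrent session reports readable chunks.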
    async def stream_file(self, request):
        log.info("stream torrent to browser for lbry://%s#%s (btih %s...)", self.claim_name, self.claim_id,
                 self.identifier[:6])
        headers, start, end = self._prepare_range_response_headers(
            request.headers.get('range', 'bytes=0-')
        )
        await self.start()
        response = StreamResponse(
            status=206,
            headers=headers
        )
        await response.prepare(request)
        with open(self.full_path, 'rb') as infile:
            infile.seek(start)
            async for read_size in self.torrent_session.stream_largest_file(self.identifier, start, end):
                if infile.tell() + read_size < end:
                    await response.write(infile.read(read_size))
                else:
                    await response.write_eof(infile.read(end - infile.tell() + 1))
        return response

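    # Parse an HTTP Range header ("bytes=start-end") and build the 206 response headers;
    # out-of-range requests raise 416 Range Not Satisfiable.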
    def _prepare_range_response_headers(self, get_range: str) -> typing.Tuple[typing.Dict[str, str], int, int]:
        if '=' in get_range:
            get_range = get_range.split('=')[1]
        start, end = get_range.split('-')
        size = self.stream_length

        start = int(start)
        end = int(end) if end else size - 1

        if end >= size or not 0 <= start < size:
            raise HTTPRequestRangeNotSatisfiable()

        final_size = end - start + 1
        headers = {
            'Accept-Ranges': 'bytes',
            'Content-Range': f'bytes {start}-{end}/{size}',
            'Content-Length': str(final_size),
            'Content-Type': self.mime_type
        }
        return headers, start, end


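# Keeps track of all TorrentSource objects: loads them from the database on startup and
# delegates add/remove operations to the shared TorrentSession.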
class TorrentManager(SourceManager):
    _sources: typing.Dict[str, ManagedDownloadSource]

    filter_fields = set(SourceManager.filter_fields)
    filter_fields.update({
        'bt_infohash',
        'blobs_remaining',  # TODO: here they call them "parts", but it's pretty much the same concept
        'blobs_in_stream'
    })

    def __init__(self, loop: asyncio.AbstractEventLoop, config: 'Config', torrent_session: 'TorrentSession',
                 storage: 'SQLiteStorage', analytics_manager: Optional['AnalyticsManager'] = None):
        super().__init__(loop, config, storage, analytics_manager)
        self.torrent_session: 'TorrentSession' = torrent_session

    async def recover_streams(self, file_infos: typing.List[typing.Dict]):
        raise NotImplementedError

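    # Rebuild a TorrentSource from a stored database row and register it with this manager.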
    async def _load_stream(self, rowid: int, bt_infohash: str, file_name: Optional[str],
                           download_directory: Optional[str], status: str,
                           claim: Optional['StoredContentClaim'], content_fee: Optional['Transaction'],
                           added_on: Optional[int], **kwargs):
        stream = TorrentSource(
            self.loop, self.config, self.storage, identifier=bt_infohash, file_name=file_name,
            download_directory=download_directory, status=status, claim=claim, rowid=rowid,
            content_fee=content_fee, analytics_manager=self.analytics_manager, added_on=added_on,
            torrent_session=self.torrent_session
        )
        self.add(stream)

    async def initialize_from_database(self):
        for file in await self.storage.get_all_torrent_files():
            claim = await self.storage.get_content_claim_for_torrent(file['bt_infohash'])
            await self._load_stream(None, claim=claim, **file)

    async def start(self):
        await super().start()

    def stop(self):
        super().stop()
        log.info("finished stopping the torrent manager")

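    # Remove the source from the manager and storage, then drop the torrent from the session
    # (optionally deleting the downloaded data).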
    async def delete(self, source: ManagedDownloadSource, delete_file: Optional[bool] = False):
        await super().delete(source, delete_file)
        await self.torrent_session.remove_torrent(source.identifier, delete_file)

    async def create(self, file_path: str, key: Optional[bytes] = None,
                     iv_generator: Optional[typing.Generator[bytes, None, None]] = None):
        raise NotImplementedError

    async def _delete(self, source: ManagedDownloadSource, delete_file: Optional[bool] = False):
        raise NotImplementedError
        # blob_hashes = [source.sd_hash] + [b.blob_hash for b in source.descriptor.blobs[:-1]]
        # await self.blob_manager.delete_blobs(blob_hashes, delete_from_db=False)
        # await self.storage.delete_stream(source.descriptor)

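    # Look up a torrent source by its infohash and stream it to the HTTP client.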
    async def stream_partial_content(self, request: Request, identifier: str):
        return await self._sources[identifier].stream_file(request)
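
# Usage sketch (hypothetical, not part of this module): an aiohttp handler in the daemon
# could forward a range request to the manager like this, assuming a route that captures
# the torrent's infohash as `identifier`:
#
#   async def handle_stream(request: Request):
#       identifier = request.match_info['identifier']  # assumed route parameter name
#       return await torrent_manager.stream_partial_content(request, identifier)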