# lbrynet/blob_exchange/downloader.py
import asyncio
import typing
import logging
from lbrynet.utils import drain_tasks, cache_concurrent
from lbrynet.blob_exchange.client import request_blob
if typing.TYPE_CHECKING:
from lbrynet.conf import Config
from lbrynet.dht.node import Node
from lbrynet.dht.peer import KademliaPeer
from lbrynet.blob.blob_manager import BlobManager
from lbrynet.blob.blob_file import AbstractBlob
log = logging.getLogger(__name__)
class BlobDownloader:
    """Downloads a single blob by racing requests across multiple peers.

    Peers arrive through ``peer_queue`` (fed by the DHT peer accumulator),
    are ranked by a rough per-peer download-speed score, and are temporarily
    banned with an exponentially growing timeout after failed requests.
    """

    BAN_FACTOR = 2.0  # fixme: when connection manager gets implemented, move it out from here

    def __init__(self, loop: asyncio.BaseEventLoop, config: 'Config', blob_manager: 'BlobManager',
                 peer_queue: asyncio.Queue):
        self.loop = loop
        self.config = config
        self.blob_manager = blob_manager
        self.peer_queue = peer_queue
        self.active_connections: typing.Dict['KademliaPeer', asyncio.Task] = {}  # active request_blob calls
        self.ignored: typing.Dict['KademliaPeer', float] = {}  # banned peer -> loop time when it was banned
        self.scores: typing.Dict['KademliaPeer', float] = {}  # rough bytes/sec per peer, used to sort batches
        self.failures: typing.Dict['KademliaPeer', int] = {}  # consecutive failures, drives the ban timeout
        self.connections: typing.Dict['KademliaPeer', asyncio.Transport] = {}  # transports kept open for reuse

    def should_race_continue(self, blob: 'AbstractBlob') -> bool:
        """Return True while it is still useful to start another request for `blob`."""
        if len(self.active_connections) >= self.config.max_connections_per_download:
            return False
        # if a peer won 3 or more blob races and is active as a downloader, stop the race so bandwidth improves
        # the safe net side is that any failure will reset the peer score, triggering the race back
        # TODO: this is a good idea for low bandwidth, but doesnt play nice on high bandwidth
        # for peer, task in self.active_connections.items():
        #     if self.scores.get(peer, 0) >= 0 and self.rounds_won.get(peer, 0) >= 3 and not task.done():
        #         return False
        return not (blob.get_is_verified() or not blob.is_writeable())

    async def request_blob_from_peer(self, blob: 'AbstractBlob', peer: 'KademliaPeer'):
        """Request `blob` from one peer, updating its score/failure/ban state.

        A peer whose connection is lost is banned (added to ``ignored``); a
        successful transfer resets its failure count and keeps the transport
        open for reuse on later requests.
        """
        if blob.get_is_verified():
            return
        self.scores[peer] = self.scores.get(peer, 0) - 1  # starts losing score, to account for cancelled ones
        transport = self.connections.get(peer)
        start = self.loop.time()
        bytes_received, transport = await request_blob(
            self.loop, blob, peer.address, peer.tcp_port, self.config.peer_connect_timeout,
            self.config.blob_download_timeout, connected_transport=transport
        )
        if not transport and peer not in self.ignored:
            # request failed and the connection is gone: ban the peer for a while
            self.ignored[peer] = self.loop.time()
            log.debug("drop peer %s:%i", peer.address, peer.tcp_port)
            self.failures[peer] = self.failures.get(peer, 0) + 1
            if peer in self.connections:
                del self.connections[peer]
        elif transport:
            log.debug("keep peer %s:%i", peer.address, peer.tcp_port)
            if bytes_received:
                self.failures[peer] = 0
            self.connections[peer] = transport
        elapsed = self.loop.time() - start
        # fix: guard the division — a coarse monotonic clock can report zero elapsed time
        rough_speed = (bytes_received / elapsed) if bytes_received and elapsed else 0
        self.scores[peer] = rough_speed

    async def new_peer_or_finished(self):
        """Wait until any in-flight request task completes, or 0.2s passes.

        The timeout is a real task that is always cancelled afterwards; the
        original code handed a bare ``asyncio.sleep`` coroutine to
        ``asyncio.wait`` and leaked it as a forever-pending task each round.
        """
        sleep_task = self.loop.create_task(asyncio.sleep(0.2))
        try:
            # no loop= kwarg: it is deprecated, and this coroutine already runs on self.loop
            await asyncio.wait(
                list(self.active_connections.values()) + [sleep_task], return_when='FIRST_COMPLETED'
            )
        finally:
            if not sleep_task.done():
                sleep_task.cancel()

    def cleanup_active(self):
        """Drop finished request tasks from the active set."""
        to_remove = [peer for (peer, task) in self.active_connections.items() if task.done()]
        for peer in to_remove:
            del self.active_connections[peer]

    def clearbanned(self):
        """Re-queue banned peers whose timeout (exponential in failures, capped at 30s) has expired."""
        now = self.loop.time()

        def timeout_for(peer: 'KademliaPeer') -> float:
            # failures ** BAN_FACTOR - 1: 0 failures -> immediate forgiveness, growing to a 30s cap
            return min(30.0, (self.failures.get(peer, 0) ** self.BAN_FACTOR) - 1.0)

        forgiven = [banned_peer for banned_peer, when in self.ignored.items() if now - when > timeout_for(banned_peer)]
        self.peer_queue.put_nowait(forgiven)
        for banned_peer in forgiven:
            self.ignored.pop(banned_peer)

    @cache_concurrent
    async def download_blob(self, blob_hash: str, length: typing.Optional[int] = None) -> 'AbstractBlob':
        """Download and verify the blob for `blob_hash`, racing peers from `peer_queue`.

        Returns the verified blob. On exit (success or not), in-flight request
        tasks are cancelled and their peers re-queued so a concurrent download
        can use them.
        """
        blob = self.blob_manager.get_blob(blob_hash, length)
        if blob.get_is_verified():
            return blob
        try:
            while not blob.get_is_verified():
                batch: typing.List['KademliaPeer'] = []
                while not self.peer_queue.empty():
                    batch.extend(self.peer_queue.get_nowait())
                # try the fastest known peers first
                batch.sort(key=lambda p: self.scores.get(p, 0), reverse=True)
                log.debug(
                    "running, %d peers, %d ignored, %d active",
                    len(batch), len(self.ignored), len(self.active_connections)
                )
                for peer in batch:
                    if not self.should_race_continue(blob):
                        break
                    if peer not in self.active_connections and peer not in self.ignored:
                        log.debug("request %s from %s:%i", blob_hash[:8], peer.address, peer.tcp_port)
                        t = self.loop.create_task(self.request_blob_from_peer(blob, peer))
                        self.active_connections[peer] = t
                await self.new_peer_or_finished()
                self.cleanup_active()
                if batch:
                    # peers not banned this round go back into the queue for the next round
                    to_re_add = list(set(batch).difference(self.ignored))
                    if to_re_add:
                        self.peer_queue.put_nowait(to_re_add)
                    else:
                        self.clearbanned()
                else:
                    self.clearbanned()
            blob.close()
            log.debug("downloaded %s", blob_hash[:8])
            return blob
        finally:
            re_add = set()
            while self.active_connections:
                peer, t = self.active_connections.popitem()
                t.cancel()
                re_add.add(peer)
            re_add = re_add.difference(self.ignored)
            if re_add:
                self.peer_queue.put_nowait(list(re_add))
            blob.close()

    def close(self):
        """Reset peer state and close any transports kept open for reuse."""
        self.scores.clear()
        self.ignored.clear()
        for transport in self.connections.values():
            transport.close()
async def download_blob(loop, config: 'Config', blob_manager: 'BlobManager', node: 'Node',
                        blob_hash: str) -> 'AbstractBlob':
    """Find peers for `blob_hash` through the DHT node and download the blob.

    Feeds the hash to the node's peer accumulator and runs a BlobDownloader
    race over the peers it discovers. The accumulator task is cancelled and
    the downloader closed regardless of how the download ends.
    """
    hash_queue = asyncio.Queue(loop=loop, maxsize=config.max_connections_per_download)
    hash_queue.put_nowait(blob_hash)
    found_peer_queue, accumulator = node.accumulate_peers(hash_queue)
    blob_downloader = BlobDownloader(loop, config, blob_manager, found_peer_queue)
    try:
        return await blob_downloader.download_blob(blob_hash)
    finally:
        if accumulator and not accumulator.done():
            accumulator.cancel()
        blob_downloader.close()