import os
import asyncio
import logging
import math
import binascii
import typing
import base58

from aioupnp import __version__ as aioupnp_version
from aioupnp.upnp import UPnP
from aioupnp.fault import UPnPError

from lbry import utils
from lbry.dht.node import Node
from lbry.dht.blob_announcer import BlobAnnouncer
from lbry.blob.blob_manager import BlobManager
from lbry.blob_exchange.server import BlobServer
from lbry.stream.stream_manager import StreamManager
from lbry.extras.daemon.Component import Component
from lbry.extras.daemon.exchange_rate_manager import ExchangeRateManager
from lbry.extras.daemon.storage import SQLiteStorage
from lbry.wallet import LbryWalletManager
from lbry.wallet.header import Headers

log = logging.getLogger(__name__)

# settings must be initialized before this file is imported

DATABASE_COMPONENT = "database"
BLOB_COMPONENT = "blob_manager"
HEADERS_COMPONENT = "blockchain_headers"
WALLET_COMPONENT = "wallet"
DHT_COMPONENT = "dht"
HASH_ANNOUNCER_COMPONENT = "hash_announcer"
STREAM_MANAGER_COMPONENT = "stream_manager"
PEER_PROTOCOL_SERVER_COMPONENT = "peer_protocol_server"
UPNP_COMPONENT = "upnp"
EXCHANGE_RATE_MANAGER_COMPONENT = "exchange_rate_manager"


class DatabaseComponent(Component):
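    """Opens the lbrynet.sqlite storage and runs any pending schema migrations before other components use it."""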

    component_name = DATABASE_COMPONENT

    def __init__(self, component_manager):
        super().__init__(component_manager)
        self.storage = None

    @property
    def component(self):
        return self.storage

    @staticmethod
    def get_current_db_revision():
        return 11

    @property
    def revision_filename(self):
        return os.path.join(self.conf.data_dir, 'db_revision')

    def _write_db_revision_file(self, version_num):
        with open(self.revision_filename, mode='w') as db_revision:
            db_revision.write(str(version_num))

    async def start(self):
        # make sure the db_revision file exists, creating it if necessary
        log.info("Loading databases")
        if not os.path.exists(self.revision_filename):
            log.warning("db_revision file not found. Creating it")
            self._write_db_revision_file(self.get_current_db_revision())

        # check the db revision and run any needed migrations
        with open(self.revision_filename, "r") as revision_read_handle:
            old_revision = int(revision_read_handle.read().strip())

        if old_revision > self.get_current_db_revision():
            raise Exception('This version of lbrynet is not compatible with the database\n'
                            'Your database is revision %i, expected %i' %
                            (old_revision, self.get_current_db_revision()))
        if old_revision < self.get_current_db_revision():
            from lbry.extras.daemon.migrator import dbmigrator
            log.info("Upgrading your databases (revision %i to %i)", old_revision, self.get_current_db_revision())
            await asyncio.get_event_loop().run_in_executor(
                None, dbmigrator.migrate_db, self.conf, old_revision, self.get_current_db_revision()
            )
            self._write_db_revision_file(self.get_current_db_revision())
            log.info("Finished upgrading the databases.")

        self.storage = SQLiteStorage(
            self.conf, os.path.join(self.conf.data_dir, "lbrynet.sqlite")
        )
        await self.storage.open()

    async def stop(self):
        await self.storage.close()
        self.storage = None


HEADERS_URL = "https://headers.lbry.io/blockchain_headers_latest"
HEADER_SIZE = 112


class HeadersComponent(Component):
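    """Manages the local blockchain headers file, optionally bootstrapping it from the s3 headers mirror."""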

    component_name = HEADERS_COMPONENT

    def __init__(self, component_manager):
        super().__init__(component_manager)
        self.headers_dir = os.path.join(self.conf.wallet_dir, 'lbc_mainnet')
        self.headers_file = os.path.join(self.headers_dir, 'headers')
        self.old_file = os.path.join(self.conf.wallet_dir, 'blockchain_headers')
        self.headers = Headers(self.headers_file)
        self._downloading_headers = None
        self._headers_progress_percent = 0

    @property
    def component(self):
        return self

    async def get_status(self):
        if self._downloading_headers:
            progress = self._headers_progress_percent
        else:
            try:
                wallet_manager = self.component_manager.get_component(WALLET_COMPONENT)
                if wallet_manager and wallet_manager.ledger.network.remote_height > 0:
                    local_height = wallet_manager.ledger.headers.height
                    remote_height = wallet_manager.ledger.network.remote_height
                    progress = max(math.ceil(float(local_height) / float(remote_height) * 100), 0)
                else:
                    return {}
            except NameError:
                return {}
        return {
            'downloading_headers': True,
            'download_progress': progress
        } if progress < 100 else {}

    async def fetch_headers_from_s3(self):
        local_header_size = self.headers.bytes_size
        resume_header = {"Range": f"bytes={local_header_size}-"}
        async with utils.aiohttp_request('get', HEADERS_URL, headers=resume_header) as response:
            if response.status == 406 or response.content_length < HEADER_SIZE:  # our file is bigger
                log.warning("s3 is more out of date than we are")
                return
            if response.content_length % HEADER_SIZE != 0:
                log.warning("s3 appears to have a corrupted header file")
                return
            final_size_after_download = response.content_length + local_header_size
            if local_header_size > 0:
                log.info("Resuming download of %i bytes from s3", response.content_length)
            # accumulate bytes until we have whole headers, then connect them to the local chain
            buffer, header_size = b'', self.headers.header_size
            async for chunk in response.content.iter_any():
                chunk = buffer + chunk
                remaining = len(chunk) % header_size
                if remaining:
                    chunk, buffer = chunk[:-remaining], bytes(chunk[-remaining:])
                else:
                    buffer = b''
                if not chunk:
                    continue
                if not await self.headers.connect(len(self.headers), chunk):
                    log.warning("Error connecting downloaded headers at height %s.", self.headers.height)
                    return
                self._headers_progress_percent = math.ceil(
                    float(self.headers.bytes_size) / float(final_size_after_download) * 100
                )

    def local_header_file_size(self):
        if os.path.isfile(self.headers_file):
            return os.stat(self.headers_file).st_size
        return 0

    async def get_download_height(self):
        async with utils.aiohttp_request('HEAD', HEADERS_URL) as response:
            if response.status != 200:
                log.warning("Header download error: %s", response.status)
                return 0
            return response.content_length // HEADER_SIZE

    async def should_download_headers_from_s3(self):
        if self.conf.blockchain_name != "lbrycrd_main":
            return False
        s3_headers_depth = self.conf.s3_headers_depth
        if not s3_headers_depth:
            return False

        local_height = self.headers.height
        try:
            remote_height = await self.get_download_height()
        except OSError:
            log.warning("Failed to download headers using https.")
            return False
        log.info("remote height: %i, local height: %i", remote_height, local_height)
        if remote_height > (local_height + s3_headers_depth):
            return True
        return False

    async def start(self):
        if not os.path.exists(self.headers_dir):
            os.mkdir(self.headers_dir)
        if os.path.exists(self.old_file):
            log.warning("Moving old headers from %s to %s.", self.old_file, self.headers_file)
            os.rename(self.old_file, self.headers_file)
        await self.headers.open()
        self.headers.repair()
        self._downloading_headers = await self.should_download_headers_from_s3()
        if self._downloading_headers:
            try:
                await self.fetch_headers_from_s3()
            except Exception as err:
                log.error("failed to fetch headers from s3: %s", err)
            finally:
                self._downloading_headers = False
        await self.headers.close()

    async def stop(self):
        pass


class WalletComponent(Component):
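    """Starts the torba-based wallet manager and reports wallet and blockchain sync status."""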

    component_name = WALLET_COMPONENT
    depends_on = [DATABASE_COMPONENT, HEADERS_COMPONENT]

    def __init__(self, component_manager):
        super().__init__(component_manager)
        self.wallet_manager = None

    @property
    def component(self):
        return self.wallet_manager

    async def get_status(self):
        if self.wallet_manager and self.wallet_manager.ledger.network.remote_height:
            local_height = self.wallet_manager.ledger.headers.height
            remote_height = self.wallet_manager.ledger.network.remote_height
            best_hash = self.wallet_manager.get_best_blockhash()
            return {
                'blocks': max(local_height, 0),
                'blocks_behind': max(remote_height - local_height, 0),
                'best_blockhash': best_hash,
                'is_encrypted': self.wallet_manager.use_encryption,
                'is_locked': not self.wallet_manager.is_wallet_unlocked,
            }

    async def start(self):
        log.info("Starting torba wallet")
        self.wallet_manager = await LbryWalletManager.from_lbrynet_config(self.conf)
        await self.wallet_manager.start()

    async def stop(self):
        await self.wallet_manager.stop()
        self.wallet_manager = None


class BlobComponent(Component):
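    """Sets up the BlobManager over the blobfiles directory, wiring in the DHT data store when available."""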

    component_name = BLOB_COMPONENT
    depends_on = [DATABASE_COMPONENT]

    def __init__(self, component_manager):
        super().__init__(component_manager)
        self.blob_manager: typing.Optional[BlobManager] = None

    @property
    def component(self) -> typing.Optional[BlobManager]:
        return self.blob_manager

    async def start(self):
        storage = self.component_manager.get_component(DATABASE_COMPONENT)
        data_store = None
        if DHT_COMPONENT not in self.component_manager.skip_components:
            dht_node: Node = self.component_manager.get_component(DHT_COMPONENT)
            if dht_node:
                data_store = dht_node.protocol.data_store
        blob_dir = os.path.join(self.conf.data_dir, 'blobfiles')
        if not os.path.isdir(blob_dir):
            os.mkdir(blob_dir)
        self.blob_manager = BlobManager(self.component_manager.loop, blob_dir, storage, self.conf, data_store)
        return await self.blob_manager.setup()

    async def stop(self):
        self.blob_manager.stop()

    async def get_status(self):
        count = 0
        if self.blob_manager:
            count = len(self.blob_manager.completed_blob_hashes)
        return {
            'finished_blobs': count,
            'connections': {} if not self.blob_manager else self.blob_manager.connection_manager.status
        }


class DHTComponent(Component):
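    """Starts the DHT Node using the external ip and port mappings provided by the UPnP component."""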

    component_name = DHT_COMPONENT
    depends_on = [UPNP_COMPONENT]

    def __init__(self, component_manager):
        super().__init__(component_manager)
        self.dht_node: typing.Optional[Node] = None
        self.external_udp_port = None
        self.external_peer_port = None

    @property
    def component(self) -> typing.Optional[Node]:
        return self.dht_node

    async def get_status(self):
        return {
            'node_id': None if not self.dht_node else binascii.hexlify(self.dht_node.protocol.node_id),
            'peers_in_routing_table': 0 if not self.dht_node else len(self.dht_node.protocol.routing_table.get_peers())
        }

    def get_node_id(self):
        node_id_filename = os.path.join(self.conf.data_dir, "node_id")
        if os.path.isfile(node_id_filename):
            with open(node_id_filename, "r") as node_id_file:
                return base58.b58decode(str(node_id_file.read()).strip())
        node_id = utils.generate_id()
        with open(node_id_filename, "w") as node_id_file:
            node_id_file.write(base58.b58encode(node_id).decode())
        return node_id

    async def start(self):
        log.info("start the dht")
        upnp_component = self.component_manager.get_component(UPNP_COMPONENT)
        self.external_peer_port = upnp_component.upnp_redirects.get("TCP", self.conf.tcp_port)
        self.external_udp_port = upnp_component.upnp_redirects.get("UDP", self.conf.udp_port)
        external_ip = upnp_component.external_ip
        if not external_ip:
            log.warning("UPnP component failed to get external ip")
            external_ip = await utils.get_external_ip()
            if not external_ip:
                log.warning("failed to get external ip")

        self.dht_node = Node(
            self.component_manager.loop,
            self.component_manager.peer_manager,
            node_id=self.get_node_id(),
            internal_udp_port=self.conf.udp_port,
            udp_port=self.external_udp_port,
            external_ip=external_ip,
            peer_port=self.external_peer_port,
            rpc_timeout=self.conf.node_rpc_timeout,
            split_buckets_under_index=self.conf.split_buckets_under_index
        )
        self.dht_node.start(
            interface=self.conf.network_interface, known_node_urls=self.conf.known_dht_nodes
        )
        log.info("Started the dht")

    async def stop(self):
        self.dht_node.stop()


class HashAnnouncerComponent(Component):
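    """Runs the BlobAnnouncer, which announces locally stored blob hashes over the DHT."""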

    component_name = HASH_ANNOUNCER_COMPONENT
    depends_on = [DHT_COMPONENT, DATABASE_COMPONENT]

    def __init__(self, component_manager):
        super().__init__(component_manager)
        self.hash_announcer: typing.Optional[BlobAnnouncer] = None

    @property
    def component(self) -> typing.Optional[BlobAnnouncer]:
        return self.hash_announcer

    async def start(self):
        storage = self.component_manager.get_component(DATABASE_COMPONENT)
        dht_node = self.component_manager.get_component(DHT_COMPONENT)
        self.hash_announcer = BlobAnnouncer(self.component_manager.loop, dht_node, storage)
        self.hash_announcer.start(self.conf.concurrent_blob_announcers)
        log.info("Started blob announcer")

    async def stop(self):
        self.hash_announcer.stop()
        log.info("Stopped blob announcer")

    async def get_status(self):
        return {
            'announce_queue_size': 0 if not self.hash_announcer else len(self.hash_announcer.announce_queue)
        }


class StreamManagerComponent(Component):
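    """Creates and starts the StreamManager, which tracks the node's managed file streams."""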

    component_name = STREAM_MANAGER_COMPONENT
    depends_on = [BLOB_COMPONENT, DATABASE_COMPONENT, WALLET_COMPONENT]

    def __init__(self, component_manager):
        super().__init__(component_manager)
        self.stream_manager: typing.Optional[StreamManager] = None

    @property
    def component(self) -> typing.Optional[StreamManager]:
        return self.stream_manager

    async def get_status(self):
        if not self.stream_manager:
            return
        return {
            'managed_files': len(self.stream_manager.streams),
        }

    async def start(self):
        blob_manager = self.component_manager.get_component(BLOB_COMPONENT)
        storage = self.component_manager.get_component(DATABASE_COMPONENT)
        wallet = self.component_manager.get_component(WALLET_COMPONENT)
        try:
            node = self.component_manager.get_component(DHT_COMPONENT)
        except NameError:
            node = None
        log.info('Starting the file manager')
        loop = asyncio.get_event_loop()
        self.stream_manager = StreamManager(
            loop, self.conf, blob_manager, wallet, storage, node, self.component_manager.analytics_manager
        )
        await self.stream_manager.start()
        log.info('Done setting up file manager')

    async def stop(self):
        self.stream_manager.stop()


class PeerProtocolServerComponent(Component):
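    """Runs the BlobServer that serves locally stored blobs to other peers."""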

    component_name = PEER_PROTOCOL_SERVER_COMPONENT
    depends_on = [UPNP_COMPONENT, BLOB_COMPONENT, WALLET_COMPONENT]

    def __init__(self, component_manager):
        super().__init__(component_manager)
        self.blob_server: typing.Optional[BlobServer] = None

    @property
    def component(self) -> typing.Optional[BlobServer]:
        return self.blob_server

    async def start(self):
        log.info("start blob server")
        upnp = self.component_manager.get_component(UPNP_COMPONENT)
        blob_manager: BlobManager = self.component_manager.get_component(BLOB_COMPONENT)
        wallet: LbryWalletManager = self.component_manager.get_component(WALLET_COMPONENT)
        peer_port = self.conf.tcp_port
        address = await wallet.get_unused_address()
        self.blob_server = BlobServer(asyncio.get_event_loop(), blob_manager, address)
        self.blob_server.start_server(peer_port, interface=self.conf.network_interface)
        await self.blob_server.started_listening.wait()

    async def stop(self):
        if self.blob_server:
            self.blob_server.stop_server()


class UPnPComponent(Component):
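    """Discovers a UPnP gateway, maintains the TCP/UDP port redirects and tracks the external ip."""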

    component_name = UPNP_COMPONENT

    def __init__(self, component_manager):
        super().__init__(component_manager)
        self._int_peer_port = self.conf.tcp_port
        self._int_dht_node_port = self.conf.udp_port
        self.use_upnp = self.conf.use_upnp
        self.upnp: typing.Optional[UPnP] = None
        self.upnp_redirects = {}
        self.external_ip: typing.Optional[str] = None
        self._maintain_redirects_task = None

    @property
    def component(self) -> 'UPnPComponent':
        return self

    async def _repeatedly_maintain_redirects(self, now=True):
        while True:
            if now:
                await self._maintain_redirects()
            # after the first pass, always refresh the mappings on the next cycle
            now = True
            await asyncio.sleep(360, loop=self.component_manager.loop)

    async def _maintain_redirects(self):
        # setup the gateway if necessary
        if not self.upnp:
            try:
                self.upnp = await UPnP.discover(loop=self.component_manager.loop)
                log.info("found upnp gateway: %s", self.upnp.gateway.manufacturer_string)
            except Exception as err:
                if isinstance(err, asyncio.CancelledError):
                    raise
                log.warning("upnp discovery failed: %s", err)
                self.upnp = None

        # update the external ip
        external_ip = None
        if self.upnp:
            try:
                external_ip = await self.upnp.get_external_ip()
                if external_ip != "0.0.0.0" and not self.external_ip:
                    log.info("got external ip from UPnP: %s", external_ip)
            except (asyncio.TimeoutError, UPnPError, NotImplementedError):
                pass

        if external_ip == "0.0.0.0" or (external_ip and external_ip.startswith("192.")):
            log.warning("unable to get external ip from UPnP, checking lbry.com fallback")
            external_ip = await utils.get_external_ip()
        if self.external_ip and self.external_ip != external_ip:
            log.info("external ip changed from %s to %s", self.external_ip, external_ip)
        if external_ip:
            self.external_ip = external_ip
        # assert self.external_ip is not None  # TODO: handle going/starting offline

        if not self.upnp_redirects and self.upnp:  # setup missing redirects
            log.info("add UPnP port mappings")
            upnp_redirects = {}
            if PEER_PROTOCOL_SERVER_COMPONENT not in self.component_manager.skip_components:
                try:
                    upnp_redirects["TCP"] = await self.upnp.get_next_mapping(
                        self._int_peer_port, "TCP", "LBRY peer port", self._int_peer_port
                    )
                except (UPnPError, asyncio.TimeoutError, NotImplementedError):
                    pass
            if DHT_COMPONENT not in self.component_manager.skip_components:
                try:
                    upnp_redirects["UDP"] = await self.upnp.get_next_mapping(
                        self._int_dht_node_port, "UDP", "LBRY DHT port", self._int_dht_node_port
                    )
                except (UPnPError, asyncio.TimeoutError, NotImplementedError):
                    pass
            if upnp_redirects:
                log.info("set up redirects: %s", upnp_redirects)
                self.upnp_redirects.update(upnp_redirects)
        elif self.upnp:  # check existing redirects are still active
            found = set()
            mappings = await self.upnp.get_redirects()
            for mapping in mappings:
                proto = mapping.protocol
                if proto in self.upnp_redirects and mapping.external_port == self.upnp_redirects[proto]:
                    if mapping.lan_address == self.upnp.lan_address:
                        found.add(proto)
            if 'UDP' not in found and DHT_COMPONENT not in self.component_manager.skip_components:
                try:
                    udp_port = await self.upnp.get_next_mapping(self._int_dht_node_port, "UDP", "LBRY DHT port")
                    self.upnp_redirects['UDP'] = udp_port
                    log.info("refreshed upnp redirect for dht port: %i", udp_port)
                except (asyncio.TimeoutError, UPnPError, NotImplementedError):
                    del self.upnp_redirects['UDP']
            if 'TCP' not in found and PEER_PROTOCOL_SERVER_COMPONENT not in self.component_manager.skip_components:
                try:
                    tcp_port = await self.upnp.get_next_mapping(self._int_peer_port, "TCP", "LBRY peer port")
                    self.upnp_redirects['TCP'] = tcp_port
                    log.info("refreshed upnp redirect for peer port: %i", tcp_port)
                except (asyncio.TimeoutError, UPnPError, NotImplementedError):
                    del self.upnp_redirects['TCP']
            if ('TCP' in self.upnp_redirects
                    and PEER_PROTOCOL_SERVER_COMPONENT not in self.component_manager.skip_components) and (
                    'UDP' in self.upnp_redirects and DHT_COMPONENT not in self.component_manager.skip_components):
                if self.upnp_redirects:
                    log.debug("upnp redirects are still active")

    async def start(self):
        log.info("detecting external ip")
        if not self.use_upnp:
            self.external_ip = await utils.get_external_ip()
            return
        success = False
        await self._maintain_redirects()
        if self.upnp:
            if not self.upnp_redirects and not all([x in self.component_manager.skip_components for x in
                                                    (DHT_COMPONENT, PEER_PROTOCOL_SERVER_COMPONENT)]):
                log.error("failed to setup upnp")
            else:
                success = True
                if self.upnp_redirects:
                    log.debug("set up upnp port redirects for gateway: %s", self.upnp.gateway.manufacturer_string)
        else:
            log.error("failed to setup upnp")
        if self.component_manager.analytics_manager:
            await self.component_manager.analytics_manager.send_upnp_setup_success_fail(
                success, await self.get_status()
            )
        self._maintain_redirects_task = self.component_manager.loop.create_task(
            self._repeatedly_maintain_redirects(now=False)
        )

    async def stop(self):
        if self.upnp_redirects:
            log.info("Removing upnp redirects: %s", self.upnp_redirects)
            await asyncio.wait([
                self.upnp.delete_port_mapping(port, protocol) for protocol, port in self.upnp_redirects.items()
            ], loop=self.component_manager.loop)
        if self._maintain_redirects_task and not self._maintain_redirects_task.done():
            self._maintain_redirects_task.cancel()

    async def get_status(self):
        return {
            'aioupnp_version': aioupnp_version,
            'redirects': self.upnp_redirects,
            'gateway': 'No gateway found' if not self.upnp else self.upnp.gateway.manufacturer_string,
            'dht_redirect_set': 'UDP' in self.upnp_redirects,
            'peer_redirect_set': 'TCP' in self.upnp_redirects,
            'external_ip': self.external_ip
        }


class ExchangeRateManagerComponent(Component):
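    """Starts and stops the ExchangeRateManager, which keeps LBC exchange rates up to date."""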

    component_name = EXCHANGE_RATE_MANAGER_COMPONENT

    def __init__(self, component_manager):
        super().__init__(component_manager)
        self.exchange_rate_manager = ExchangeRateManager()

    @property
    def component(self) -> ExchangeRateManager:
        return self.exchange_rate_manager

    async def start(self):
        self.exchange_rate_manager.start()

    async def stop(self):
        self.exchange_rate_manager.stop()