2018-04-02 21:13:13 +02:00
|
|
|
import os
|
2018-10-16 17:03:56 +02:00
|
|
|
import asyncio
|
2018-12-15 21:31:02 +01:00
|
|
|
import aiohttp
|
2018-04-02 21:13:13 +02:00
|
|
|
import logging
|
2018-07-25 21:32:01 +02:00
|
|
|
import treq
|
|
|
|
import math
|
2018-08-03 15:36:03 +02:00
|
|
|
import binascii
|
2018-08-05 02:20:37 +02:00
|
|
|
from hashlib import sha256
|
|
|
|
from types import SimpleNamespace
|
2019-01-07 21:35:03 +01:00
|
|
|
from twisted.internet import defer, reactor, error
|
2018-11-09 20:02:03 +01:00
|
|
|
|
2018-10-26 18:42:12 +02:00
|
|
|
from aioupnp import __version__ as aioupnp_version
|
2018-10-17 16:57:10 +02:00
|
|
|
from aioupnp.upnp import UPnP
|
2018-10-18 23:41:49 +02:00
|
|
|
from aioupnp.fault import UPnPError
|
2018-11-09 20:02:03 +01:00
|
|
|
|
|
|
|
import lbrynet.schema
|
2018-11-08 00:35:32 +01:00
|
|
|
from lbrynet import conf
|
2018-11-09 20:02:03 +01:00
|
|
|
|
2018-12-15 21:31:02 +01:00
|
|
|
from lbrynet.extras.compat import d2f
|
2018-11-09 20:02:03 +01:00
|
|
|
from lbrynet.blob.EncryptedFileManager import EncryptedFileManager
|
|
|
|
from lbrynet.blob.client.EncryptedFileDownloader import EncryptedFileSaverFactory
|
|
|
|
from lbrynet.blob.client.EncryptedFileOptions import add_lbry_file_to_sd_identifier
|
|
|
|
from lbrynet.dht.node import Node
|
|
|
|
from lbrynet.extras.daemon.Component import Component
|
|
|
|
from lbrynet.extras.daemon.ExchangeRateManager import ExchangeRateManager
|
|
|
|
from lbrynet.extras.daemon.storage import SQLiteStorage
|
|
|
|
from lbrynet.extras.daemon.HashAnnouncer import DHTHashAnnouncer
|
|
|
|
from lbrynet.extras.reflector.server.server import ReflectorServerFactory
|
|
|
|
from lbrynet.extras.wallet import LbryWalletManager
|
|
|
|
from lbrynet.extras.wallet import Network
|
2018-12-15 21:31:02 +01:00
|
|
|
from lbrynet.utils import generate_id
|
2018-11-04 20:06:29 +01:00
|
|
|
from lbrynet.p2p.PaymentRateManager import OnlyFreePaymentsManager
|
|
|
|
from lbrynet.p2p.RateLimiter import RateLimiter
|
|
|
|
from lbrynet.p2p.BlobManager import DiskBlobManager
|
|
|
|
from lbrynet.p2p.StreamDescriptor import StreamDescriptorIdentifier, EncryptedFileStreamType
|
|
|
|
from lbrynet.p2p.server.BlobRequestHandler import BlobRequestHandlerFactory
|
|
|
|
from lbrynet.p2p.server.ServerProtocol import ServerProtocolFactory
|
2018-11-07 21:15:05 +01:00
|
|
|
|
2018-08-05 02:20:37 +02:00
|
|
|
|
2018-04-02 21:13:13 +02:00
|
|
|
log = logging.getLogger(__name__)


# settings must be initialized before this file is imported

# Component registry names -- each is used below as a Component subclass's
# `component_name` and referenced from `depends_on` lists to declare startup
# ordering between components.
DATABASE_COMPONENT = "database"
BLOB_COMPONENT = "blob_manager"
HEADERS_COMPONENT = "blockchain_headers"
WALLET_COMPONENT = "wallet"
DHT_COMPONENT = "dht"
HASH_ANNOUNCER_COMPONENT = "hash_announcer"
FILE_MANAGER_COMPONENT = "file_manager"
PEER_PROTOCOL_SERVER_COMPONENT = "peer_protocol_server"
REFLECTOR_COMPONENT = "reflector"
UPNP_COMPONENT = "upnp"
# NOTE(review): no ExchangeRateManager component class is visible in this
# chunk -- presumably defined elsewhere in the file; confirm before removing.
EXCHANGE_RATE_MANAGER_COMPONENT = "exchange_rate_manager"
RATE_LIMITER_COMPONENT = "rate_limiter"
PAYMENT_RATE_COMPONENT = "payment_rate_manager"
|
|
|
|
|
|
|
|
|
2018-12-15 21:31:02 +01:00
|
|
|
async def gather_dict(tasks: dict):
    """Await every value in *tasks* concurrently.

    Returns a dict mapping each original key to its awaited result.
    """
    async def resolve(name, awaitable):
        return name, await awaitable

    pairs = await asyncio.gather(*(resolve(k, v) for k, v in tasks.items()))
    return dict(pairs)
|
2018-10-17 16:57:10 +02:00
|
|
|
|
|
|
|
|
2018-12-15 21:31:02 +01:00
|
|
|
async def get_external_ip():  # used if upnp is disabled or non-functioning
    """Best-effort lookup of this node's external IP via the lbry.io API.

    Returns:
        The external IP as a string, or None when the lookup fails
        (network error, bad payload, or success=False in the response) --
        callers treat None as "unknown".
    """
    try:
        async with aiohttp.ClientSession() as session:
            async with session.get("https://api.lbry.io/ip") as resp:
                response = await resp.json()
                if response['success']:
                    return response['data']['ip']
    except Exception as err:
        # deliberately best-effort, but log the failure instead of silently
        # discarding it so operators can tell why the IP is unknown
        log.warning("failed to get external ip from lbry.io: %s", err)
    return None
|
2018-07-24 18:26:29 +02:00
|
|
|
|
|
|
|
|
|
|
|
class DatabaseComponent(Component):
    """Opens the SQLite storage, running schema migrations first if needed."""

    component_name = DATABASE_COMPONENT

    def __init__(self, component_manager):
        super().__init__(component_manager)
        # SQLiteStorage instance, created in start() and cleared in stop()
        self.storage = None

    @property
    def component(self):
        return self.storage

    @staticmethod
    def get_current_db_revision():
        # schema revision this build of lbrynet expects
        return 9

    @staticmethod
    def get_revision_filename():
        """Path of the file that records the on-disk schema revision."""
        return conf.settings.get_db_revision_filename()

    @staticmethod
    def _write_db_revision_file(version_num):
        """Persist *version_num* as the current on-disk schema revision."""
        with open(conf.settings.get_db_revision_filename(), mode='w') as db_revision:
            db_revision.write(str(version_num))

    async def start(self):
        """Migrate the database if outdated, then open the SQLite storage.

        Raises:
            Exception: when the on-disk revision is NEWER than this build's
                expected revision (downgrade is unsupported).
        """
        # check directories exist, create them if they don't
        log.info("Loading databases")

        # first run: seed the revision file with the current revision
        if not os.path.exists(self.get_revision_filename()):
            log.warning("db_revision file not found. Creating it")
            self._write_db_revision_file(self.get_current_db_revision())

        # check the db migration and run any needed migrations
        with open(self.get_revision_filename(), "r") as revision_read_handle:
            old_revision = int(revision_read_handle.read().strip())

        if old_revision > self.get_current_db_revision():
            raise Exception('This version of lbrynet is not compatible with the database\n'
                            'Your database is revision %i, expected %i' %
                            (old_revision, self.get_current_db_revision()))
        if old_revision < self.get_current_db_revision():
            # imported lazily so the migrator is only loaded when needed
            from lbrynet.extras.daemon.migrator import dbmigrator
            log.info("Upgrading your databases (revision %i to %i)", old_revision, self.get_current_db_revision())
            # migration is blocking I/O -- run it off the event loop
            await asyncio.get_event_loop().run_in_executor(
                None, dbmigrator.migrate_db, conf.settings.data_dir, old_revision, self.get_current_db_revision()
            )
            self._write_db_revision_file(self.get_current_db_revision())
            log.info("Finished upgrading the databases.")

        self.storage = SQLiteStorage(
            os.path.join(conf.settings.data_dir, "lbrynet.sqlite")
        )
        await self.storage.open()

    async def stop(self):
        """Close the storage connection and drop the reference."""
        await self.storage.close()
        self.storage = None
|
|
|
|
|
|
|
|
|
2018-08-05 02:20:37 +02:00
|
|
|
# location of the s3-hosted blockchain headers snapshot used by HeadersComponent
HEADERS_URL = "https://headers.lbry.io/blockchain_headers_latest"
# size in bytes of one serialized block header (file size / HEADER_SIZE = height + 1)
HEADER_SIZE = 112
|
|
|
|
|
|
|
|
|
2018-07-25 21:32:01 +02:00
|
|
|
class HeadersComponent(Component):
    """Maintains the local blockchain headers file, bootstrapping it from an
    s3 snapshot when the local copy is too far behind the network."""

    component_name = HEADERS_COMPONENT

    def __init__(self, component_manager):
        super().__init__(component_manager)
        # headers live under <wallet_dir>/lbc_mainnet/headers
        self.headers_dir = os.path.join(conf.settings.wallet_dir, 'lbc_mainnet')
        self.headers_file = os.path.join(self.headers_dir, 'headers')
        # legacy location, migrated in start()
        self.old_file = os.path.join(conf.settings.wallet_dir, 'blockchain_headers')
        self._downloading_headers = None
        self._headers_progress_percent = 0

    @property
    def component(self):
        return self

    async def get_status(self):
        """Return download progress while fetching headers, else {}."""
        return {} if not self._downloading_headers else {
            'downloading_headers': self._downloading_headers,
            'download_progress': self._headers_progress_percent
        }

    @defer.inlineCallbacks
    def fetch_headers_from_s3(self):
        """Download (or resume) the headers snapshot from s3 via treq.

        Twisted coroutine; wrapped with d2f() by the asyncio caller.
        """
        def collector(data, h_file):
            # progress callback invoked by treq.collect for each chunk.
            # NOTE: final_size_after_download is assigned below, before
            # treq.collect ever invokes this closure (late binding).
            h_file.write(data)
            local_size = float(h_file.tell())
            final_size = float(final_size_after_download)
            self._headers_progress_percent = math.ceil(local_size / final_size * 100)

        local_header_size = self.local_header_file_size()
        # ask s3 for only the bytes we don't already have
        resume_header = {"Range": f"bytes={local_header_size}-"}
        response = yield treq.get(HEADERS_URL, headers=resume_header)
        got_406 = response.code == 406  # our file is bigger
        final_size_after_download = response.length + local_header_size
        if got_406:
            log.warning("s3 is more out of date than we are")
        # should have something to download and a final length divisible by the header size
        elif final_size_after_download and not final_size_after_download % HEADER_SIZE:
            s3_height = (final_size_after_download / HEADER_SIZE) - 1
            local_height = self.local_header_file_height()
            if s3_height > local_height:
                if local_header_size:
                    # partial file on disk: append the ranged response
                    log.info("Resuming download of %i bytes from s3", response.length)
                    with open(self.headers_file, "a+b") as headers_file:
                        yield treq.collect(response, lambda d: collector(d, headers_file))
                else:
                    # no local file: write from scratch
                    with open(self.headers_file, "wb") as headers_file:
                        yield treq.collect(response, lambda d: collector(d, headers_file))
                log.info("fetched headers from s3 (s3 height: %i), now verifying integrity after download.", s3_height)
                self._check_header_file_integrity()
            else:
                log.warning("s3 is more out of date than we are")
        else:
            log.error("invalid size for headers from s3")

    def local_header_file_height(self):
        """Block height implied by the local headers file size (0 if empty)."""
        return max((self.local_header_file_size() / HEADER_SIZE) - 1, 0)

    def local_header_file_size(self):
        """Size in bytes of the local headers file, 0 when absent."""
        if os.path.isfile(self.headers_file):
            return os.stat(self.headers_file).st_size
        return 0

    async def get_remote_height(self):
        """Ask a lbryum server for the current chain height.

        Spins up a throwaway Network with a minimal ledger-like config,
        queries the height, then shuts the connection down.
        """
        ledger = SimpleNamespace()
        ledger.config = {
            'default_servers': conf.settings['lbryum_servers'],
            'data_path': conf.settings.wallet_dir
        }
        net = Network(ledger)
        first_connection = net.on_connected.first
        asyncio.ensure_future(net.start())
        await first_connection
        remote_height = await net.get_server_height()
        await net.stop()
        return remote_height

    async def should_download_headers_from_s3(self):
        """Decide whether the s3 snapshot should be fetched.

        Only for lbrycrd_main, only when s3_headers_depth is enabled, and
        only when the remote chain is more than that many blocks ahead of
        the local headers file.
        """
        if conf.settings['blockchain_name'] != "lbrycrd_main":
            return False
        self._check_header_file_integrity()
        s3_headers_depth = conf.settings['s3_headers_depth']
        if not s3_headers_depth:
            return False
        local_height = self.local_header_file_height()
        remote_height = await self.get_remote_height()
        log.info("remote height: %i, local height: %i", remote_height, local_height)
        if remote_height > (local_height + s3_headers_depth):
            return True
        return False

    def _check_header_file_integrity(self):
        """Verify the headers file against a checkpoint checksum.

        On mismatch the file is deleted (forcing a re-download); if the
        checksum matches but the tail is a partial header, the file is
        truncated back to the checkpoint.
        """
        # TODO: temporary workaround for usability. move to txlbryum and check headers instead of file integrity
        if conf.settings['blockchain_name'] != "lbrycrd_main":
            return
        hashsum = sha256()
        checksum_height, checksum = conf.settings['HEADERS_FILE_SHA256_CHECKSUM']
        checksum_length_in_bytes = checksum_height * HEADER_SIZE
        if self.local_header_file_size() < checksum_length_in_bytes:
            # nothing to verify yet -- file is shorter than the checkpoint
            return
        with open(self.headers_file, "rb") as headers_file:
            hashsum.update(headers_file.read(checksum_length_in_bytes))
        current_checksum = hashsum.hexdigest()
        if current_checksum != checksum:
            msg = f"Expected checksum {checksum}, got {current_checksum}"
            log.warning("Wallet file corrupted, checksum mismatch. " + msg)
            log.warning("Deleting header file so it can be downloaded again.")
            os.unlink(self.headers_file)
        elif (self.local_header_file_size() % HEADER_SIZE) != 0:
            log.warning("Header file is good up to checkpoint height, but incomplete. Truncating to checkpoint.")
            with open(self.headers_file, "rb+") as headers_file:
                headers_file.truncate(checksum_length_in_bytes)

    async def start(self):
        """Prepare the headers directory, migrate the legacy file, and fetch
        the s3 snapshot when warranted (errors are logged, not raised)."""
        conf.settings.ensure_wallet_dir()
        if not os.path.exists(self.headers_dir):
            os.mkdir(self.headers_dir)
        if os.path.exists(self.old_file):
            log.warning("Moving old headers from %s to %s.", self.old_file, self.headers_file)
            os.rename(self.old_file, self.headers_file)
        self._downloading_headers = await self.should_download_headers_from_s3()
        if self._downloading_headers:
            try:
                # bridge the twisted deferred into asyncio
                await d2f(self.fetch_headers_from_s3())
            except Exception as err:
                log.error("failed to fetch headers from s3: %s", err)
            finally:
                self._downloading_headers = False

    async def stop(self):
        pass
|
2018-10-16 17:03:56 +02:00
|
|
|
|
|
|
|
|
2018-07-25 21:32:01 +02:00
|
|
|
class WalletComponent(Component):
    """Starts and owns the torba-based LbryWalletManager."""

    component_name = WALLET_COMPONENT
    depends_on = [DATABASE_COMPONENT, HEADERS_COMPONENT]

    def __init__(self, component_manager):
        super().__init__(component_manager)
        # LbryWalletManager, created in start() and cleared in stop()
        self.wallet_manager = None

    @property
    def component(self):
        return self.wallet_manager

    async def get_status(self):
        """Wallet sync/lock status, or None until the component is running."""
        if self.wallet_manager and self.running:
            # NOTE(review): these three network calls are used synchronously
            # here -- presumably they return plain values, not awaitables;
            # confirm against LbryWalletManager.
            local_height = self.wallet_manager.network.get_local_height()
            remote_height = self.wallet_manager.network.get_server_height()
            best_hash = self.wallet_manager.get_best_blockhash()
            return {
                'blocks': max(local_height, 0),
                'blocks_behind': max(remote_height - local_height, 0),
                'best_blockhash': best_hash,
                'is_encrypted': self.wallet_manager.wallet.use_encryption,
                'is_locked': not self.wallet_manager.is_wallet_unlocked,
            }

    async def start(self):
        """Build the wallet manager from settings + storage and start it."""
        conf.settings.ensure_wallet_dir()
        log.info("Starting torba wallet")
        storage = self.component_manager.get_component(DATABASE_COMPONENT)
        # the schema module needs to know which chain we are on
        lbrynet.schema.BLOCKCHAIN_NAME = conf.settings['blockchain_name']
        self.wallet_manager = await LbryWalletManager.from_lbrynet_config(conf.settings, storage)
        self.wallet_manager.old_db = storage
        await self.wallet_manager.start()

    async def stop(self):
        await self.wallet_manager.stop()
        self.wallet_manager = None
|
2018-07-24 18:26:29 +02:00
|
|
|
|
|
|
|
|
2018-07-25 21:33:43 +02:00
|
|
|
class BlobComponent(Component):
    """Owns the on-disk blob store (DiskBlobManager)."""

    component_name = BLOB_COMPONENT
    depends_on = [DATABASE_COMPONENT]

    def __init__(self, component_manager):
        super().__init__(component_manager)
        self.blob_manager = None

    @property
    def component(self):
        return self.blob_manager

    def start(self):
        """Create the DiskBlobManager and return its setup deferred."""
        db = self.component_manager.get_component(DATABASE_COMPONENT)
        # pass the DHT datastore through when the DHT component is enabled
        node_datastore = None
        if DHT_COMPONENT not in self.component_manager.skip_components:
            node = self.component_manager.get_component(DHT_COMPONENT)
            if node:
                node_datastore = node._dataStore
        blob_dir = os.path.join(conf.settings.data_dir, "blobfiles")
        self.blob_manager = DiskBlobManager(blob_dir, db, node_datastore)
        return self.blob_manager.setup()

    def stop(self):
        return self.blob_manager.stop()

    async def get_status(self):
        """Report the number of finished blobs stored locally."""
        if not self.blob_manager:
            return {'finished_blobs': 0}
        finished = await self.blob_manager.storage.count_finished_blobs()
        return {'finished_blobs': finished}
|
2018-08-02 23:33:56 +02:00
|
|
|
|
2018-07-24 18:26:29 +02:00
|
|
|
|
|
|
|
class DHTComponent(Component):
    """Starts the DHT node, using UPnP redirects for external ports."""

    component_name = DHT_COMPONENT
    depends_on = [UPNP_COMPONENT]

    def __init__(self, component_manager):
        super().__init__(component_manager)
        self.dht_node = None
        self.upnp_component = None
        # external ports resolved from UPnP redirects in start()
        self.external_udp_port = None
        self.external_peer_port = None

    @property
    def component(self):
        return self.dht_node

    async def get_status(self):
        """Node id and routing-table size (0 before the node starts)."""
        return {
            'node_id': binascii.hexlify(conf.settings.get_node_id()),
            'peers_in_routing_table': 0 if not self.dht_node else len(self.dht_node.contacts)
        }

    async def start(self):
        """Resolve external ports/IP via UPnP (with lbry.io fallback) and
        start the DHT node without blocking on the network join."""
        self.upnp_component = self.component_manager.get_component(UPNP_COMPONENT)
        # fall back to the configured internal ports when no redirect exists
        self.external_peer_port = self.upnp_component.upnp_redirects.get("TCP", conf.settings["peer_port"])
        self.external_udp_port = self.upnp_component.upnp_redirects.get("UDP", conf.settings["dht_node_port"])
        node_id = conf.settings.get_node_id()
        if node_id is None:
            node_id = generate_id()
        external_ip = self.upnp_component.external_ip
        if not external_ip:
            log.warning("UPnP component failed to get external ip")
            external_ip = await get_external_ip()
            if not external_ip:
                # the node is still started; it may be unreachable from outside
                log.warning("failed to get external ip")

        self.dht_node = Node(
            node_id=node_id,
            udpPort=conf.settings['dht_node_port'],
            externalUDPPort=self.external_udp_port,
            externalIP=external_ip,
            peerPort=self.external_peer_port
        )

        # twisted deferred bridged into asyncio; don't wait for the join
        await d2f(self.dht_node.start(conf.settings['known_dht_nodes'], block_on_join=False))
        log.info("Started the dht")

    def stop(self):
        return d2f(self.dht_node.stop())
|
2018-07-24 18:26:29 +02:00
|
|
|
|
|
|
|
|
|
|
|
class HashAnnouncerComponent(Component):
    """Runs the DHTHashAnnouncer that announces local blob hashes."""

    component_name = HASH_ANNOUNCER_COMPONENT
    depends_on = [DHT_COMPONENT, DATABASE_COMPONENT]

    def __init__(self, component_manager):
        super().__init__(component_manager)
        self.hash_announcer = None

    @property
    def component(self):
        return self.hash_announcer

    async def start(self):
        """Wire the announcer to the DHT node and storage, then start it."""
        db = self.component_manager.get_component(DATABASE_COMPONENT)
        node = self.component_manager.get_component(DHT_COMPONENT)
        self.hash_announcer = DHTHashAnnouncer(node, db)
        self.hash_announcer.start()

    def stop(self):
        self.hash_announcer.stop()

    async def get_status(self):
        """Size of the pending announce queue (0 before start)."""
        if self.hash_announcer:
            queued = len(self.hash_announcer.hash_queue)
        else:
            queued = 0
        return {
            'announce_queue_size': queued
        }
|
|
|
|
|
2018-07-24 18:26:29 +02:00
|
|
|
|
2018-07-25 21:33:43 +02:00
|
|
|
class RateLimiterComponent(Component):
    """Thin component wrapper around the p2p transfer RateLimiter."""

    component_name = RATE_LIMITER_COMPONENT

    def __init__(self, component_manager):
        super().__init__(component_manager)
        # created eagerly; start()/stop() just delegate to it
        self.rate_limiter = RateLimiter()

    @property
    def component(self):
        return self.rate_limiter

    async def start(self):
        self.rate_limiter.start()

    async def stop(self):
        self.rate_limiter.stop()
|
|
|
|
|
|
|
|
|
|
|
|
class PaymentRateComponent(Component):
    """Provides the OnlyFreePaymentsManager; has no start/stop work."""

    component_name = PAYMENT_RATE_COMPONENT

    def __init__(self, component_manager):
        super().__init__(component_manager)
        self.payment_rate_manager = OnlyFreePaymentsManager()

    @property
    def component(self):
        return self.payment_rate_manager

    async def start(self):
        pass

    async def stop(self):
        pass
|
2018-07-25 21:33:43 +02:00
|
|
|
|
|
|
|
|
2018-07-24 18:26:29 +02:00
|
|
|
class FileManagerComponent(Component):
    """Wires up and runs the EncryptedFileManager."""

    component_name = FILE_MANAGER_COMPONENT
    depends_on = [RATE_LIMITER_COMPONENT, BLOB_COMPONENT, DATABASE_COMPONENT, WALLET_COMPONENT,
                  PAYMENT_RATE_COMPONENT]

    def __init__(self, component_manager):
        super().__init__(component_manager)
        self.file_manager = None

    @property
    def component(self):
        return self.file_manager

    async def get_status(self):
        """Number of managed files, or None until start() has run."""
        if not self.file_manager:
            return
        return {
            'managed_files': len(self.file_manager.lbry_files)
        }

    def start(self):
        """Assemble the stream-descriptor pipeline and start the manager.

        Returns the manager's setup deferred.
        """
        rate_limiter = self.component_manager.get_component(RATE_LIMITER_COMPONENT)
        blob_manager = self.component_manager.get_component(BLOB_COMPONENT)
        storage = self.component_manager.get_component(DATABASE_COMPONENT)
        wallet = self.component_manager.get_component(WALLET_COMPONENT)

        # register the saver factory so encrypted streams can be downloaded
        # to the configured download directory
        sd_identifier = StreamDescriptorIdentifier()
        add_lbry_file_to_sd_identifier(sd_identifier)
        file_saver_factory = EncryptedFileSaverFactory(
            self.component_manager.peer_finder,
            rate_limiter,
            blob_manager,
            storage,
            wallet,
            conf.settings.download_dir
        )
        sd_identifier.add_stream_downloader_factory(EncryptedFileStreamType, file_saver_factory)

        payment_rate_manager = self.component_manager.get_component(PAYMENT_RATE_COMPONENT)
        log.info('Starting the file manager')
        self.file_manager = EncryptedFileManager(self.component_manager.peer_finder, rate_limiter, blob_manager, wallet,
                                                 payment_rate_manager, storage, sd_identifier)
        return self.file_manager.setup()

    def stop(self):
        return d2f(self.file_manager.stop())
|
2018-07-24 18:26:29 +02:00
|
|
|
|
|
|
|
|
|
|
|
class PeerProtocolServerComponent(Component):
    """Listens on the peer port and serves blob/wallet-info queries."""

    component_name = PEER_PROTOCOL_SERVER_COMPONENT
    depends_on = [UPNP_COMPONENT, RATE_LIMITER_COMPONENT, BLOB_COMPONENT, WALLET_COMPONENT,
                  PAYMENT_RATE_COMPONENT]

    def __init__(self, component_manager):
        super().__init__(component_manager)
        # twisted listening port object, set in start()
        self.lbry_server_port = None

    @property
    def component(self):
        return self.lbry_server_port

    async def start(self):
        """Build the query handlers and start listening on the peer port.

        Raises:
            ValueError: when the port is already bound (wrapping
                twisted's CannotListenError).
        """
        wallet = self.component_manager.get_component(WALLET_COMPONENT)
        upnp = self.component_manager.get_component(UPNP_COMPONENT)
        peer_port = conf.settings['peer_port']
        # map each handler's primary query identifier to its factory
        query_handlers = {
            handler.get_primary_query_identifier(): handler for handler in [
                BlobRequestHandlerFactory(
                    self.component_manager.get_component(BLOB_COMPONENT),
                    wallet,
                    self.component_manager.get_component(PAYMENT_RATE_COMPONENT),
                    self.component_manager.analytics_manager
                ),
                wallet.get_wallet_info_query_handler_factory(),
            ]
        }
        server_factory = ServerProtocolFactory(
            self.component_manager.get_component(RATE_LIMITER_COMPONENT), query_handlers,
            self.component_manager.peer_manager
        )

        try:
            log.info("Peer protocol listening on TCP %i (ext port %i)", peer_port,
                     upnp.upnp_redirects.get("TCP", peer_port))
            self.lbry_server_port = reactor.listenTCP(peer_port, server_factory)
        except error.CannotListenError as e:
            import traceback
            log.error("Couldn't bind to port %d. Visit lbry.io/faq/how-to-change-port for"
                      " more details.", peer_port)
            log.error("%s", traceback.format_exc())
            raise ValueError("%s lbrynet may already be running on your computer." % str(e))

    async def stop(self):
        """Stop listening, if we ever started."""
        if self.lbry_server_port is not None:
            # clear the attribute before the (async) teardown completes
            self.lbry_server_port, old_port = None, self.lbry_server_port
            log.info('Stop listening on port %s', old_port.port)
            await d2f(old_port.stopListening())
|
2018-07-24 18:26:29 +02:00
|
|
|
|
|
|
|
|
|
|
|
class ReflectorComponent(Component):
    """Runs the reflector TCP server that receives blobs from peers."""

    component_name = REFLECTOR_COMPONENT
    depends_on = [BLOB_COMPONENT, FILE_MANAGER_COMPONENT]

    def __init__(self, component_manager):
        super().__init__(component_manager)
        # port number from settings; the listening port object is created
        # in start()
        self.reflector_server_port = conf.settings['reflector_port']
        self.reflector_server = None

    @property
    def component(self):
        return self.reflector_server

    async def start(self):
        """Bind the reflector factory to the configured TCP port.

        Raises ValueError when the port is already in use.
        """
        log.info("Starting reflector server")
        manager = self.component_manager
        factory = ReflectorServerFactory(
            manager.peer_manager,
            manager.get_component(BLOB_COMPONENT),
            manager.get_component(FILE_MANAGER_COMPONENT)
        )
        try:
            self.reflector_server = await d2f(reactor.listenTCP(self.reflector_server_port, factory))
            log.info('Started reflector on port %s', self.reflector_server_port)
        except error.CannotListenError as e:
            log.exception("Couldn't bind reflector to port %d", self.reflector_server_port)
            raise ValueError(f"{e} lbrynet may already be running on your computer.")

    async def stop(self):
        """Stop listening, if the server was started."""
        if self.reflector_server is not None:
            log.info("Stopping reflector server")
            port, self.reflector_server = self.reflector_server, None
            await d2f(port.stopListening())
|
2018-07-24 18:26:29 +02:00
|
|
|
|
|
|
|
|
|
|
|
class UPnPComponent(Component):
|
|
|
|
component_name = UPNP_COMPONENT
|
|
|
|
|
|
|
|
    def __init__(self, component_manager):
        super().__init__(component_manager)
        # internal (LAN-side) ports we want mapped through the gateway
        self._int_peer_port = conf.settings['peer_port']
        self._int_dht_node_port = conf.settings['dht_node_port']
        self.use_upnp = conf.settings['use_upnp']
        # aioupnp gateway handle, discovered in _maintain_redirects()
        self.upnp = None
        # protocol ("TCP"/"UDP") -> external port for active redirects
        self.upnp_redirects = {}
        self.external_ip = None
        # presumably holds the task running _repeatedly_maintain_redirects();
        # assignment happens outside this view -- verify
        self._maintain_redirects_task = None
|
2018-07-24 18:26:29 +02:00
|
|
|
|
|
|
|
@property
|
|
|
|
def component(self):
|
|
|
|
return self
|
|
|
|
|
2019-01-07 21:35:03 +01:00
|
|
|
async def _repeatedly_maintain_redirects(self, now=True):
|
|
|
|
while True:
|
|
|
|
if now:
|
|
|
|
await self._maintain_redirects()
|
|
|
|
await asyncio.sleep(360)
|
|
|
|
|
2018-12-15 21:31:02 +01:00
|
|
|
async def _maintain_redirects(self):
|
2018-10-18 23:41:49 +02:00
|
|
|
# setup the gateway if necessary
|
|
|
|
if not self.upnp:
|
|
|
|
try:
|
2018-12-15 21:31:02 +01:00
|
|
|
self.upnp = await UPnP.discover()
|
2018-10-18 23:41:49 +02:00
|
|
|
log.info("found upnp gateway: %s", self.upnp.gateway.manufacturer_string)
|
|
|
|
except Exception as err:
|
|
|
|
log.warning("upnp discovery failed: %s", err)
|
2018-10-29 18:41:14 +01:00
|
|
|
self.upnp = None
|
2018-10-18 23:41:49 +02:00
|
|
|
|
|
|
|
# update the external ip
|
2018-10-29 18:41:14 +01:00
|
|
|
external_ip = None
|
|
|
|
if self.upnp:
|
|
|
|
try:
|
2018-12-15 21:31:02 +01:00
|
|
|
external_ip = await self.upnp.get_external_ip()
|
2018-11-12 20:47:11 +01:00
|
|
|
if external_ip != "0.0.0.0" and not self.external_ip:
|
2018-10-29 18:41:14 +01:00
|
|
|
log.info("got external ip from UPnP: %s", external_ip)
|
|
|
|
except (asyncio.TimeoutError, UPnPError):
|
|
|
|
pass
|
|
|
|
|
|
|
|
if external_ip == "0.0.0.0" or not external_ip:
|
|
|
|
log.warning("unable to get external ip from UPnP, checking lbry.io fallback")
|
2018-12-15 21:31:02 +01:00
|
|
|
external_ip = await get_external_ip()
|
2018-10-29 18:41:14 +01:00
|
|
|
if self.external_ip and self.external_ip != external_ip:
|
|
|
|
log.info("external ip changed from %s to %s", self.external_ip, external_ip)
|
|
|
|
self.external_ip = external_ip
|
|
|
|
assert self.external_ip is not None # TODO: handle going/starting offline
|
|
|
|
|
|
|
|
if not self.upnp_redirects and self.upnp: # setup missing redirects
|
2018-10-18 23:41:49 +02:00
|
|
|
try:
|
2018-10-29 18:41:14 +01:00
|
|
|
log.info("add UPnP port mappings")
|
2018-10-30 18:41:38 +01:00
|
|
|
d = {}
|
|
|
|
if PEER_PROTOCOL_SERVER_COMPONENT not in self.component_manager.skip_components:
|
2018-12-15 21:31:02 +01:00
|
|
|
d["TCP"] = self.upnp.get_next_mapping(self._int_peer_port, "TCP", "LBRY peer port")
|
2018-10-30 18:41:38 +01:00
|
|
|
if DHT_COMPONENT not in self.component_manager.skip_components:
|
2018-12-15 21:31:02 +01:00
|
|
|
d["UDP"] = self.upnp.get_next_mapping(self._int_dht_node_port, "UDP", "LBRY DHT port")
|
|
|
|
upnp_redirects = await gather_dict(d)
|
2018-10-29 18:41:14 +01:00
|
|
|
log.info("set up redirects: %s", upnp_redirects)
|
2018-10-18 23:41:49 +02:00
|
|
|
self.upnp_redirects.update(upnp_redirects)
|
|
|
|
except (asyncio.TimeoutError, UPnPError):
|
|
|
|
self.upnp = None
|
|
|
|
return self._maintain_redirects()
|
2018-10-29 18:41:14 +01:00
|
|
|
elif self.upnp: # check existing redirects are still active
|
2018-10-18 23:41:49 +02:00
|
|
|
found = set()
|
2018-12-15 21:31:02 +01:00
|
|
|
mappings = await self.upnp.get_redirects()
|
2018-10-18 23:41:49 +02:00
|
|
|
for mapping in mappings:
|
|
|
|
proto = mapping['NewProtocol']
|
|
|
|
if proto in self.upnp_redirects and mapping['NewExternalPort'] == self.upnp_redirects[proto]:
|
|
|
|
if mapping['NewInternalClient'] == self.upnp.lan_address:
|
|
|
|
found.add(proto)
|
2018-10-30 18:41:38 +01:00
|
|
|
if 'UDP' not in found and DHT_COMPONENT not in self.component_manager.skip_components:
|
2018-10-18 23:41:49 +02:00
|
|
|
try:
|
2018-12-15 21:31:02 +01:00
|
|
|
udp_port = await self.upnp.get_next_mapping(self._int_dht_node_port, "UDP", "LBRY DHT port")
|
2018-10-18 23:41:49 +02:00
|
|
|
self.upnp_redirects['UDP'] = udp_port
|
|
|
|
log.info("refreshed upnp redirect for dht port: %i", udp_port)
|
|
|
|
except (asyncio.TimeoutError, UPnPError):
|
|
|
|
del self.upnp_redirects['UDP']
|
2018-10-30 18:41:38 +01:00
|
|
|
if 'TCP' not in found and PEER_PROTOCOL_SERVER_COMPONENT not in self.component_manager.skip_components:
|
2018-10-18 23:41:49 +02:00
|
|
|
try:
|
2018-12-15 21:31:02 +01:00
|
|
|
tcp_port = await self.upnp.get_next_mapping(self._int_peer_port, "TCP", "LBRY peer port")
|
2018-10-18 23:41:49 +02:00
|
|
|
self.upnp_redirects['TCP'] = tcp_port
|
|
|
|
log.info("refreshed upnp redirect for peer port: %i", tcp_port)
|
|
|
|
except (asyncio.TimeoutError, UPnPError):
|
|
|
|
del self.upnp_redirects['TCP']
|
2018-10-30 18:41:38 +01:00
|
|
|
if ('TCP' in self.upnp_redirects
|
|
|
|
and PEER_PROTOCOL_SERVER_COMPONENT not in self.component_manager.skip_components) and (
|
|
|
|
'UDP' in self.upnp_redirects and DHT_COMPONENT not in self.component_manager.skip_components):
|
|
|
|
if self.upnp_redirects:
|
|
|
|
log.debug("upnp redirects are still active")
|
2018-10-18 23:41:49 +02:00
|
|
|
|
2018-12-15 21:31:02 +01:00
|
|
|
async def start(self):
|
2018-10-29 18:41:14 +01:00
|
|
|
log.info("detecting external ip")
|
2018-08-05 19:12:39 +02:00
|
|
|
if not self.use_upnp:
|
2018-12-15 21:31:02 +01:00
|
|
|
self.external_ip = await get_external_ip()
|
2018-08-05 19:12:39 +02:00
|
|
|
return
|
2018-10-18 23:41:49 +02:00
|
|
|
success = False
|
2018-12-15 21:31:02 +01:00
|
|
|
await self._maintain_redirects()
|
2018-10-18 21:10:00 +02:00
|
|
|
if self.upnp:
|
2018-10-30 18:41:38 +01:00
|
|
|
if not self.upnp_redirects and not all([x in self.component_manager.skip_components for x in
|
|
|
|
(DHT_COMPONENT, PEER_PROTOCOL_SERVER_COMPONENT)]):
|
2018-10-18 21:10:00 +02:00
|
|
|
log.error("failed to setup upnp, debugging infomation: %s", self.upnp.zipped_debugging_info)
|
|
|
|
else:
|
2018-10-18 23:41:49 +02:00
|
|
|
success = True
|
2018-10-30 18:41:38 +01:00
|
|
|
if self.upnp_redirects:
|
|
|
|
log.debug("set up upnp port redirects for gateway: %s", self.upnp.gateway.manufacturer_string)
|
2018-10-18 21:10:00 +02:00
|
|
|
else:
|
|
|
|
log.error("failed to setup upnp")
|
2019-01-09 01:02:49 +01:00
|
|
|
await self.component_manager.analytics_manager.send_upnp_setup_success_fail(success, await self.get_status())
|
2019-01-07 21:35:03 +01:00
|
|
|
self._maintain_redirects_task = asyncio.create_task(self._repeatedly_maintain_redirects(now=False))
|
2018-07-24 18:26:29 +02:00
|
|
|
|
2018-12-15 21:31:02 +01:00
|
|
|
async def stop(self):
|
|
|
|
if self.upnp_redirects:
|
|
|
|
await asyncio.wait([
|
|
|
|
self.upnp.delete_port_mapping(port, protocol) for protocol, port in self.upnp_redirects.items()
|
|
|
|
])
|
2019-01-07 21:35:03 +01:00
|
|
|
if self._maintain_redirects_task is not None and not self._maintain_redirects_task.done():
|
|
|
|
self._maintain_redirects_task.cancel()
|
2018-07-24 18:26:29 +02:00
|
|
|
|
2019-01-07 08:52:53 +01:00
|
|
|
async def get_status(self):
|
2018-10-18 21:10:00 +02:00
|
|
|
return {
|
2018-10-26 18:42:12 +02:00
|
|
|
'aioupnp_version': aioupnp_version,
|
2018-10-18 21:10:00 +02:00
|
|
|
'redirects': self.upnp_redirects,
|
2018-10-26 18:42:12 +02:00
|
|
|
'gateway': 'No gateway found' if not self.upnp else self.upnp.gateway.manufacturer_string,
|
2018-10-18 23:41:49 +02:00
|
|
|
'dht_redirect_set': 'UDP' in self.upnp_redirects,
|
|
|
|
'peer_redirect_set': 'TCP' in self.upnp_redirects,
|
|
|
|
'external_ip': self.external_ip
|
2018-10-18 21:10:00 +02:00
|
|
|
}
|
|
|
|
|
2018-07-24 18:26:29 +02:00
|
|
|
|
|
|
|
class ExchangeRateManagerComponent(Component):
    """Wraps an :class:`ExchangeRateManager`, tying its lifecycle to the
    component manager's start/stop cycle."""

    component_name = EXCHANGE_RATE_MANAGER_COMPONENT

    def __init__(self, component_manager):
        super().__init__(component_manager)
        self.exchange_rate_manager = ExchangeRateManager()

    @property
    def component(self):
        # expose the wrapped manager, not the component itself
        return self.exchange_rate_manager

    async def start(self):
        self.exchange_rate_manager.start()

    async def stop(self):
        self.exchange_rate_manager.stop()
|