2018-04-02 21:13:13 +02:00
|
|
|
import os
|
2018-10-16 17:03:56 +02:00
|
|
|
import asyncio
|
2018-04-02 21:13:13 +02:00
|
|
|
import logging
|
2018-07-25 21:32:01 +02:00
|
|
|
import treq
|
2018-11-08 00:35:32 +01:00
|
|
|
import json
|
2018-07-25 21:32:01 +02:00
|
|
|
import math
|
2018-08-03 15:36:03 +02:00
|
|
|
import binascii
|
2018-08-05 02:20:37 +02:00
|
|
|
from hashlib import sha256
|
|
|
|
from types import SimpleNamespace
|
2018-10-18 23:41:49 +02:00
|
|
|
from twisted.internet import defer, threads, reactor, error, task
|
2018-09-17 22:31:44 +02:00
|
|
|
import lbrynet.schema
|
2018-10-26 18:42:12 +02:00
|
|
|
from aioupnp import __version__ as aioupnp_version
|
2018-10-17 16:57:10 +02:00
|
|
|
from aioupnp.upnp import UPnP
|
2018-10-18 23:41:49 +02:00
|
|
|
from aioupnp.fault import UPnPError
|
2018-11-08 00:35:32 +01:00
|
|
|
from lbrynet import conf
|
2018-11-07 21:15:05 +01:00
|
|
|
from lbrynet.utils import DeferredDict, generate_id
|
2018-11-04 20:06:29 +01:00
|
|
|
from lbrynet.p2p.PaymentRateManager import OnlyFreePaymentsManager
|
|
|
|
from lbrynet.p2p.RateLimiter import RateLimiter
|
|
|
|
from lbrynet.p2p.BlobManager import DiskBlobManager
|
|
|
|
from lbrynet.p2p.StreamDescriptor import StreamDescriptorIdentifier, EncryptedFileStreamType
|
2018-11-04 10:42:47 +01:00
|
|
|
from lbrynet.extras.wallet import LbryWalletManager
|
|
|
|
from lbrynet.extras.wallet import Network
|
2018-11-04 20:06:29 +01:00
|
|
|
from lbrynet.p2p.server.BlobRequestHandler import BlobRequestHandlerFactory
|
|
|
|
from lbrynet.p2p.server.ServerProtocol import ServerProtocolFactory
|
2018-11-04 19:44:17 +01:00
|
|
|
from .Component import Component
|
|
|
|
from .ExchangeRateManager import ExchangeRateManager
|
2018-11-07 21:15:05 +01:00
|
|
|
from .storage import SQLiteStorage
|
|
|
|
from .HashAnnouncer import DHTHashAnnouncer
|
|
|
|
|
|
|
|
from lbrynet.dht.node import Node
|
2018-11-04 20:55:01 +01:00
|
|
|
from lbrynet.blob.EncryptedFileManager import EncryptedFileManager
|
|
|
|
from lbrynet.blob.client.EncryptedFileDownloader import EncryptedFileSaverFactory
|
|
|
|
from lbrynet.blob.client.EncryptedFileOptions import add_lbry_file_to_sd_identifier
|
2018-11-04 19:24:46 +01:00
|
|
|
from lbrynet.extras.reflector import ServerFactory as reflector_server_factory
|
2018-08-05 02:20:37 +02:00
|
|
|
|
2018-04-02 21:13:13 +02:00
|
|
|
log = logging.getLogger(__name__)


# settings must be initialized before this file is imported

# Identifiers for the daemon's subsystems.  Each Component subclass below
# sets its ``component_name`` to one of these, and ``depends_on`` lists use
# them to express startup ordering.
DATABASE_COMPONENT = "database"
BLOB_COMPONENT = "blob_manager"
HEADERS_COMPONENT = "blockchain_headers"
WALLET_COMPONENT = "wallet"
DHT_COMPONENT = "dht"
HASH_ANNOUNCER_COMPONENT = "hash_announcer"
FILE_MANAGER_COMPONENT = "file_manager"
PEER_PROTOCOL_SERVER_COMPONENT = "peer_protocol_server"
REFLECTOR_COMPONENT = "reflector"
UPNP_COMPONENT = "upnp"
EXCHANGE_RATE_MANAGER_COMPONENT = "exchange_rate_manager"
RATE_LIMITER_COMPONENT = "rate_limiter"
PAYMENT_RATE_COMPONENT = "payment_rate_manager"
|
|
|
|
|
|
|
|
|
2018-10-17 16:57:10 +02:00
|
|
|
def from_future(coroutine) -> defer.Deferred:
    """Schedule an asyncio coroutine (or awaitable) and wrap it in a Deferred.

    The result fires when the underlying asyncio task completes.

    NOTE: the parameter was previously annotated as ``asyncio.coroutine``,
    which is the (deprecated) decorator *function*, not a type — the
    annotation has been removed rather than asserting a wrong type.
    """
    return defer.Deferred.fromFuture(asyncio.ensure_future(coroutine))
|
|
|
|
|
|
|
|
|
2018-07-25 21:32:01 +02:00
|
|
|
def get_wallet_config():
    """Build the lbryum wallet configuration dict from daemon settings.

    Raises:
        ValueError: for the removed lbrycrd wallet type, or for any
            wallet type other than lbryum.
    """
    wallet_type = GCS('wallet')
    if wallet_type == conf.LBRYCRD_WALLET:
        raise ValueError('LBRYcrd Wallet is no longer supported')
    if wallet_type != conf.LBRYUM_WALLET:
        raise ValueError(f'Wallet Type {wallet_type} is not valid')

    # lbryum expects {host: {'t': 'port'}} (tcp transport) server entries
    servers = {}
    for address, port in GCS('lbryum_servers'):
        servers[address] = {'t': str(port)}

    wallet_config = {
        'auto_connect': True,
        'chain': GCS('blockchain_name'),
        'default_servers': servers,
    }
    # optional settings: only forwarded when present/non-empty
    if 'use_keyring' in conf.settings:
        wallet_config['use_keyring'] = GCS('use_keyring')
    if conf.settings['lbryum_wallet_dir']:
        wallet_config['lbryum_path'] = GCS('lbryum_wallet_dir')
    return wallet_config
|
2018-04-02 21:13:13 +02:00
|
|
|
|
|
|
|
|
2018-07-22 00:34:59 +02:00
|
|
|
class ConfigSettings:
    """Static helpers over ``conf.settings`` shared by the components below."""

    @staticmethod
    def get_conf_setting(setting_name):
        """Return the value of a named daemon setting."""
        return conf.settings[setting_name]

    @staticmethod
    def get_blobfiles_dir():
        """Resolve the blobfiles directory.

        "blobfiles" is the default sentinel value; when set, the directory
        lives under data_dir.  Any other value is used verbatim.
        """
        if conf.settings['BLOBFILES_DIR'] == "blobfiles":
            return os.path.join(GCS("data_dir"), "blobfiles")
        else:
            log.info("Using non-default blobfiles directory: %s", conf.settings['BLOBFILES_DIR'])
            return conf.settings['BLOBFILES_DIR']

    @staticmethod
    def get_node_id():
        """Return the configured DHT node id (may be None)."""
        return conf.settings.node_id

    @staticmethod
    @defer.inlineCallbacks
    def get_external_ip():  # used if upnp is disabled or non-functioning
        """Ask api.lbry.io for our external IP.

        Returns the IP string, or None on any failure (network error,
        bad response, unsuccessful lookup).
        """
        try:
            buf = []
            response = yield treq.get("https://api.lbry.io/ip")
            yield treq.collect(response, buf.append)
            parsed = json.loads(b"".join(buf).decode())
            if parsed['success']:
                return parsed['data']['ip']
            return None
        except Exception as err:
            # FIX: the error used to be swallowed silently (bound but never
            # used); log it so a failed external-ip lookup is diagnosable
            log.warning("failed to get external ip from lbry.io: %s", err)
            return None
|
|
|
|
|
2018-07-24 18:26:29 +02:00
|
|
|
|
|
|
|
|
|
|
|
# Shorthand for common ConfigSettings methods
CS = ConfigSettings
# GCS(name) is equivalent to conf.settings[name]
GCS = ConfigSettings.get_conf_setting
|
|
|
|
|
|
|
|
|
|
|
|
class DatabaseComponent(Component):
    """Opens the daemon's SQLite storage, running schema migrations first
    when the on-disk revision is older than the one this build expects."""

    component_name = DATABASE_COMPONENT

    def __init__(self, component_manager):
        super().__init__(component_manager)
        # SQLiteStorage instance, created in start(), cleared in stop()
        self.storage = None

    @property
    def component(self):
        return self.storage

    @staticmethod
    def get_current_db_revision():
        # schema revision this build of lbrynet expects
        return 9

    @staticmethod
    def get_revision_filename():
        return conf.settings.get_db_revision_filename()

    @staticmethod
    def _write_db_revision_file(version_num):
        # persist the schema revision so future runs know whether to migrate
        with open(conf.settings.get_db_revision_filename(), mode='w') as db_revision:
            db_revision.write(str(version_num))

    @defer.inlineCallbacks
    def start(self):
        # check directories exist, create them if they don't
        log.info("Loading databases")

        if not os.path.exists(GCS('download_directory')):
            os.mkdir(GCS('download_directory'))

        if not os.path.exists(GCS('data_dir')):
            os.mkdir(GCS('data_dir'))
            # fresh data dir: stamp it with the current schema revision so
            # no migration is attempted on first run
            self._write_db_revision_file(self.get_current_db_revision())
            log.debug("Created the db revision file: %s", self.get_revision_filename())

        if not os.path.exists(CS.get_blobfiles_dir()):
            os.mkdir(CS.get_blobfiles_dir())
            log.debug("Created the blobfile directory: %s", str(CS.get_blobfiles_dir()))

        if not os.path.exists(self.get_revision_filename()):
            log.warning("db_revision file not found. Creating it")
            self._write_db_revision_file(self.get_current_db_revision())

        # check the db migration and run any needed migrations
        with open(self.get_revision_filename(), "r") as revision_read_handle:
            old_revision = int(revision_read_handle.read().strip())

        # a database newer than this build is a hard error
        if old_revision > self.get_current_db_revision():
            raise Exception('This version of lbrynet is not compatible with the database\n'
                            'Your database is revision %i, expected %i' %
                            (old_revision, self.get_current_db_revision()))
        if old_revision < self.get_current_db_revision():
            # deferred import: the migrator is only loaded when a migration
            # is actually needed
            from lbrynet.extras.daemon.migrator import dbmigrator
            log.info("Upgrading your databases (revision %i to %i)", old_revision, self.get_current_db_revision())
            # migrations are blocking, so run them off the reactor thread
            yield threads.deferToThread(
                dbmigrator.migrate_db, GCS('data_dir'), old_revision, self.get_current_db_revision()
            )
            self._write_db_revision_file(self.get_current_db_revision())
            log.info("Finished upgrading the databases.")

        # start SQLiteStorage
        self.storage = SQLiteStorage(GCS('data_dir'))
        yield self.storage.setup()

    @defer.inlineCallbacks
    def stop(self):
        yield self.storage.stop()
        self.storage = None
|
|
|
|
|
|
|
|
|
2018-08-05 02:20:37 +02:00
|
|
|
# s3 mirror serving the latest serialized blockchain-headers file
HEADERS_URL = "https://headers.lbry.io/blockchain_headers_latest"
# size in bytes of one serialized header record in the headers file
HEADER_SIZE = 112
|
|
|
|
|
|
|
|
|
2018-07-25 21:32:01 +02:00
|
|
|
class HeadersComponent(Component):
    """Keeps the local blockchain-headers file usable, bootstrapping it from
    the s3 mirror when the local copy is far enough behind the network."""

    component_name = HEADERS_COMPONENT

    def __init__(self, component_manager):
        super().__init__(component_manager)
        self.headers_dir = os.path.join(conf.settings['lbryum_wallet_dir'], 'lbc_mainnet')
        self.headers_file = os.path.join(self.headers_dir, 'headers')
        # legacy location of the headers file; migrated over in start()
        self.old_file = os.path.join(conf.settings['lbryum_wallet_dir'], 'blockchain_headers')
        self._downloading_headers = None
        self._headers_progress_percent = None

    @property
    def component(self):
        # this component exposes itself (used for status queries only)
        return self

    def get_status(self):
        # empty dict until/unless an s3 download is in progress
        return {} if not self._downloading_headers else {
            'downloading_headers': self._downloading_headers,
            'download_progress': self._headers_progress_percent
        }

    @defer.inlineCallbacks
    def fetch_headers_from_s3(self):
        """Download (or resume) the headers file from s3, updating
        ``_headers_progress_percent`` as chunks arrive."""
        def collector(data, h_file):
            # NOTE: reads final_size_after_download from the enclosing scope;
            # it is assigned below, before treq.collect invokes this callback
            h_file.write(data)
            local_size = float(h_file.tell())
            final_size = float(final_size_after_download)
            self._headers_progress_percent = math.ceil(local_size / final_size * 100)

        local_header_size = self.local_header_file_size()
        # resume from the current local file size via an HTTP Range request
        resume_header = {"Range": f"bytes={local_header_size}-"}
        response = yield treq.get(HEADERS_URL, headers=resume_header)
        got_406 = response.code == 406  # our file is bigger
        final_size_after_download = response.length + local_header_size
        if got_406:
            log.warning("s3 is more out of date than we are")
        # should have something to download and a final length divisible by the header size
        elif final_size_after_download and not final_size_after_download % HEADER_SIZE:
            s3_height = (final_size_after_download / HEADER_SIZE) - 1
            local_height = self.local_header_file_height()
            if s3_height > local_height:
                if local_header_size:
                    # append to the existing file (resumed download)
                    log.info("Resuming download of %i bytes from s3", response.length)
                    with open(self.headers_file, "a+b") as headers_file:
                        yield treq.collect(response, lambda d: collector(d, headers_file))
                else:
                    with open(self.headers_file, "wb") as headers_file:
                        yield treq.collect(response, lambda d: collector(d, headers_file))
                log.info("fetched headers from s3 (s3 height: %i), now verifying integrity after download.", s3_height)
                self._check_header_file_integrity()
            else:
                log.warning("s3 is more out of date than we are")
        else:
            log.error("invalid size for headers from s3")

    def local_header_file_height(self):
        # height of the last header in the local file (0 when the file is
        # empty/missing); note this returns a float due to true division
        return max((self.local_header_file_size() / HEADER_SIZE) - 1, 0)

    def local_header_file_size(self):
        # size in bytes of the local headers file, 0 if it doesn't exist
        if os.path.isfile(self.headers_file):
            return os.stat(self.headers_file).st_size
        return 0

    async def get_remote_height(self):
        """Briefly connect to a wallet server and return its chain height."""
        # stand-in ledger: Network only gets .config populated here
        ledger = SimpleNamespace()
        ledger.config = {
            'default_servers': conf.settings['lbryum_servers'],
            'data_path': conf.settings['lbryum_wallet_dir']
        }
        net = Network(ledger)
        first_connection = net.on_connected.first
        asyncio.ensure_future(net.start())
        await first_connection
        remote_height = await net.get_server_height()
        await net.stop()
        return remote_height

    async def should_download_headers_from_s3(self):
        """Decide whether to bootstrap headers from s3 instead of syncing
        them one by one from wallet servers."""
        # s3 bootstrap is only available for mainnet headers
        if conf.settings['blockchain_name'] != "lbrycrd_main":
            return False
        self._check_header_file_integrity()
        s3_headers_depth = conf.settings['s3_headers_depth']
        if not s3_headers_depth:
            # depth of 0 disables the s3 shortcut entirely
            return False
        local_height = self.local_header_file_height()
        remote_height = await self.get_remote_height()
        log.info("remote height: %i, local height: %i", remote_height, local_height)
        # only worth it when we are more than s3_headers_depth blocks behind
        if remote_height > (local_height + s3_headers_depth):
            return True
        return False

    def _check_header_file_integrity(self):
        """Verify the headers file against the configured checkpoint
        checksum; delete or truncate it when it doesn't check out."""
        # TODO: temporary workaround for usability. move to txlbryum and check headers instead of file integrity
        if conf.settings['blockchain_name'] != "lbrycrd_main":
            return
        hashsum = sha256()
        checksum_height, checksum = conf.settings['HEADERS_FILE_SHA256_CHECKSUM']
        checksum_length_in_bytes = checksum_height * HEADER_SIZE
        # nothing to verify if we haven't reached the checkpoint yet
        if self.local_header_file_size() < checksum_length_in_bytes:
            return
        with open(self.headers_file, "rb") as headers_file:
            hashsum.update(headers_file.read(checksum_length_in_bytes))
        current_checksum = hashsum.hexdigest()
        if current_checksum != checksum:
            msg = f"Expected checksum {checksum}, got {current_checksum}"
            log.warning("Wallet file corrupted, checksum mismatch. " + msg)
            log.warning("Deleting header file so it can be downloaded again.")
            os.unlink(self.headers_file)
        elif (self.local_header_file_size() % HEADER_SIZE) != 0:
            # checkpoint prefix is valid but a partial header trails it
            log.warning("Header file is good up to checkpoint height, but incomplete. Truncating to checkpoint.")
            with open(self.headers_file, "rb+") as headers_file:
                headers_file.truncate(checksum_length_in_bytes)

    @defer.inlineCallbacks
    def start(self):
        conf.settings.ensure_wallet_dir()
        if not os.path.exists(self.headers_dir):
            os.mkdir(self.headers_dir)
        # migrate headers from the legacy location if present
        if os.path.exists(self.old_file):
            log.warning("Moving old headers from %s to %s.", self.old_file, self.headers_file)
            os.rename(self.old_file, self.headers_file)
        self._downloading_headers = yield f2d(self.should_download_headers_from_s3())
        if self._downloading_headers:
            try:
                yield self.fetch_headers_from_s3()
            except Exception as err:
                # best-effort: a failed s3 fetch falls back to normal syncing
                log.error("failed to fetch headers from s3: %s", err)
            finally:
                self._downloading_headers = False

    def stop(self):
        # nothing to tear down; the component holds no long-lived resources
        return defer.succeed(None)
|
2018-07-24 18:26:29 +02:00
|
|
|
|
|
|
|
|
2018-10-16 17:03:56 +02:00
|
|
|
def d2f(deferred):
    """Convert a twisted Deferred into an asyncio Future on the current loop."""
    loop = asyncio.get_event_loop()
    return deferred.asFuture(loop)
|
|
|
|
|
|
|
|
|
|
|
|
def f2d(future):
    """Wrap an asyncio awaitable in a twisted Deferred."""
    wrapped = asyncio.ensure_future(future)
    return defer.Deferred.fromFuture(wrapped)
|
2018-10-16 17:03:56 +02:00
|
|
|
|
|
|
|
|
2018-07-25 21:32:01 +02:00
|
|
|
class WalletComponent(Component):
    """Starts the torba-based LbryWalletManager once the database and
    blockchain headers are available."""

    component_name = WALLET_COMPONENT
    depends_on = [DATABASE_COMPONENT, HEADERS_COMPONENT]

    def __init__(self, component_manager):
        super().__init__(component_manager)
        # LbryWalletManager, created in start(), cleared in stop()
        self.wallet_manager = None

    @property
    def component(self):
        return self.wallet_manager

    def get_status(self):
        # implicitly returns None until the component is up and running
        if self.wallet_manager and self.running:
            local_height = self.wallet_manager.network.get_local_height()
            remote_height = self.wallet_manager.network.get_server_height()
            best_hash = self.wallet_manager.get_best_blockhash()
            return {
                'blocks': max(local_height, 0),
                'blocks_behind': max(remote_height - local_height, 0),
                'best_blockhash': best_hash,
                'is_encrypted': self.wallet_manager.wallet.use_encryption,
                'is_locked': not self.wallet_manager.is_wallet_unlocked,
            }

    @defer.inlineCallbacks
    def start(self):
        conf.settings.ensure_wallet_dir()
        log.info("Starting torba wallet")
        storage = self.component_manager.get_component(DATABASE_COMPONENT)
        lbrynet.schema.BLOCKCHAIN_NAME = conf.settings['blockchain_name']
        # the wallet manager is asyncio-based; bridge to twisted via f2d
        self.wallet_manager = yield f2d(LbryWalletManager.from_lbrynet_config(conf.settings, storage))
        self.wallet_manager.old_db = storage
        yield f2d(self.wallet_manager.start())

    @defer.inlineCallbacks
    def stop(self):
        yield f2d(self.wallet_manager.stop())
        self.wallet_manager = None
|
2018-07-24 18:26:29 +02:00
|
|
|
|
|
|
|
|
2018-07-25 21:33:43 +02:00
|
|
|
class BlobComponent(Component):
    """Manages blob files on disk through a DiskBlobManager."""

    component_name = BLOB_COMPONENT
    depends_on = [DATABASE_COMPONENT]

    def __init__(self, component_manager):
        super().__init__(component_manager)
        # DiskBlobManager, created in start()
        self.blob_manager = None

    @property
    def component(self):
        return self.blob_manager

    def start(self):
        storage = self.component_manager.get_component(DATABASE_COMPONENT)
        datastore = None
        # the DHT is a soft dependency: use its datastore only when the DHT
        # component is enabled and actually available
        if DHT_COMPONENT not in self.component_manager.skip_components:
            dht_node = self.component_manager.get_component(DHT_COMPONENT)
            if dht_node:
                datastore = dht_node._dataStore
        self.blob_manager = DiskBlobManager(CS.get_blobfiles_dir(), storage, datastore)
        return self.blob_manager.setup()

    def stop(self):
        return self.blob_manager.stop()

    @defer.inlineCallbacks
    def get_status(self):
        # count is 0 when the blob manager hasn't been started yet
        count = 0
        if self.blob_manager:
            count = yield self.blob_manager.storage.count_finished_blobs()
        defer.returnValue({
            'finished_blobs': count
        })
|
|
|
|
|
2018-07-24 18:26:29 +02:00
|
|
|
|
|
|
|
class DHTComponent(Component):
    """Starts the DHT node, using ports/IP discovered by the UPnP component
    when available."""

    component_name = DHT_COMPONENT
    depends_on = [UPNP_COMPONENT]

    def __init__(self, component_manager):
        super().__init__(component_manager)
        self.dht_node = None
        self.upnp_component = None
        # externally visible ports, resolved from UPnP redirects in start()
        self.external_udp_port = None
        self.external_peer_port = None

    @property
    def component(self):
        return self.dht_node

    def get_status(self):
        return {
            'node_id': binascii.hexlify(CS.get_node_id()),
            'peers_in_routing_table': 0 if not self.dht_node else len(self.dht_node.contacts)
        }

    @defer.inlineCallbacks
    def start(self):
        self.upnp_component = self.component_manager.get_component(UPNP_COMPONENT)
        # prefer ports UPnP managed to map; fall back to the configured ones
        self.external_peer_port = self.upnp_component.upnp_redirects.get("TCP", GCS("peer_port"))
        self.external_udp_port = self.upnp_component.upnp_redirects.get("UDP", GCS("dht_node_port"))
        node_id = CS.get_node_id()
        if node_id is None:
            node_id = generate_id()
        external_ip = self.upnp_component.external_ip
        if not external_ip:
            log.warning("UPnP component failed to get external ip")
            # fall back to the lbry.io ip-echo service
            external_ip = yield CS.get_external_ip()
            if not external_ip:
                log.warning("failed to get external ip")

        self.dht_node = Node(
            node_id=node_id,
            udpPort=GCS('dht_node_port'),
            externalUDPPort=self.external_udp_port,
            externalIP=external_ip,
            peerPort=self.external_peer_port
        )

        # don't block daemon startup on joining the DHT network
        yield self.dht_node.start(GCS('known_dht_nodes'), block_on_join=False)
        log.info("Started the dht")

    @defer.inlineCallbacks
    def stop(self):
        yield self.dht_node.stop()
|
|
|
|
|
|
|
|
|
|
|
|
class HashAnnouncerComponent(Component):
    """Runs the DHT hash announcer that advertises stored blob hashes."""

    component_name = HASH_ANNOUNCER_COMPONENT
    depends_on = [DHT_COMPONENT, DATABASE_COMPONENT]

    def __init__(self, component_manager):
        super().__init__(component_manager)
        # DHTHashAnnouncer, created in start()
        self.hash_announcer = None

    @property
    def component(self):
        return self.hash_announcer

    @defer.inlineCallbacks
    def start(self):
        manager = self.component_manager
        db = manager.get_component(DATABASE_COMPONENT)
        node = manager.get_component(DHT_COMPONENT)
        self.hash_announcer = DHTHashAnnouncer(node, db)
        yield self.hash_announcer.start()

    @defer.inlineCallbacks
    def stop(self):
        yield self.hash_announcer.stop()

    def get_status(self):
        # queue size is 0 before start() has created the announcer
        if self.hash_announcer:
            queue_size = len(self.hash_announcer.hash_queue)
        else:
            queue_size = 0
        return {'announce_queue_size': queue_size}
|
|
|
|
|
2018-07-24 18:26:29 +02:00
|
|
|
|
2018-07-25 21:33:43 +02:00
|
|
|
class RateLimiterComponent(Component):
    """Owns the global transfer RateLimiter shared by the p2p servers."""

    component_name = RATE_LIMITER_COMPONENT

    def __init__(self, component_manager):
        super().__init__(component_manager)
        # constructed eagerly; start()/stop() just toggle it
        self.rate_limiter = RateLimiter()

    @property
    def component(self):
        return self.rate_limiter

    def start(self):
        # RateLimiter.start is synchronous; return an already-fired Deferred
        self.rate_limiter.start()
        return defer.succeed(None)

    def stop(self):
        self.rate_limiter.stop()
        return defer.succeed(None)
|
|
|
|
|
|
|
|
|
|
|
|
class PaymentRateComponent(Component):
    """Provides the OnlyFreePaymentsManager used for blob transfer pricing."""

    component_name = PAYMENT_RATE_COMPONENT

    def __init__(self, component_manager):
        super().__init__(component_manager)
        # ready as soon as it is constructed; no setup/teardown needed
        self.payment_rate_manager = OnlyFreePaymentsManager()

    @property
    def component(self):
        return self.payment_rate_manager

    def start(self):
        # no asynchronous work to do
        return defer.succeed(None)

    def stop(self):
        return defer.succeed(None)
|
|
|
|
|
|
|
|
|
2018-07-24 18:26:29 +02:00
|
|
|
class FileManagerComponent(Component):
    """Sets up the EncryptedFileManager plus the stream-downloader factory
    used to save encrypted streams to the download directory."""

    component_name = FILE_MANAGER_COMPONENT
    depends_on = [RATE_LIMITER_COMPONENT, BLOB_COMPONENT, DATABASE_COMPONENT, WALLET_COMPONENT,
                  PAYMENT_RATE_COMPONENT]

    def __init__(self, component_manager):
        super().__init__(component_manager)
        # EncryptedFileManager, created in start()
        self.file_manager = None

    @property
    def component(self):
        return self.file_manager

    def get_status(self):
        # implicitly returns None before start() has run
        if not self.file_manager:
            return
        return {
            'managed_files': len(self.file_manager.lbry_files)
        }

    @defer.inlineCallbacks
    def start(self):
        rate_limiter = self.component_manager.get_component(RATE_LIMITER_COMPONENT)
        blob_manager = self.component_manager.get_component(BLOB_COMPONENT)
        storage = self.component_manager.get_component(DATABASE_COMPONENT)
        wallet = self.component_manager.get_component(WALLET_COMPONENT)

        # register the file-saver factory so encrypted streams can be
        # identified and downloaded to disk
        sd_identifier = StreamDescriptorIdentifier()
        add_lbry_file_to_sd_identifier(sd_identifier)
        file_saver_factory = EncryptedFileSaverFactory(
            self.component_manager.peer_finder,
            rate_limiter,
            blob_manager,
            storage,
            wallet,
            GCS('download_directory')
        )
        yield sd_identifier.add_stream_downloader_factory(EncryptedFileStreamType, file_saver_factory)

        payment_rate_manager = self.component_manager.get_component(PAYMENT_RATE_COMPONENT)
        log.info('Starting the file manager')
        self.file_manager = EncryptedFileManager(self.component_manager.peer_finder, rate_limiter, blob_manager, wallet,
                                                 payment_rate_manager, storage, sd_identifier)
        yield self.file_manager.setup()
        log.info('Done setting up file manager')

    @defer.inlineCallbacks
    def stop(self):
        yield self.file_manager.stop()
|
|
|
|
|
|
|
|
|
|
|
|
class PeerProtocolServerComponent(Component):
    """Listens on the peer port and serves blob and wallet-info queries to
    other lbrynet peers."""

    component_name = PEER_PROTOCOL_SERVER_COMPONENT
    depends_on = [UPNP_COMPONENT, RATE_LIMITER_COMPONENT, BLOB_COMPONENT, WALLET_COMPONENT,
                  PAYMENT_RATE_COMPONENT]

    def __init__(self, component_manager):
        super().__init__(component_manager)
        # twisted listening port, set once listenTCP succeeds
        self.lbry_server_port = None

    @property
    def component(self):
        return self.lbry_server_port

    @defer.inlineCallbacks
    def start(self):
        wallet = self.component_manager.get_component(WALLET_COMPONENT)
        upnp = self.component_manager.get_component(UPNP_COMPONENT)
        peer_port = GCS('peer_port')
        # map each query identifier to the handler factory that serves it
        query_handlers = {
            handler.get_primary_query_identifier(): handler for handler in [
                BlobRequestHandlerFactory(
                    self.component_manager.get_component(BLOB_COMPONENT),
                    wallet,
                    self.component_manager.get_component(PAYMENT_RATE_COMPONENT),
                    self.component_manager.analytics_manager
                ),
                wallet.get_wallet_info_query_handler_factory(),
            ]
        }
        server_factory = ServerProtocolFactory(
            self.component_manager.get_component(RATE_LIMITER_COMPONENT), query_handlers,
            self.component_manager.peer_manager
        )

        try:
            log.info("Peer protocol listening on TCP %i (ext port %i)", peer_port,
                     upnp.upnp_redirects.get("TCP", peer_port))
            self.lbry_server_port = yield reactor.listenTCP(peer_port, server_factory)
        except error.CannotListenError as e:
            import traceback
            log.error("Couldn't bind to port %d. Visit lbry.io/faq/how-to-change-port for"
                      " more details.", peer_port)
            log.error("%s", traceback.format_exc())
            raise ValueError("%s lbrynet may already be running on your computer." % str(e))

    @defer.inlineCallbacks
    def stop(self):
        if self.lbry_server_port is not None:
            # clear the attribute before closing so the component reads as
            # stopped while stopListening is in flight
            self.lbry_server_port, old_port = None, self.lbry_server_port
            log.info('Stop listening on port %s', old_port.port)
            yield old_port.stopListening()
|
|
|
|
|
|
|
|
|
|
|
|
class ReflectorComponent(Component):
    """Runs the reflector TCP server so peers can push blobs and streams
    to this node."""

    component_name = REFLECTOR_COMPONENT
    depends_on = [BLOB_COMPONENT, FILE_MANAGER_COMPONENT]

    def __init__(self, component_manager):
        super().__init__(component_manager)
        self.reflector_server_port = GCS('reflector_port')
        # twisted listening port, set once listenTCP succeeds
        self.reflector_server = None

    @property
    def component(self):
        return self.reflector_server

    @defer.inlineCallbacks
    def start(self):
        log.info("Starting reflector server")
        blob_manager = self.component_manager.get_component(BLOB_COMPONENT)
        file_manager = self.component_manager.get_component(FILE_MANAGER_COMPONENT)
        reflector_factory = reflector_server_factory(self.component_manager.peer_manager, blob_manager, file_manager)
        try:
            self.reflector_server = yield reactor.listenTCP(self.reflector_server_port, reflector_factory)
            log.info('Started reflector on port %s', self.reflector_server_port)
        except error.CannotListenError as e:
            log.exception("Couldn't bind reflector to port %d", self.reflector_server_port)
            raise ValueError(f"{e} lbrynet may already be running on your computer.")

    @defer.inlineCallbacks
    def stop(self):
        if self.reflector_server is not None:
            log.info("Stopping reflector server")
            self.reflector_server, p = None, self.reflector_server
            # FIX: stopListening must be *called*; previously the bare method
            # object was yielded, so the listening port was never closed
            yield p.stopListening()
|
|
|
|
|
|
|
|
|
|
|
|
class UPnPComponent(Component):
    """Discovers a UPnP gateway, resolves the node's external IP, and creates and
    periodically refreshes TCP (peer) and UDP (DHT) port redirects on the gateway.
    """

    component_name = UPNP_COMPONENT

    def __init__(self, component_manager):
        super().__init__(component_manager)
        # Internal ports and the upnp toggle come from user settings.
        self._int_peer_port = GCS('peer_port')
        self._int_dht_node_port = GCS('dht_node_port')
        self.use_upnp = GCS('use_upnp')
        self.upnp = None  # aioupnp UPnP gateway handle, set on successful discovery
        self.upnp_redirects = {}  # protocol ("TCP"/"UDP") -> external port number
        self.external_ip = None
        # Redirects are refreshed on a timer (started with a 360s interval in start()).
        self._maintain_redirects_lc = task.LoopingCall(self._maintain_redirects)
        self._maintain_redirects_lc.clock = self.component_manager.reactor

    @property
    def component(self):
        return self

    @defer.inlineCallbacks
    def _setup_redirects(self):
        """Create port mappings on the gateway for the enabled components."""
        d = {}
        if PEER_PROTOCOL_SERVER_COMPONENT not in self.component_manager.skip_components:
            d["TCP"] = from_future(self.upnp.get_next_mapping(self._int_peer_port, "TCP", "LBRY peer port"))
        if DHT_COMPONENT not in self.component_manager.skip_components:
            d["UDP"] = from_future(self.upnp.get_next_mapping(self._int_dht_node_port, "UDP", "LBRY DHT port"))
        upnp_redirects = yield DeferredDict(d)
        self.upnp_redirects.update(upnp_redirects)

    @defer.inlineCallbacks
    def _maintain_redirects(self):
        """(Re)discover the gateway, refresh the external IP, and ensure the
        TCP/UDP redirects are present and still active. Runs periodically."""
        # setup the gateway if necessary
        if not self.upnp:
            try:
                self.upnp = yield from_future(UPnP.discover())
                log.info("found upnp gateway: %s", self.upnp.gateway.manufacturer_string)
            except Exception as err:
                log.warning("upnp discovery failed: %s", err)
                self.upnp = None

        # update the external ip
        external_ip = None
        if self.upnp:
            try:
                external_ip = yield from_future(self.upnp.get_external_ip())
                if external_ip != "0.0.0.0":
                    log.info("got external ip from UPnP: %s", external_ip)
            except (asyncio.TimeoutError, UPnPError):
                pass

        if external_ip == "0.0.0.0" or not external_ip:
            log.warning("unable to get external ip from UPnP, checking lbry.io fallback")
            external_ip = yield CS.get_external_ip()
        if self.external_ip and self.external_ip != external_ip:
            log.info("external ip changed from %s to %s", self.external_ip, external_ip)
        self.external_ip = external_ip
        assert self.external_ip is not None  # TODO: handle going/starting offline

        if not self.upnp_redirects and self.upnp:  # setup missing redirects
            try:
                log.info("add UPnP port mappings")
                d = {}
                if PEER_PROTOCOL_SERVER_COMPONENT not in self.component_manager.skip_components:
                    d["TCP"] = from_future(self.upnp.get_next_mapping(self._int_peer_port, "TCP", "LBRY peer port"))
                if DHT_COMPONENT not in self.component_manager.skip_components:
                    d["UDP"] = from_future(self.upnp.get_next_mapping(self._int_dht_node_port, "UDP", "LBRY DHT port"))
                upnp_redirects = yield DeferredDict(d)
                log.info("set up redirects: %s", upnp_redirects)
                self.upnp_redirects.update(upnp_redirects)
            except (asyncio.TimeoutError, UPnPError):
                self.upnp = None
                # NOTE(review): the retry deferred is returned without being
                # yielded, so this call does not wait for the retry to finish —
                # confirm that fire-and-forget behavior is intended here.
                return self._maintain_redirects()
        elif self.upnp:  # check existing redirects are still active
            found = set()
            mappings = yield from_future(self.upnp.get_redirects())
            for mapping in mappings:
                proto = mapping['NewProtocol']
                if proto in self.upnp_redirects and mapping['NewExternalPort'] == self.upnp_redirects[proto]:
                    if mapping['NewInternalClient'] == self.upnp.lan_address:
                        found.add(proto)
            if 'UDP' not in found and DHT_COMPONENT not in self.component_manager.skip_components:
                try:
                    udp_port = yield from_future(
                        self.upnp.get_next_mapping(self._int_dht_node_port, "UDP", "LBRY DHT port")
                    )
                    self.upnp_redirects['UDP'] = udp_port
                    log.info("refreshed upnp redirect for dht port: %i", udp_port)
                except (asyncio.TimeoutError, UPnPError):
                    # BUG FIX: was `del self.upnp_redirects['UDP']`, which raises
                    # KeyError when no UDP redirect had been recorded yet (the
                    # guard above only checks `found`, not the dict).
                    self.upnp_redirects.pop('UDP', None)
            if 'TCP' not in found and PEER_PROTOCOL_SERVER_COMPONENT not in self.component_manager.skip_components:
                try:
                    tcp_port = yield from_future(
                        self.upnp.get_next_mapping(self._int_peer_port, "TCP", "LBRY peer port")
                    )
                    self.upnp_redirects['TCP'] = tcp_port
                    log.info("refreshed upnp redirect for peer port: %i", tcp_port)
                except (asyncio.TimeoutError, UPnPError):
                    # BUG FIX: same KeyError hazard as the UDP branch above.
                    self.upnp_redirects.pop('TCP', None)
            if ('TCP' in self.upnp_redirects
                and PEER_PROTOCOL_SERVER_COMPONENT not in self.component_manager.skip_components) and (
                    'UDP' in self.upnp_redirects and DHT_COMPONENT not in self.component_manager.skip_components):
                if self.upnp_redirects:
                    log.debug("upnp redirects are still active")

    @defer.inlineCallbacks
    def start(self):
        """Detect the external IP; when UPnP is enabled, also discover the gateway,
        set up redirects, report success/failure to analytics, and start the
        periodic refresh loop."""
        log.info("detecting external ip")
        if not self.use_upnp:
            # UPnP disabled by settings: only resolve the external IP via lbry.io.
            self.external_ip = yield CS.get_external_ip()
            return
        success = False
        yield self._maintain_redirects()
        if self.upnp:
            if not self.upnp_redirects and not all([x in self.component_manager.skip_components for x in
                                                    (DHT_COMPONENT, PEER_PROTOCOL_SERVER_COMPONENT)]):
                # TYPO FIX in log message: "infomation" -> "information".
                log.error("failed to setup upnp, debugging information: %s", self.upnp.zipped_debugging_info)
            else:
                success = True
                if self.upnp_redirects:
                    log.debug("set up upnp port redirects for gateway: %s", self.upnp.gateway.manufacturer_string)
        else:
            log.error("failed to setup upnp")
        self.component_manager.analytics_manager.send_upnp_setup_success_fail(success, self.get_status())
        self._maintain_redirects_lc.start(360, now=False)

    def stop(self):
        """Stop the refresh loop and delete the port mappings we created.

        Returns a DeferredList that fires when all deletions have completed.
        """
        if self._maintain_redirects_lc.running:
            self._maintain_redirects_lc.stop()
        return defer.DeferredList(
            [from_future(self.upnp.delete_port_mapping(port, protocol))
             for protocol, port in self.upnp_redirects.items()]
        )

    def get_status(self):
        """Return a status dict describing the gateway, redirects and external IP."""
        return {
            'aioupnp_version': aioupnp_version,
            'redirects': self.upnp_redirects,
            'gateway': 'No gateway found' if not self.upnp else self.upnp.gateway.manufacturer_string,
            'dht_redirect_set': 'UDP' in self.upnp_redirects,
            'peer_redirect_set': 'TCP' in self.upnp_redirects,
            'external_ip': self.external_ip
        }
|
|
|
|
|
2018-07-24 18:26:29 +02:00
|
|
|
|
|
|
|
class ExchangeRateManagerComponent(Component):
    """Thin component wrapper that owns the ExchangeRateManager lifecycle."""

    component_name = EXCHANGE_RATE_MANAGER_COMPONENT

    def __init__(self, component_manager):
        super().__init__(component_manager)
        self.exchange_rate_manager = ExchangeRateManager()

    @property
    def component(self):
        return self.exchange_rate_manager

    @defer.inlineCallbacks
    def start(self):
        """Start periodic exchange-rate updates."""
        yield self.exchange_rate_manager.start()

    @defer.inlineCallbacks
    def stop(self):
        """Stop periodic exchange-rate updates."""
        yield self.exchange_rate_manager.stop()
|