forked from LBRYCommunity/lbry-sdk
conf no longer global
parent 150bcb1116
commit 7a28171a72
52 changed files with 917 additions and 1704 deletions
@@ -24,8 +24,8 @@ jobs:
      - pip install git+https://github.com/lbryio/torba.git#egg=torba
      - pip install -e .[test]
    script:
      - HOME=/tmp coverage run -p --source=lbrynet -m twisted.trial --reactor=asyncio tests.unit.analytics tests.unit.core tests.unit.cryptstream tests.unit.database tests.unit.dht tests.unit.lbryfilemanager tests.unit.lbrynet_daemon tests.unit.schema tests.unit.wallet tests.unit.components
      - HOME=/tmp coverage run -p --source=lbrynet -m twisted.trial --reactor=asyncio tests.unit.test_cli
      - HOME=/tmp coverage run -p --source=lbrynet -m twisted.trial --reactor=asyncio tests.unit.analytics tests.unit.core tests.unit.cryptstream tests.unit.database tests.unit.dht tests.unit.lbryfilemanager tests.unit.lbrynet_daemon tests.unit.schema tests.unit.wallet tests.unit.components tests.unit.test_conf
      #- HOME=/tmp coverage run -p --source=lbrynet -m twisted.trial --reactor=asyncio tests.unit.test_cli
    after_success:
      - coverage combine
      - bash <(curl -s https://codecov.io/bash)
@@ -5,13 +5,12 @@ import logging
from binascii import hexlify, unhexlify

from twisted.internet import defer
from lbrynet import conf
from lbrynet.conf import Config
from lbrynet.extras.compat import f2d
from lbrynet.p2p.client.StreamProgressManager import FullStreamProgressManager
from lbrynet.p2p.HTTPBlobDownloader import HTTPBlobDownloader
from lbrynet.utils import short_hash
from lbrynet.blob.client.EncryptedFileDownloader import EncryptedFileSaver
from lbrynet.blob.client.EncryptedFileDownloader import EncryptedFileDownloader
from lbrynet.blob.EncryptedFileStatusReport import EncryptedFileStatusReport
from lbrynet.p2p.StreamDescriptor import save_sd_info

@@ -35,12 +34,12 @@ class ManagedEncryptedFileDownloader(EncryptedFileSaver):
    STATUS_STOPPED = "stopped"
    STATUS_FINISHED = "finished"

    def __init__(self, rowid, stream_hash, peer_finder, rate_limiter, blob_manager, storage, lbry_file_manager,
                 payment_rate_manager, wallet, download_directory, file_name, stream_name, sd_hash, key,
                 suggested_file_name, download_mirrors=None):
    def __init__(self, conf: Config, rowid, stream_hash, peer_finder, rate_limiter, blob_manager, storage,
                 lbry_file_manager, payment_rate_manager, wallet, download_directory, file_name, stream_name,
                 sd_hash, key, suggested_file_name, download_mirrors=None):
        super().__init__(
            stream_hash, peer_finder, rate_limiter, blob_manager, storage, payment_rate_manager, wallet,
            download_directory, key, stream_name, file_name
            conf, stream_hash, peer_finder, rate_limiter, blob_manager, storage, payment_rate_manager,
            wallet, download_directory, key, stream_name, file_name
        )
        self.sd_hash = sd_hash
        self.rowid = rowid
@@ -56,9 +55,9 @@ class ManagedEncryptedFileDownloader(EncryptedFileSaver):
        self.channel_name = None
        self.metadata = None
        self.mirror = None
        if download_mirrors or conf.settings['download_mirrors']:
        if download_mirrors or conf.download_mirrors:
            self.mirror = HTTPBlobDownloader(
                self.blob_manager, servers=download_mirrors or conf.settings['download_mirrors']
                self.blob_manager, servers=download_mirrors or conf.download_mirrors
            )

    def set_claim_info(self, claim_info):
@@ -100,7 +99,7 @@ class ManagedEncryptedFileDownloader(EncryptedFileSaver):
        if self.mirror:
            self.mirror.stop()
        # EncryptedFileSaver deletes metadata when it's stopped. We don't want that here.
        yield EncryptedFileDownloader.stop(self, err=err)
        yield super().stop(err)
        if change_status is True:
            status = yield self._save_status()
            defer.returnValue(status)
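
The change above is the template for the whole commit: instead of importing the module-level `conf` and reading `conf.settings['...']`, each object takes a `Config` instance in its constructor and reads plain attributes on it. A minimal, self-contained sketch of the pattern (the class names below are stand-ins, not the real lbrynet types):

```python
class Config:
    """Stand-in for lbrynet.conf.Config, with only the setting used below."""
    def __init__(self, download_mirrors=None):
        self.download_mirrors = download_mirrors or [('blobs.lbry.io', 80)]


class Downloader:
    def __init__(self, conf: Config, download_mirrors=None):
        # attribute access replaces the old conf.settings['download_mirrors'] lookup
        self.servers = download_mirrors or conf.download_mirrors


downloader = Downloader(Config())              # dependencies passed explicitly
assert downloader.servers == [('blobs.lbry.io', 80)]
test_downloader = Downloader(Config(download_mirrors=[('localhost', 80)]))
assert test_downloader.servers == [('localhost', 80)]  # isolated, no global to patch
```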

@@ -3,11 +3,12 @@ Keep track of which LBRY Files are downloading and store their LBRY File specifi
"""
import os
import logging
import random
from binascii import hexlify, unhexlify

from twisted.internet import defer, task, reactor
from twisted.python.failure import Failure
from lbrynet import conf
from lbrynet.conf import Config
from lbrynet.extras.compat import f2d
from lbrynet.extras.reflector.reupload import reflect_file
from lbrynet.blob.EncryptedFileDownloader import ManagedEncryptedFileDownloader

@@ -28,9 +29,11 @@ class EncryptedFileManager:
    # when reflecting files, reflect up to this many files at a time
    CONCURRENT_REFLECTS = 5

    def __init__(self, peer_finder, rate_limiter, blob_manager, wallet, payment_rate_manager, storage, sd_identifier):
        self.auto_re_reflect = conf.settings['reflect_uploads'] and conf.settings['auto_re_reflect_interval'] > 0
        self.auto_re_reflect_interval = conf.settings['auto_re_reflect_interval']
    def __init__(self, conf: Config, peer_finder, rate_limiter, blob_manager, wallet,
                 payment_rate_manager, storage, sd_identifier):
        self.conf = conf
        self.auto_re_reflect = conf.reflect_uploads and conf.auto_re_reflect_interval > 0
        self.auto_re_reflect_interval = conf.auto_re_reflect_interval
        self.peer_finder = peer_finder
        self.rate_limiter = rate_limiter
        self.blob_manager = blob_manager
@@ -78,6 +81,7 @@ class EncryptedFileManager:
    def _get_lbry_file(self, rowid, stream_hash, payment_rate_manager, sd_hash, key,
                       stream_name, file_name, download_directory, suggested_file_name, download_mirrors=None):
        return ManagedEncryptedFileDownloader(
            self.conf,
            rowid,
            stream_hash,
            self.peer_finder,
@@ -239,7 +243,7 @@ class EncryptedFileManager:
        sd_hashes_to_reflect = yield f2d(self.storage.get_streams_to_re_reflect())
        for lbry_file in self.lbry_files:
            if lbry_file.sd_hash in sd_hashes_to_reflect:
                ds.append(sem.run(reflect_file, lbry_file))
                ds.append(sem.run(reflect_file, lbry_file, random.choice(self.conf.reflector_servers)))
        yield defer.DeferredList(ds)

    @defer.inlineCallbacks
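
The re-reflect loop now also picks the reflector server explicitly from `self.conf.reflector_servers` instead of letting `reflect_file` consult the global settings. A hedged sketch of the same concurrency shape, with `reflect_file` replaced by a stub since its real signature lives in `lbrynet.extras.reflector.reupload`:

```python
import random
from twisted.internet import defer


def reflect_file(lbry_file, server):
    # stub standing in for lbrynet.extras.reflector.reupload.reflect_file
    return defer.succeed((lbry_file, server))


def re_reflect(lbry_files, reflector_servers, concurrency=5):
    # same shape as the loop above: bounded concurrency, caller chooses the server
    sem = defer.DeferredSemaphore(concurrency)
    ds = [sem.run(reflect_file, f, random.choice(reflector_servers)) for f in lbry_files]
    return defer.DeferredList(ds)


d = re_reflect(['sd_hash_1', 'sd_hash_2'], [('reflector.lbry.io', 5566)])
```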

@@ -3,6 +3,7 @@ from binascii import unhexlify
from twisted.internet import defer
from twisted.python.failure import Failure

from lbrynet.conf import Config
from lbrynet.p2p.client.BlobRequester import BlobRequester
from lbrynet.p2p.client.ConnectionManager import ConnectionManager
from lbrynet.p2p.client.DownloadManager import DownloadManager
@@ -37,7 +38,7 @@ class CryptStreamDownloader:

    #implements(IStreamDownloader)

    def __init__(self, peer_finder, rate_limiter, blob_manager, payment_rate_manager, wallet,
    def __init__(self, conf: Config, peer_finder, rate_limiter, blob_manager, payment_rate_manager, wallet,
                 key, stream_name):
        """Initialize a CryptStreamDownloader

@@ -55,7 +56,7 @@ class CryptStreamDownloader:
        @return:

        """
        self.conf = conf
        self.peer_finder = peer_finder
        self.rate_limiter = rate_limiter
        self.blob_manager = blob_manager
@@ -4,6 +4,7 @@ import traceback
from binascii import hexlify, unhexlify
from twisted.internet import defer, threads

from lbrynet.conf import Config
from lbrynet.extras.compat import f2d
from lbrynet.p2p.StreamDescriptor import save_sd_info
from lbrynet.blob.client.CryptStreamDownloader import CryptStreamDownloader
@@ -18,9 +19,9 @@ log = logging.getLogger(__name__)
class EncryptedFileDownloader(CryptStreamDownloader):
    """Classes which inherit from this class download LBRY files"""

    def __init__(self, stream_hash, peer_finder, rate_limiter, blob_manager,
    def __init__(self, conf: Config, stream_hash, peer_finder, rate_limiter, blob_manager,
                 storage, payment_rate_manager, wallet, key, stream_name, file_name):
        super().__init__(peer_finder, rate_limiter, blob_manager,
        super().__init__(conf, peer_finder, rate_limiter, blob_manager,
                         payment_rate_manager, wallet, key, stream_name)
        self.stream_hash = stream_hash
        self.storage = storage
@@ -37,7 +38,7 @@ class EncryptedFileDownloader(CryptStreamDownloader):

    def stop(self, err=None):
        self._close_output()
        return CryptStreamDownloader.stop(self, err=err)
        return super().stop(err=err)

    def _get_progress_manager(self, download_manager):
        return FullStreamProgressManager(self._finished_downloading,
@@ -97,7 +98,8 @@ class EncryptedFileDownloader(CryptStreamDownloader):
class EncryptedFileDownloaderFactory:
    #implements(IStreamDownloaderFactory)

    def __init__(self, peer_finder, rate_limiter, blob_manager, storage, wallet):
    def __init__(self, conf: Config, peer_finder, rate_limiter, blob_manager, storage, wallet):
        self.conf = conf
        self.peer_finder = peer_finder
        self.rate_limiter = rate_limiter
        self.blob_manager = blob_manager
@@ -130,9 +132,9 @@ class EncryptedFileDownloaderFactory:


class EncryptedFileSaver(EncryptedFileDownloader):
    def __init__(self, stream_hash, peer_finder, rate_limiter, blob_manager, storage, payment_rate_manager, wallet,
                 download_directory, key, stream_name, file_name):
        super().__init__(stream_hash, peer_finder, rate_limiter,
    def __init__(self, conf: Config, stream_hash, peer_finder, rate_limiter, blob_manager, storage,
                 payment_rate_manager, wallet, download_directory, key, stream_name, file_name):
        super().__init__(conf, stream_hash, peer_finder, rate_limiter,
                         blob_manager, storage, payment_rate_manager,
                         wallet, key, stream_name, file_name)
        self.download_directory = unhexlify(download_directory).decode()
@@ -142,10 +144,6 @@ class EncryptedFileSaver(EncryptedFileDownloader):
    def __str__(self):
        return str(self.file_written_to)

    def stop(self, err=None):
        d = EncryptedFileDownloader.stop(self, err=err)
        return d

    def _get_progress_manager(self, download_manager):
        return FullStreamProgressManager(self._finished_downloading,
                                         self.blob_manager,
@@ -182,8 +180,8 @@ class EncryptedFileSaver(EncryptedFileDownloader):


class EncryptedFileSaverFactory(EncryptedFileDownloaderFactory):
    def __init__(self, peer_finder, rate_limiter, blob_manager, storage, wallet, download_directory):
        super().__init__(peer_finder, rate_limiter, blob_manager, storage, wallet)
    def __init__(self, conf: Config, peer_finder, rate_limiter, blob_manager, storage, wallet, download_directory):
        super().__init__(conf, peer_finder, rate_limiter, blob_manager, storage, wallet)
        self.download_directory = hexlify(download_directory.encode())

    def _make_downloader(self, stream_hash, payment_rate_manager, stream_info):
@@ -191,8 +189,9 @@ class EncryptedFileSaverFactory(EncryptedFileDownloaderFactory):
        key = stream_info.raw_info['key']
        suggested_file_name = stream_info.raw_info['suggested_file_name']
        return EncryptedFileSaver(
            stream_hash, self.peer_finder, self.rate_limiter, self.blob_manager, self.storage, payment_rate_manager,
            self.wallet, self.download_directory, key=key, stream_name=stream_name, file_name=suggested_file_name
            self.conf, stream_hash, self.peer_finder, self.rate_limiter, self.blob_manager, self.storage,
            payment_rate_manager, self.wallet, self.download_directory, key=key, stream_name=stream_name,
            file_name=suggested_file_name
        )

    @staticmethod
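
The factories follow suit: they accept the `Config` first and thread it down to every downloader they build. A sketch of constructing the saver factory under the new signature; the `None` arguments stand in for collaborators (peer finder, rate limiter, blob manager, storage, wallet) that the daemon normally supplies:

```python
from lbrynet.conf import Config
from lbrynet.blob.client.EncryptedFileDownloader import EncryptedFileSaverFactory

conf = Config(download_dir='/tmp/downloads')   # per-instance configuration
factory = EncryptedFileSaverFactory(
    conf, None, None, None, None, None,        # collaborator stubs for illustration
    download_directory=conf.download_dir,
)
assert factory.conf is conf                    # carried along for _make_downloader
```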
lbrynet/conf.py (845 changes)
@@ -4,77 +4,62 @@ import sys
import typing
import json
import logging
import base58
import yaml
from argparse import ArgumentParser
from contextlib import contextmanager
from appdirs import user_data_dir, user_config_dir
from lbrynet import utils
from lbrynet.p2p.Error import InvalidCurrencyError

log = logging.getLogger(__name__)

NOT_SET = type(str('NoValue'), (object,), {})
NOT_SET = type(str('NOT_SET'), (object,), {})
T = typing.TypeVar('T')

KB = 2 ** 10
MB = 2 ** 20

ANALYTICS_ENDPOINT = 'https://api.segment.io/v1'
ANALYTICS_TOKEN = 'Ax5LZzR1o3q3Z3WjATASDwR5rKyHH0qOIRIbLmMXn2H='
API_ADDRESS = 'lbryapi'
APP_NAME = 'LBRY'
BLOBFILES_DIR = 'blobfiles'
CRYPTSD_FILE_EXTENSION = '.cryptsd'
CURRENCIES = {
    'BTC': {'type': 'crypto'},
    'LBC': {'type': 'crypto'},
    'USD': {'type': 'fiat'},
}
ICON_PATH = 'icons' if 'win' in sys.platform else 'app.icns'
LOG_FILE_NAME = 'lbrynet.log'
LOG_POST_URL = 'https://lbry.io/log-upload'
MAX_BLOB_REQUEST_SIZE = 64 * KB
MAX_HANDSHAKE_SIZE = 64 * KB
MAX_REQUEST_SIZE = 64 * KB
MAX_RESPONSE_INFO_SIZE = 64 * KB
MAX_BLOB_INFOS_TO_REQUEST = 20
PROTOCOL_PREFIX = 'lbry'
SLACK_WEBHOOK = (
    'nUE0pUZ6Yl9bo29epl5moTSwnl5wo20ip2IlqzywMKZiIQSFZR5'
    'AHx4mY0VmF0WQZ1ESEP9kMHZlp1WzJwWOoKN3ImR1M2yUAaMyqGZ='
)
HEADERS_FILE_SHA256_CHECKSUM = (
    366295, 'b0c8197153a33ccbc52fb81a279588b6015b68b7726f73f6a2b81f7e25bfe4b9'
)


class Setting(typing.Generic[T]):

    def __init__(self, default: typing.Optional[T]):
    def __init__(self, doc: str, default: typing.Optional[T] = None,
                 previous_names: typing.Optional[typing.List[str]] = None):
        self.doc = doc
        self.default = default
        self.previous_names = previous_names or []

    def __set_name__(self, owner, name):
        self.name = name

    def __get__(self, obj: typing.Optional['Configuration'], owner) -> T:
    def __get__(self, obj: typing.Optional['BaseConfig'], owner) -> T:
        if obj is None:
            return self
        for location in obj.search_order:
@@ -82,7 +67,7 @@ class Setting(typing.Generic[T]):
                return location[self.name]
        return self.default

    def __set__(self, obj: 'Configuration', val: typing.Union[T, NOT_SET]):
    def __set__(self, obj: 'BaseConfig', val: typing.Union[T, NOT_SET]):
        if val == NOT_SET:
            for location in obj.modify_order:
                if self.name in location:
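
`Setting` is a typed descriptor: attribute reads walk the owning config's `search_order` and return the first layer that defines the key, falling back to the declared default, while writes go through `__set__` into a modifiable layer. A self-contained demo of that lookup, reduced to two layers:

```python
import typing

T = typing.TypeVar('T')


class Setting(typing.Generic[T]):
    def __init__(self, doc: str, default: typing.Optional[T] = None):
        self.doc = doc
        self.default = default

    def __set_name__(self, owner, name):
        self.name = name  # the attribute name on the owning config class

    def __get__(self, obj, owner) -> T:
        if obj is None:
            return self
        for location in obj.search_order:   # first layer holding the key wins
            if self.name in location:
                return location[self.name]
        return self.default


class DemoConfig:
    peer_port = Setting("Port for peer connections.", 3333)

    def __init__(self):
        self.runtime, self.persisted = {}, {}
        self.search_order = [self.runtime, self.persisted]


conf = DemoConfig()
assert conf.peer_port == 3333            # nothing set anywhere: default wins
conf.persisted['peer_port'] = 4444       # e.g. loaded from daemon_settings.yml
assert conf.peer_port == 4444            # persisted layer shadows the default
conf.runtime['peer_port'] = 5555         # runtime layer shadows everything below
assert conf.peer_port == 5555
```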

@@ -127,8 +112,8 @@ class Toggle(Setting[bool]):


class Path(String):
    def __init__(self):
        super().__init__('')
    def __init__(self, doc: str, default: str = '', *args, **kwargs):
        super().__init__(doc, default, *args, **kwargs)

    def __get__(self, obj, owner):
        value = super().__get__(obj, owner)
@@ -224,7 +209,7 @@ class ArgumentAccess:

class ConfigFileAccess:

    def __init__(self, config: 'Configuration', path: str):
    def __init__(self, config: 'BaseConfig', path: str):
        self.configuration = config
        self.path = path
        self.data = {}
@@ -242,6 +227,11 @@ class ConfigFileAccess:
        serialized = yaml.load(raw) or {}
        for key, value in serialized.items():
            attr = getattr(cls, key, None)
            if attr is None:
                for setting in self.configuration.settings:
                    if key in setting.previous_names:
                        attr = setting
                        break
            if attr is not None:
                self.data[key] = attr.deserialize(value)

@@ -254,6 +244,17 @@ class ConfigFileAccess:
        with open(self.path, 'w') as config_file:
            config_file.write(yaml.safe_dump(serialized, default_flow_style=False))

    def upgrade(self) -> bool:
        upgraded = False
        for key in list(self.data):
            for setting in self.configuration.settings:
                if key in setting.previous_names:
                    self.data[setting.name] = self.data[key]
                    del self.data[key]
                    upgraded = True
                    break
        return upgraded

    def __contains__(self, item: str):
        return item in self.data
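
`previous_names` is the migration hook: `load` recognizes old keys, and `upgrade()` re-keys them so the file can be saved under the current names. A small standalone sketch of what that re-keying does to loaded YAML data (using the `share_usage_data` rename declared further down):

```python
# data as loaded from an old daemon_settings.yml
data = {'upload_log': False, 'peer_port': 4444}
previous_names = {'share_usage_data': ['upload_log', 'share_debug_info']}

upgraded = False
for key in list(data):
    for name, old_names in previous_names.items():
        if key in old_names:
            data[name] = data.pop(key)   # same value, current setting name
            upgraded = True

assert upgraded and data == {'share_usage_data': False, 'peer_port': 4444}
```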

@@ -267,31 +268,18 @@ class ConfigFileAccess:
        del self.data[key]


class Configuration:
class BaseConfig:

    config = Path()
    config = Path("Path to configuration file.")

    data_dir = Path()
    wallet_dir = Path()
    lbryum_wallet_dir = Path()
    download_dir = Path()

    # Changing this value is not-advised as it could potentially
    # expose the lbrynet daemon to the outside world which would
    # give an attacker access to your wallet and you could lose
    # all of your credits.
    api_host = String('localhost')
    api_port = Integer(5279)

    share_usage_data = Toggle(True)  # whether to share usage stats and diagnostic info with LBRY

    def __init__(self):
    def __init__(self, **kwargs):
        self.runtime = {}      # set internally or by various API calls
        self.arguments = {}    # from command line arguments
        self.environment = {}  # from environment variables
        self.persisted = {}    # from config file
        self.set_default_paths()
        self._updating_config = False
        for key, value in kwargs.items():
            setattr(self, key, value)

    @contextmanager
    def update_config(self):
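
`BaseConfig.__init__` now routes keyword arguments through `setattr`, so each override goes through `Setting.__set__` and lands in a per-instance layer (presumably `runtime`, the first entry of `modify_order`); nothing touches shared state. That is what lets call sites and tests write:

```python
from lbrynet.conf import Config

conf = Config()
test_conf = Config(use_upnp=False, peer_port=4445)   # isolated overrides
assert conf.peer_port == 3333                        # default untouched
assert test_conf.peer_port == 4445
```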

@@ -318,6 +306,156 @@ Configuration:
            self.persisted
        ]

    @classmethod
    def get_settings(cls):
        for setting in cls.__dict__.values():
            if isinstance(setting, Setting):
                yield setting

    @property
    def settings(self):
        return self.get_settings()

    @property
    def settings_dict(self):
        return {
            setting.name: getattr(self, setting.name) for setting in self.settings
        }

    @classmethod
    def create_from_arguments(cls, args):
        conf = cls()
        conf.set_arguments(args)
        conf.set_environment()
        conf.set_persisted()
        return conf

    @classmethod
    def contribute_args(cls, parser: ArgumentParser):
        for setting in cls.get_settings():
            if isinstance(setting, Toggle):
                parser.add_argument(
                    f"--{setting.name.replace('_', '-')}",
                    help=setting.doc,
                    action="store_true"
                )
            else:
                parser.add_argument(
                    f"--{setting.name.replace('_', '-')}",
                    help=setting.doc
                )

    def set_arguments(self, args):
        self.arguments = ArgumentAccess(args)

    def set_environment(self, environ=None):
        self.environment = EnvironmentAccess(environ or os.environ)

    def set_persisted(self, config_file_path=None):
        if config_file_path is None:
            config_file_path = self.config

        if not config_file_path:
            return

        ext = os.path.splitext(config_file_path)[1]
        assert ext in ('.yml', '.yaml'),\
            f"File extension '{ext}' is not supported, " \
            f"configuration file must be in YAML (.yaml)."

        self.persisted = ConfigFileAccess(self, config_file_path)
        if self.persisted.upgrade():
            self.persisted.save()
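
`contribute_args` derives CLI flags straight from the declared settings, so argparse and the config schema cannot drift apart: every setting becomes `--its-name`, and `Toggle`s become `store_true` switches. A sketch of wiring it up, mirroring `get_argument_parser` in the CLI changes below:

```python
import argparse
from lbrynet.conf import Config

parser = argparse.ArgumentParser('lbrynet')
Config.contribute_args(parser)                # e.g. peer_port -> --peer-port
args = parser.parse_args(['--peer-port', '4445'])
conf = Config.create_from_arguments(args)     # args > env > config file > defaults
```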

class CLIConfig(BaseConfig):

    # Changing this value is not-advised as it could potentially
    # expose the lbrynet daemon to the outside world which would
    # give an attacker access to your wallet and you could lose
    # all of your credits.
    api_host = String(
        'Host name for lbrynet daemon API.', 'localhost',
        previous_names=['API_INTERFACE']
    )
    api_port = Integer('Port for lbrynet daemon API.', 5279)

    @property
    def api_connection_url(self) -> str:
        return f"http://{self.api_host}:{self.api_port}/lbryapi"
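
With the defaults above, `api_connection_url` resolves to the local daemon endpoint:

```python
from lbrynet.conf import CLIConfig

conf = CLIConfig()
assert conf.api_connection_url == 'http://localhost:5279/lbryapi'
```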

class Config(CLIConfig):

    data_dir = Path("Directory path to store blobs.")
    download_dir = Path("Directory path to place assembled files downloaded from LBRY.")
    wallet_dir = Path(
        "Directory containing a 'wallets' subdirectory with 'default_wallet' file.",
        previous_names=['lbryum_wallet_dir']
    )

    share_usage_data = Toggle(
        "Whether to share usage stats and diagnostic info with LBRY.", True,
        previous_names=['upload_log', 'upload_log', 'share_debug_info']
    )

    # claims set to expire within this many blocks will be
    # automatically renewed after startup (if set to 0, renews
    # will not be made automatically)
    auto_renew_claim_height_delta = Integer("", 0)
    cache_time = Integer("", 150)
    data_rate = Float("points/megabyte", .0001)
    delete_blobs_on_remove = Toggle("", True)
    dht_node_port = Integer("", 4444)
    download_timeout = Integer("", 180)
    download_mirrors = Servers("", [
        ('blobs.lbry.io', 80)
    ])
    is_generous_host = Toggle("", True)
    announce_head_blobs_only = Toggle("", True)
    concurrent_announcers = Integer("", 10)
    known_dht_nodes = Servers("", [
        ('lbrynet1.lbry.io', 4444),  # US EAST
        ('lbrynet2.lbry.io', 4444),  # US WEST
        ('lbrynet3.lbry.io', 4444),  # EU
        ('lbrynet4.lbry.io', 4444)   # ASIA
    ])
    max_connections_per_stream = Integer("", 5)
    seek_head_blob_first = Toggle("", True)
    # TODO: writing json on the cmd line is a pain, come up with a nicer
    # parser for this data structure. maybe 'USD:25'
    max_key_fee = MaxKeyFee("", {'currency': 'USD', 'amount': 50.0})
    disable_max_key_fee = Toggle("", False)
    min_info_rate = Float("points/1000 infos", .02)
    min_valuable_hash_rate = Float("points/1000 infos", .05)
    min_valuable_info_rate = Float("points/1000 infos", .05)
    peer_port = Integer("", 3333)
    pointtrader_server = String("", 'http://127.0.0.1:2424')
    reflector_port = Integer("", 5566)
    # if reflect_uploads is True, send files to reflector after publishing (as well as a periodic check in the
    # event the initial upload failed or was disconnected part way through, provided the auto_re_reflect_interval > 0)
    reflect_uploads = Toggle("", True)
    auto_re_reflect_interval = Integer("set to 0 to disable", 86400)
    reflector_servers = Servers("", [
        ('reflector.lbry.io', 5566)
    ])
    run_reflector_server = Toggle("adds reflector to components_to_skip unless True", False)
    sd_download_timeout = Integer("", 3)
    peer_search_timeout = Integer("", 60)
    use_upnp = Toggle("", True)
    use_keyring = Toggle("", False)
    blockchain_name = String("", 'lbrycrd_main')
    lbryum_servers = Servers("", [
        ('lbryumx1.lbry.io', 50001),
        ('lbryumx2.lbry.io', 50001)
    ])
    s3_headers_depth = Integer("download headers from s3 when the local height is more than 10 chunks behind", 96 * 10)
    components_to_skip = Strings("components which will be skipped during start-up of daemon", [])

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.set_default_paths()

    def set_default_paths(self):
        if 'win' in sys.platform:
            get_directories = get_windows_directories
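
Every knob is now a documented attribute with a typed default, so call sites read `conf.reflect_uploads` instead of `conf.settings['reflect_uploads']`:

```python
from lbrynet.conf import Config

conf = Config()
assert conf.reflect_uploads is True
assert conf.auto_re_reflect_interval == 86400            # 0 disables re-reflecting
assert ('reflector.lbry.io', 5566) in conf.reflector_servers
```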

@@ -333,523 +471,54 @@
            self.data_dir, 'daemon_settings.yml'
        )

    @classmethod
    def create_from_arguments(cls, args):
        conf = cls()
        conf.set_arguments(args)
        conf.set_environment()
        conf.set_persisted()
        return conf

    def set_arguments(self, args):
        self.arguments = ArgumentAccess(args)

    def set_environment(self, environ=None):
        self.environment = EnvironmentAccess(environ or os.environ)

    def set_persisted(self, config_file_path=None):
        if config_file_path is None:
            config_file_path = self.config

        ext = os.path.splitext(config_file_path)[1]
        assert ext in ('.yml', '.yaml'),\
            f"File extension '{ext}' is not supported, " \
            f"configuration file must be in YAML (.yaml)."

        self.persisted = ConfigFileAccess(self, config_file_path)


class CommandLineConfiguration(Configuration):
    pass


class ServerConfiguration(Configuration):

    # claims set to expire within this many blocks will be
    # automatically renewed after startup (if set to 0, renews
    # will not be made automatically)
    auto_renew_claim_height_delta = Integer(0)
    cache_time = Integer(150)
    data_rate = Float(.0001)  # points/megabyte
    delete_blobs_on_remove = Toggle(True)
    dht_node_port = Integer(4444)
    download_timeout = Integer(180)
    download_mirrors = Servers([
        ('blobs.lbry.io', 80)
    ])
    is_generous_host = Toggle(True)
    announce_head_blobs_only = Toggle(True)
    concurrent_announcers = Integer(10)
    known_dht_nodes = Servers([
        ('lbrynet1.lbry.io', 4444),  # US EAST
        ('lbrynet2.lbry.io', 4444),  # US WEST
        ('lbrynet3.lbry.io', 4444),  # EU
        ('lbrynet4.lbry.io', 4444)   # ASIA
    ])
    max_connections_per_stream = Integer(5)
    seek_head_blob_first = Toggle(True)
    # TODO: writing json on the cmd line is a pain, come up with a nicer
    # parser for this data structure. maybe 'USD:25'
    max_key_fee = MaxKeyFee({'currency': 'USD', 'amount': 50.0})
    disable_max_key_fee = Toggle(False)
    min_info_rate = Float(.02)  # points/1000 infos
    min_valuable_hash_rate = Float(.05)  # points/1000 infos
    min_valuable_info_rate = Float(.05)  # points/1000 infos
    peer_port = Integer(3333)
    pointtrader_server = String('http://127.0.0.1:2424')
    reflector_port = Integer(5566)
    # if reflect_uploads is True, send files to reflector after publishing (as well as a periodic check in the
    # event the initial upload failed or was disconnected part way through, provided the auto_re_reflect_interval > 0)
    reflect_uploads = Toggle(True)
    auto_re_reflect_interval = Integer(86400)  # set to 0 to disable
    reflector_servers = Servers([
        ('reflector.lbry.io', 5566)
    ])
    run_reflector_server = Toggle(False)  # adds `reflector` to components_to_skip unless True
    sd_download_timeout = Integer(3)
    peer_search_timeout = Integer(60)
    use_upnp = Toggle(True)
    use_keyring = Toggle(False)
    blockchain_name = String('lbrycrd_main')
    lbryum_servers = Servers([
        ('lbryumx1.lbry.io', 50001),
        ('lbryumx2.lbry.io', 50001)
    ])
    s3_headers_depth = Integer(96 * 10)  # download headers from s3 when the local height is more than 10 chunks behind
    components_to_skip = Strings([])  # components which will be skipped during start-up of daemon

ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

KB = 2 ** 10
MB = 2 ** 20


ANALYTICS_ENDPOINT = 'https://api.segment.io/v1'
ANALYTICS_TOKEN = 'Ax5LZzR1o3q3Z3WjATASDwR5rKyHH0qOIRIbLmMXn2H='
API_ADDRESS = 'lbryapi'
APP_NAME = 'LBRY'
BLOBFILES_DIR = 'blobfiles'
CRYPTSD_FILE_EXTENSION = '.cryptsd'
CURRENCIES = {
    'BTC': {'type': 'crypto'},
    'LBC': {'type': 'crypto'},
    'USD': {'type': 'fiat'},
}
DB_REVISION_FILE_NAME = 'db_revision'
ICON_PATH = 'icons' if 'win' in sys.platform else 'app.icns'
LOGGLY_TOKEN = 'BQEzZmMzLJHgAGxkBF00LGD0YGuyATVgAmqxAQEuAQZ2BQH4'
LOG_FILE_NAME = 'lbrynet.log'
LOG_POST_URL = 'https://lbry.io/log-upload'
MAX_BLOB_REQUEST_SIZE = 64 * KB
MAX_HANDSHAKE_SIZE = 64 * KB
MAX_REQUEST_SIZE = 64 * KB
MAX_RESPONSE_INFO_SIZE = 64 * KB
MAX_BLOB_INFOS_TO_REQUEST = 20
PROTOCOL_PREFIX = 'lbry'
SLACK_WEBHOOK = (
    'nUE0pUZ6Yl9bo29epl5moTSwnl5wo20ip2IlqzywMKZiIQSFZR5'
    'AHx4mY0VmF0WQZ1ESEP9kMHZlp1WzJwWOoKN3ImR1M2yUAaMyqGZ='
)
HEADERS_FILE_SHA256_CHECKSUM = (
    366295, 'b0c8197153a33ccbc52fb81a279588b6015b68b7726f73f6a2b81f7e25bfe4b9'
)


optional_str = typing.Optional[str]

class Config:
    def __init__(self, fixed_defaults, adjustable_defaults: typing.Dict, persisted_settings=None, environment=None,
                 cli_settings=None, data_dir: optional_str = None, wallet_dir: optional_str = None,
                 download_dir: optional_str = None, file_name: optional_str = None):
        self._installation_id = None
        self._session_id = base58.b58encode(utils.generate_id()).decode()
        self._node_id = None

        self._fixed_defaults = fixed_defaults

        # copy the default adjustable settings
        self._adjustable_defaults = {k: v for k, v in adjustable_defaults.items()}

        self._data = {
            TYPE_DEFAULT: {},    # defaults
            TYPE_PERSISTED: {},  # stored settings from daemon_settings.yml (or from a db, etc)
            TYPE_ENV: {},        # settings from environment variables
            TYPE_CLI: {},        # command-line arguments
            TYPE_RUNTIME: {},    # set during runtime (using self.set(), etc)
        }

        # the order in which a piece of data is searched for. earlier types override later types
        self._search_order = (
            TYPE_RUNTIME, TYPE_CLI, TYPE_ENV, TYPE_PERSISTED, TYPE_DEFAULT
        )

        # types of data where user specified config values can be stored
        self._user_specified = (
            TYPE_RUNTIME, TYPE_CLI, TYPE_ENV, TYPE_PERSISTED
        )

        self._data[TYPE_DEFAULT].update(self._fixed_defaults)
        self._data[TYPE_DEFAULT].update(
            {k: v[1] for (k, v) in self._adjustable_defaults.items()})

        if persisted_settings is None:
            persisted_settings = {}
        self._validate_settings(persisted_settings)
        self._data[TYPE_PERSISTED].update(persisted_settings)

        env_settings = self._parse_environment(environment)
        self._validate_settings(env_settings)
        self._data[TYPE_ENV].update(env_settings)

        if cli_settings is None:
            cli_settings = {}
        self._validate_settings(cli_settings)
        self._data[TYPE_CLI].update(cli_settings)
        self.file_name = file_name or 'daemon_settings.yml'

    @property
    def data_dir(self) -> optional_str:
        data_dir = self.get('data_dir')
        if not data_dir:
            return
        return os.path.expanduser(os.path.expandvars(data_dir))

    @property
    def download_dir(self) -> optional_str:
        download_dir = self.get('download_directory')
        if not download_dir:
            return
        return os.path.expanduser(os.path.expandvars(download_dir))

    @property
    def wallet_dir(self) -> optional_str:
        if self.get('lbryum_wallet_dir') and not self.get('wallet_dir'):
            log.warning("'lbryum_wallet_dir' setting will be deprecated, please update to 'wallet_dir'")
            self['wallet_dir'] = self['lbryum_wallet_dir']
        wallet_dir = self.get('wallet_dir')
        if not wallet_dir:
            return
        return os.path.expanduser(os.path.expandvars(wallet_dir))

    def __repr__(self):
        return self.get_current_settings_dict().__repr__()

    def __iter__(self):
        for k in self._data[TYPE_DEFAULT].keys():
            yield k

    def __getitem__(self, name):
        return self.get(name)

    def __setitem__(self, name, value):
        return self.set(name, value)

    def __contains__(self, name):
        return name in self._data[TYPE_DEFAULT]

    @staticmethod
    def _parse_environment(environment):
        env_settings = {}
        if environment is not None:
            assert isinstance(environment, Env)
            for opt in environment.original_schema:
                if environment(opt) is not None:
                    env_settings[opt] = environment(opt)
        return env_settings

    def _assert_valid_data_type(self, data_type):
        if data_type not in self._data:
            raise KeyError(f'{data_type} in is not a valid data type')

    def get_valid_setting_names(self):
        return self._data[TYPE_DEFAULT].keys()

    def _is_valid_setting(self, name):
        return name in self.get_valid_setting_names()

    def _assert_valid_setting(self, name):
        if not self._is_valid_setting(name):
            raise KeyError(f'{name} is not a valid setting')

    def _validate_settings(self, data):
        invalid_settings = set(data.keys()) - set(self.get_valid_setting_names())
        if len(invalid_settings) > 0:
            raise KeyError('invalid settings: {}'.format(', '.join(invalid_settings)))

    def _assert_editable_setting(self, name):
        self._assert_valid_setting(name)
        if name in self._fixed_defaults:
            raise ValueError(f'{name} is not an editable setting')

    def _assert_valid_setting_value(self, name, value):
        if name == "max_key_fee":
            currency = str(value["currency"]).upper()
            if currency not in self._fixed_defaults['CURRENCIES'].keys():
                raise InvalidCurrencyError(currency)
        elif name == "download_directory":
            directory = str(value)
            if not os.path.exists(directory):
                log.warning("download directory '%s' does not exist", directory)

    def is_default(self, name):
        """Check if a config value is wasn't specified by the user

        Args:
            name: the name of the value to check

        Returns: true if config value is the default one, false if it was specified by
            the user

        Sometimes it may be helpful to understand if a config value was specified
        by the user or if it still holds its default value. This function will return
        true when the config value is still the default. Note that when the user
        specifies a value that is equal to the default one, it will still be considered
        as 'user specified'
        """

        self._assert_valid_setting(name)
        for possible_data_type in self._user_specified:
            if name in self._data[possible_data_type]:
                return False
        return True

    def get(self, name, data_type=None):
        """Get a config value

        Args:
            name: the name of the value to get
            data_type: if given, get the value from a specific data set (see below)

        Returns: the config value for the given name

        If data_type is None, get() will search for the given name in each data set, in
        order of precedence. It will return the first value it finds. This is the "effective"
        value of a config name. For example, ENV values take precedence over DEFAULT values,
        so if a value is present in ENV and in DEFAULT, the ENV value will be returned
        """
        self._assert_valid_setting(name)
        if data_type is not None:
            self._assert_valid_data_type(data_type)
            return self._data[data_type][name]
        for possible_data_type in self._search_order:
            if name in self._data[possible_data_type]:
                return self._data[possible_data_type][name]
        raise KeyError(f'{name} is not a valid setting')
    def set(self, name, value, data_types):
        """Set a config value

        Args:
            name: the name of the value to set
            value: the value
            data_types: what type(s) of data this is

        Returns: None

        By default, this sets the RUNTIME value of a config. If you wish to set other
        data types (e.g. PERSISTED values to save to a file, CLI values from parsed
        command-line options, etc), you can specify that with the data_types param
        """
        self._assert_editable_setting(name)
        self._assert_valid_setting_value(name, value)

        for data_type in data_types:
            self._assert_valid_data_type(data_type)
            self._data[data_type][name] = value

    def update(self, updated_settings):
        for k, v in updated_settings.items():
            try:
                self.set(k, v, data_types=data_types)
            except (KeyError, AssertionError):
                pass

    def get_current_settings_dict(self):
        current_settings = {}
        for key in self.get_valid_setting_names():
            current_settings[key] = self.get(key)
        return current_settings

    def get_adjustable_settings_dict(self):
        return {
            key: val for key, val in self.get_current_settings_dict().items()
            if key in self._adjustable_defaults
        }

    def save_conf_file_settings(self):
        # reverse the conversions done after loading the settings from the conf
        # file
        rev = self._convert_conf_file_lists_reverse(self._data[TYPE_PERSISTED])
        ext = os.path.splitext(self.file_name)[1]
        encoder = settings_encoders.get(ext, False)
        if not encoder:
            raise ValueError('Unknown settings format: {}. Available formats: {}'
                             .format(ext, list(settings_encoders.keys())))
        with open(os.path.join(self.data_dir, self.file_name), 'w') as settings_file:
            settings_file.write(encoder(rev))

    @staticmethod
    def _convert_conf_file_lists_reverse(converted):
        rev = {}
        for k in converted.keys():
            if k in ADJUSTABLE_SETTINGS and len(ADJUSTABLE_SETTINGS[k]) == 4:
                rev[k] = ADJUSTABLE_SETTINGS[k][3](converted[k])
            else:
                rev[k] = converted[k]
        return rev

    @staticmethod
    def _convert_conf_file_lists(decoded):
        converted = {}
        for k, v in decoded.items():
            if k in ADJUSTABLE_SETTINGS and len(ADJUSTABLE_SETTINGS[k]) >= 3:
                converted[k] = ADJUSTABLE_SETTINGS[k][2](v)
            else:
                converted[k] = v
        return converted

    def initialize_post_conf_load(self):
        settings.installation_id = settings.get_installation_id()
        settings.node_id = settings.get_node_id()

    def load_conf_file_settings(self):
        path = os.path.join(self.data_dir or self.default_data_dir, self.file_name)
        if os.path.isfile(path):
            self._read_conf_file(path)
        self['data_dir'] = self.data_dir or self.default_data_dir
        self['download_directory'] = self.download_dir or self.default_download_dir
        self['wallet_dir'] = self.wallet_dir or self.default_wallet_dir
        # initialize members depending on config file
        self.initialize_post_conf_load()

    def _read_conf_file(self, path):
        if not path or not os.path.exists(path):
            raise FileNotFoundError(path)
        ext = os.path.splitext(path)[1]
        decoder = settings_decoders.get(ext, False)
        if not decoder:
            raise ValueError('Unknown settings format: {}. Available formats: {}'
                             .format(ext, list(settings_decoders.keys())))
        with open(path, 'r') as settings_file:
            data = settings_file.read()
        decoded = self._fix_old_conf_file_settings(decoder(data))
        log.info('Loaded settings file: %s', path)
        self._validate_settings(decoded)
        self._data[TYPE_PERSISTED].update(self._convert_conf_file_lists(decoded))

    def _fix_old_conf_file_settings(self, settings_dict):
        if 'API_INTERFACE' in settings_dict:
            settings_dict['api_host'] = settings_dict['API_INTERFACE']
            del settings_dict['API_INTERFACE']
        if 'startup_scripts' in settings_dict:
            del settings_dict['startup_scripts']
        if 'upload_log' in settings_dict:
            settings_dict['share_usage_data'] = settings_dict['upload_log']
            del settings_dict['upload_log']
        if 'share_debug_info' in settings_dict:
            settings_dict['share_usage_data'] = settings_dict['share_debug_info']
            del settings_dict['share_debug_info']
        for key in list(settings_dict.keys()):
            if not self._is_valid_setting(key):
                log.warning('Ignoring invalid conf file setting: %s', key)
                del settings_dict[key]
        return settings_dict

    def ensure_data_dir(self):
        # although there is a risk of a race condition here we don't
        # expect there to be multiple processes accessing this
        # directory so the risk can be ignored
        if not os.path.isdir(self.data_dir):
            os.makedirs(self.data_dir)
        if not os.path.isdir(os.path.join(self.data_dir, "blobfiles")):
            os.makedirs(os.path.join(self.data_dir, "blobfiles"))
        return self.data_dir

    def ensure_wallet_dir(self):
        if not os.path.isdir(self.wallet_dir):
            os.makedirs(self.wallet_dir)

    def ensure_download_dir(self):
        if not os.path.isdir(self.download_dir):
            os.makedirs(self.download_dir)

    def get_log_filename(self):
        """
        Return the log file for this platform.
        Also ensure the containing directory exists.
        """
        return os.path.join(self.ensure_data_dir(), self['LOG_FILE_NAME'])

    def get_api_connection_string(self, user: str = None, password: str = None) -> str:
        return 'http://%s%s:%i/%s' % (
            "" if not (user and password) else f"{user}:{password}@",
            self['api_host'],
            self['api_port'],
            self['API_ADDRESS']
        )

    def get_db_revision_filename(self):
        return os.path.join(self.ensure_data_dir(), self['DB_REVISION_FILE_NAME'])

    def get_installation_id(self):
        install_id_filename = os.path.join(self.ensure_data_dir(), "install_id")
        if not self._installation_id:
            if os.path.isfile(install_id_filename):
                with open(install_id_filename, "r") as install_id_file:
                    self._installation_id = str(install_id_file.read()).strip()
        if not self._installation_id:
            self._installation_id = base58.b58encode(utils.generate_id()).decode()
            with open(install_id_filename, "w") as install_id_file:
                install_id_file.write(self._installation_id)
        return self._installation_id

    def get_node_id(self):
        node_id_filename = os.path.join(self.ensure_data_dir(), "node_id")
        if not self._node_id:
            if os.path.isfile(node_id_filename):
                with open(node_id_filename, "r") as node_id_file:
                    self._node_id = base58.b58decode(str(node_id_file.read()).strip())
        if not self._node_id:
            self._node_id = utils.generate_id()
            with open(node_id_filename, "w") as node_id_file:
                node_id_file.write(base58.b58encode(self._node_id).decode())
        return self._node_id

    def get_session_id(self):
        return self._session_id

    def log_file_path(self):
        return os.path.join(self.data_dir, 'lbrynet.log')

settings: Config = None


def get_windows_directories() -> typing.Tuple[str, str, str]:
    from lbrynet.winpaths import get_path, FOLDERID, UserHandle

    download_dir = get_path(FOLDERID.Downloads, UserHandle.current)

    # old
    appdata = get_path(FOLDERID.RoamingAppData, UserHandle.current)
    data_dir = os.path.join(appdata, 'lbrynet')
    lbryum_dir = os.path.join(appdata, 'lbryum')
    if os.path.isdir(data_dir) or os.path.isdir(lbryum_dir):
        return data_dir, lbryum_dir, download_dir

    # new
    data_dir = user_data_dir('lbrynet', 'lbry')
    lbryum_dir = user_data_dir('lbryum', 'lbry')
    download_dir = get_path(FOLDERID.Downloads, UserHandle.current)
    return data_dir, lbryum_dir, download_dir


def get_default_env():
    env_defaults = {}
    for k, v in ADJUSTABLE_SETTINGS.items():
        if len(v) == 3:
            env_defaults[k] = (v[0], None, v[2])
        elif len(v) == 4:
            env_defaults[k] = (v[0], None, v[2], v[3])
        else:
            env_defaults[k] = (v[0], None)
    return Env(**env_defaults)


def get_darwin_directories() -> typing.Tuple[str, str, str]:
    data_dir = user_data_dir('LBRY')
    lbryum_dir = os.path.expanduser('~/.lbryum')
    download_dir = os.path.expanduser('~/Downloads')
    return data_dir, lbryum_dir, download_dir


def initialize_settings(load_conf_file: typing.Optional[bool] = True,
                        data_dir: optional_str = None, wallet_dir: optional_str = None,
                        download_dir: optional_str = None):
    global settings
    if settings is None:
        settings = Config(FIXED_SETTINGS, ADJUSTABLE_SETTINGS,
                          environment=get_default_env(), data_dir=data_dir, wallet_dir=wallet_dir,
                          download_dir=download_dir)
        if load_conf_file:
            settings.load_conf_file_settings()
        settings['data_dir'] = settings.data_dir or settings.default_data_dir
        settings['download_directory'] = settings.download_dir or settings.default_download_dir
        settings['wallet_dir'] = settings.wallet_dir or settings.default_wallet_dir
        settings.ensure_data_dir()
        settings.ensure_wallet_dir()
        settings.ensure_download_dir()


def get_linux_directories() -> typing.Tuple[str, str, str]:
    try:
        with open(os.path.join(user_config_dir(), 'user-dirs.dirs'), 'r') as xdg:
            down_dir = re.search(r'XDG_DOWNLOAD_DIR=(.+)', xdg.read()).group(1)
            down_dir = re.sub('\$HOME', os.getenv('HOME'), down_dir)
            download_dir = re.sub('\"', '', down_dir)
    except EnvironmentError:
        download_dir = os.getenv('XDG_DOWNLOAD_DIR')

    if not download_dir:
        download_dir = os.path.expanduser('~/Downloads')

    # old
    data_dir = os.path.expanduser('~/.lbrynet')
    lbryum_dir = os.path.expanduser('~/.lbryum')
    if os.path.isdir(data_dir) or os.path.isdir(lbryum_dir):
        return data_dir, lbryum_dir, download_dir

    # new
    return user_data_dir('lbry/lbrynet'), user_data_dir('lbry/lbryum'), download_dir

@@ -1,133 +1,47 @@
import sys
import os
import json
import asyncio
import argparse
import typing

from twisted.internet import asyncioreactor
if 'twisted.internet.reactor' not in sys.modules:
    asyncioreactor.install()
else:
    from twisted.internet import reactor
    if not isinstance(reactor, asyncioreactor.AsyncioSelectorReactor) and getattr(sys, 'frozen', False):
        # pyinstaller hooks install the default reactor before
        # any of our code runs, see kivy for similar problem:
        #    https://github.com/kivy/kivy/issues/4182
        del sys.modules['twisted.internet.reactor']
        asyncioreactor.install()
        from twisted.internet import reactor
import logging
from aiohttp.client_exceptions import ClientConnectorError
from requests.exceptions import ConnectionError
from docopt import docopt
from textwrap import dedent

from lbrynet import conf, log_support, __name__ as lbrynet_name
from lbrynet.utils import check_connection, json_dumps_pretty
import aiohttp
from lbrynet.extras.compat import force_asyncioreactor_install
force_asyncioreactor_install()

from lbrynet import log_support, __name__ as lbrynet_name, __version__ as lbrynet_version
from lbrynet.extras.daemon.loggly_handler import get_loggly_handler
from lbrynet.conf import Config, CLIConfig
from lbrynet.utils import check_connection
from lbrynet.extras.daemon.Daemon import Daemon
from lbrynet.extras.daemon.DaemonConsole import main as daemon_console, LBRYAPIClient
from lbrynet.extras.system_info import get_platform

log = logging.getLogger(lbrynet_name)
log.addHandler(logging.NullHandler())

optional_path_getter_type = typing.Optional[typing.Callable[[], str]]


def display(data):
    print(json.dumps(data["result"], indent=2))


def start_daemon(settings: typing.Optional[typing.Dict] = None,
                 console_output: typing.Optional[bool] = True, verbose: typing.Optional[typing.List[str]] = None,
                 data_dir: typing.Optional[str] = None, wallet_dir: typing.Optional[str] = None,
                 download_dir: typing.Optional[str] = None):

    settings = settings or {}
    conf.initialize_settings(data_dir=data_dir, wallet_dir=wallet_dir, download_dir=download_dir)
    for k, v in settings.items():
        conf.settings.update({k, v}, data_types=(conf.TYPE_CLI,))

    log_support.configure_logging(conf.settings.get_log_filename(), console_output, verbose)

    if conf.settings['share_usage_data']:
        loggly_handler = get_loggly_handler(conf.settings['LOGGLY_TOKEN'])
        loggly_handler.setLevel(logging.ERROR)
        log.addHandler(loggly_handler)

    log.debug('Final Settings: %s', conf.settings.get_current_settings_dict())
    log.info("Starting lbrynet-daemon from command line")

    if check_connection():
        daemon = Daemon()
        reactor._asyncioEventloop.create_task(daemon.start_listening())
        reactor.run()
    else:
        log.info("Not connected to internet, unable to start")


def start_daemon_with_cli_args(argv=None, data_dir: typing.Optional[str] = None,
                               wallet_dir: typing.Optional[str] = None, download_dir: typing.Optional[str] = None):
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--http-auth", dest="useauth", action="store_true", default=False
    )
    parser.add_argument(
        '--quiet', dest='quiet', action="store_true",
        help='Disable all console output.'
    )
    parser.add_argument(
        '--verbose', nargs="*",
        help=('Enable debug output. Optionally specify loggers for which debug output '
              'should selectively be applied.')
    )
    parser.add_argument(
        '--version', action="store_true",
        help='Show daemon version and quit'
    )

    args = parser.parse_args(argv)
    settings = {}
    if args.useauth:
        print('--http-auth is no longer supported; an alternative solution using IPC is forthcoming.')
        return

    verbose = None
    if args.verbose:
        verbose = args.verbose

    console_output = not args.quiet

    if args.version:
        print(json_dumps_pretty(get_platform()))
        return

    return start_daemon(settings, console_output, verbose, data_dir, wallet_dir, download_dir)


async def execute_command(method, params, data_dir: typing.Optional[str] = None,
                          wallet_dir: typing.Optional[str] = None, download_dir: typing.Optional[str] = None):
    # this check if the daemon is running or not
    conf.initialize_settings(data_dir=data_dir, wallet_dir=wallet_dir, download_dir=download_dir)
    api = None
    try:
        api = await LBRYAPIClient.get_client()
        await api.status()
    except (ClientConnectorError, ConnectionError):
        if api:
            await api.session.close()
        print("Could not connect to daemon. Are you sure it's running?")
        return 1

    # this actually executes the method
    resp = await api.call(method, params)

    try:
        await api.session.close()
        print(json.dumps(resp["result"], indent=2))
    except KeyError:
        if resp["error"]["code"] == -32500:
            print(json.dumps(resp["error"], indent=2))
        else:
            print(json.dumps(resp["error"]["message"], indent=2))


async def execute_command(conf, method, params):
    async with aiohttp.ClientSession() as session:
        try:
            message = {'method': method, 'params': params}
            async with session.get(conf.api_connection_url, json=message) as resp:
                try:
                    data = await resp.json()
                    if 'result' in data:
                        display(data['result'])
                    elif 'error' in data:
                        if 'message' in data['error']:
                            display(data['error']['message'])
                        else:
                            display(data['error'])
                except Exception as e:
                    log.exception('Could not process response from server:', exc_info=e)
        except aiohttp.ClientConnectionError:
            print("Could not connect to daemon. Are you sure it's running?")
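
The replacement `execute_command` drops the `LBRYAPIClient` wrapper and talks to the daemon directly with aiohttp, using the endpoint from `conf.api_connection_url`. Invocation from synchronous code looks roughly like this (the `status` method name is illustrative; `main()` below derives the real one from the parsed subcommand, and the `lbrynet.cli` module path is assumed):

```python
import asyncio
from lbrynet.conf import Config
from lbrynet.cli import execute_command   # module path assumed

conf = Config()   # or Config.create_from_arguments(args) inside main()
loop = asyncio.get_event_loop()
loop.run_until_complete(execute_command(conf, 'status', {}))
```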

def print_help():
@@ -201,82 +115,88 @@ def set_kwargs(parsed_args):
    return kwargs


def get_argument_parser():
    main = argparse.ArgumentParser('lbrynet')
    main.add_argument(
        '--version', dest='cli_version', action="store_true",
        help='Show lbrynet CLI version and exit.'
    )
    CLIConfig.contribute_args(main)
    sub = main.add_subparsers(dest='command')
    start = sub.add_parser('start', help='Start lbrynet server.')
    start.add_argument(
        '--quiet', dest='quiet', action="store_true",
        help='Disable all console output.'
    )
    start.add_argument(
        '--verbose', nargs="*",
        help=('Enable debug output. Optionally specify loggers for which debug output '
              'should selectively be applied.')
    )
    start.add_argument(
        '--version', action="store_true",
        help='Show daemon version and quit'
    )
    Config.contribute_args(start)
    api = Daemon.get_api_definitions()
    for group in sorted(api):
        group_command = sub.add_parser(group, help=api[group]['doc'])
        group_command.set_defaults(group_doc=group_command)
        commands = group_command.add_subparsers(dest='subcommand')
        for command in api[group]['commands']:
            commands.add_parser(command['name'], help=command['doc'].strip().splitlines()[0])
    return main


def main(argv=None):
    argv = argv or sys.argv[1:]
    if not argv:
        print_help()
        return 1
    parser = get_argument_parser()
    args = parser.parse_args(argv)

    dir_args = {}
    if len(argv) >= 2:
        dir_arg_keys = [
            'data_dir',
            'wallet_dir',
            'download_directory'
        ]
    conf = Config()

        for arg in argv:
            found_dir_arg = False
            for key in dir_arg_keys:
                if arg.startswith(f'--{key}='):
                    if key in dir_args:
                        print(f"Multiple values provided for '{key}' argument")
                        print_help()
                        return 1
                    dir_args[key] = os.path.expanduser(os.path.expandvars(arg.lstrip(f'--{key}=')))
                    found_dir_arg = True
            if not found_dir_arg:
                break
        argv = argv[len(dir_args):]
    if args.cli_version:
        print(f"{lbrynet_name} {lbrynet_version}")
        return 0

    data_dir = dir_args.get('data_dir')
    wallet_dir = dir_args.get('wallet_dir')
    download_dir = dir_args.get('download_directory')
    elif args.command == 'start':
        console_output = True
        verbose = True

    for k, v in dir_args.items():
        if not os.path.isdir(v):
            print(f"'{data_dir}' is not a directory, cannot use it for {k}")
            return 1
        log_support.configure_logging(conf.log_file_path, console_output, verbose)

    method, args = argv[0], argv[1:]
        if conf.share_usage_data:
            loggly_handler = get_loggly_handler()
            loggly_handler.setLevel(logging.ERROR)
            log.addHandler(loggly_handler)

    if method in ['help', '--help', '-h']:
        if len(args) == 1:
            print_help_for_command(args[0])
        log.debug('Final Settings: %s', conf.settings_dict)
        log.info("Starting lbrynet-daemon from command line")

        daemon = Daemon(conf)

        if check_connection():
            from twisted.internet import reactor
            reactor._asyncioEventloop.create_task(daemon.start())
            reactor.run()
        else:
            print_help()
        return 0
            log.info("Not connected to internet, unable to start")

    elif method in ['version', '--version', '-v']:
        print("{lbrynet_name} {lbrynet_version}".format(
            lbrynet_name=lbrynet_name, **get_platform()
        ))
        return 0
    elif args.command is not None:

    elif method == 'start':
        sys.exit(start_daemon_with_cli_args(args, data_dir, wallet_dir, download_dir))
        if args.subcommand is None:
            args.group_doc.print_help()

    elif method == 'console':
        sys.exit(daemon_console())
        else:
            method = f'{args.command}_{args.subcommand}'
            fn = Daemon.callable_methods[method]
            parsed = docopt(fn.__doc__, [method]+argv[2:])
            params = set_kwargs(parsed)
            loop = asyncio.get_event_loop()
            loop.run_until_complete(execute_command(conf, method, params))

    elif method not in Daemon.callable_methods:
        if method not in Daemon.deprecated_methods:
            print(f'{method} is not a valid command.')
            return 1

        new_method = Daemon.deprecated_methods[method].new_command
        if new_method is None:
            print(f"{method} is permanently deprecated and does not have a replacement command.")
            return 0

        print(f"{method} is deprecated, using {new_method}.")
        method = new_method

        fn = Daemon.callable_methods[method]
        parsed = docopt(fn.__doc__, args)
        params = set_kwargs(parsed)
        loop = asyncio.get_event_loop()
        loop.run_until_complete(execute_command(method, params, data_dir, wallet_dir, download_dir))
    else:
        parser.print_help()

    return 0
|
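The directory-flag pre-scan in `main()` above peels `--data_dir=`/`--wallet_dir=`/`--download_directory=` off the front of argv so the directories are known before any command dispatch. A minimal sketch of the prefix handling in isolation (helper name is hypothetical; `str.removeprefix` only landed in Python 3.9, hence the slice):

    import os

    def strip_dir_flag(arg: str, key: str) -> str:
        # '--data_dir=~/lbry' -> expanded path; slicing avoids the str.lstrip()
        # trap, which strips a character set rather than a literal prefix
        prefix = f'--{key}='
        if not arg.startswith(prefix):
            raise ValueError(f'{arg!r} does not carry {prefix!r}')
        return os.path.expanduser(os.path.expandvars(arg[len(prefix):]))

    print(strip_dir_flag('--data_dir=~/lbry/data', 'data_dir'))
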
@@ -1,5 +1,19 @@
import asyncio
from twisted.internet import defer


def force_asyncioreactor_install():
    import sys
    from twisted.internet import asyncioreactor
    if 'twisted.internet.reactor' not in sys.modules:
        asyncioreactor.install()
    else:
        from twisted.internet import reactor
        if not isinstance(reactor, asyncioreactor.AsyncioSelectorReactor) and getattr(sys, 'frozen', False):
            # pyinstaller hooks install the default reactor before
            # any of our code runs, see kivy for similar problem:
            # https://github.com/kivy/kivy/issues/4182
            del sys.modules['twisted.internet.reactor']
            asyncioreactor.install()


def d2f(deferred):

@@ -7,4 +21,5 @@ def d2f(deferred):


def f2d(future):
    from twisted.internet import defer
    return defer.Deferred.fromFuture(asyncio.ensure_future(future))

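The hunk above elides `d2f`'s body. For orientation, a plausible implementation plus a round trip, assuming Twisted's `Deferred.asFuture`/`Deferred.fromFuture` pair (available since Twisted 17.1) and that the asyncio reactor from `force_asyncioreactor_install` is installed; a sketch, not necessarily the repository's exact code:

    import asyncio
    from twisted.internet import defer

    def d2f(deferred: defer.Deferred) -> asyncio.Future:
        # wrap a Deferred so plain `await` works on it
        return deferred.asFuture(asyncio.get_event_loop())

    async def demo():
        # Deferred -> Future round trip
        result = await d2f(defer.succeed(42))
        assert result == 42
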
@@ -1,6 +1,8 @@
import logging
from twisted.internet import defer
from twisted._threads import AlreadyQuit

from lbrynet.conf import Config
from lbrynet.extras.daemon.ComponentManager import ComponentManager

log = logging.getLogger(__name__)

@@ -26,6 +28,7 @@ class Component(metaclass=ComponentType):
    component_name = None

    def __init__(self, component_manager):
        self.conf: Config = component_manager.conf
        self.component_manager = component_manager
        self._running = False


@@ -3,6 +3,7 @@ import logging
from lbrynet.p2p.Error import ComponentStartConditionNotMet
from lbrynet.extras.daemon.PeerManager import PeerManager
from lbrynet.extras.daemon.PeerFinder import DHTPeerFinder
from lbrynet.conf import Config

log = logging.getLogger(__name__)


@@ -34,8 +35,9 @@ class RequiredCondition(metaclass=RequiredConditionType):
class ComponentManager:
    default_component_classes = {}

    def __init__(self, reactor=None, analytics_manager=None, skip_components=None,
    def __init__(self, conf: Config, reactor=None, analytics_manager=None, skip_components=None,
                 peer_manager=None, peer_finder=None, **override_components):
        self.conf = conf
        self.skip_components = skip_components or []
        self.reactor = reactor
        self.component_classes = {}

@@ -55,6 +57,7 @@ class ComponentManager:

        for component_class in self.component_classes.values():
            self.components.add(component_class(self))
        self.daemon = None

    def evaluate_condition(self, condition_name):
        if condition_name not in RegisteredConditions.conditions:
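The pattern the two hunks above establish is dependency injection: `Config` enters once through `ComponentManager.__init__`, and every `Component` picks it up as `self.conf` instead of reaching for the old `conf.settings` global. A minimal sketch of a component written against that contract (the component itself is hypothetical, and `start`/`stop` signatures are illustrative of the base class):

    from lbrynet.conf import Config
    from lbrynet.extras.daemon.Component import Component

    class ExampleComponent(Component):
        component_name = 'example'

        def __init__(self, component_manager):
            super().__init__(component_manager)  # sets self.conf from the manager
            self.port = self.conf.peer_port      # settings are attributes, not globals

        async def start(self):
            pass

        async def stop(self):
            pass
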
@@ -14,8 +14,8 @@ from aioupnp.upnp import UPnP
from aioupnp.fault import UPnPError

import lbrynet.schema
from lbrynet import conf

from lbrynet.conf import HEADERS_FILE_SHA256_CHECKSUM
from lbrynet.extras.compat import d2f
from lbrynet.blob.EncryptedFileManager import EncryptedFileManager
from lbrynet.blob.client.EncryptedFileDownloader import EncryptedFileSaverFactory

@@ -90,25 +90,24 @@ class DatabaseComponent(Component):
    def get_current_db_revision():
        return 9

    @staticmethod
    def get_revision_filename():
        return conf.settings.get_db_revision_filename()
    @property
    def revision_filename(self):
        return self.component_manager.daemon.db_revision_file_path

    @staticmethod
    def _write_db_revision_file(version_num):
        with open(conf.settings.get_db_revision_filename(), mode='w') as db_revision:
    def _write_db_revision_file(self, version_num):
        with open(self.revision_filename, mode='w') as db_revision:
            db_revision.write(str(version_num))

    async def start(self):
        # check directories exist, create them if they don't
        log.info("Loading databases")

        if not os.path.exists(self.get_revision_filename()):
        if not os.path.exists(self.revision_filename):
            log.warning("db_revision file not found. Creating it")
            self._write_db_revision_file(self.get_current_db_revision())

        # check the db migration and run any needed migrations
        with open(self.get_revision_filename(), "r") as revision_read_handle:
        with open(self.revision_filename, "r") as revision_read_handle:
            old_revision = int(revision_read_handle.read().strip())

        if old_revision > self.get_current_db_revision():

@@ -119,13 +118,13 @@ class DatabaseComponent(Component):
        from lbrynet.extras.daemon.migrator import dbmigrator
        log.info("Upgrading your databases (revision %i to %i)", old_revision, self.get_current_db_revision())
        await asyncio.get_event_loop().run_in_executor(
            None, dbmigrator.migrate_db, conf.settings.data_dir, old_revision, self.get_current_db_revision()
            None, dbmigrator.migrate_db, self.conf.data_dir, old_revision, self.get_current_db_revision()
        )
        self._write_db_revision_file(self.get_current_db_revision())
        log.info("Finished upgrading the databases.")

        self.storage = SQLiteStorage(
            os.path.join(conf.settings.data_dir, "lbrynet.sqlite")
            self.conf, os.path.join(self.conf.data_dir, "lbrynet.sqlite")
        )
        await self.storage.open()

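The `db_revision` file used above is a one-line integer acting as a schema version stamp: first run writes the current revision so no migrations fire; later runs compare and migrate forward. A self-contained sketch of the same protocol with hypothetical names, to make the control flow explicit:

    import os

    CURRENT_DB_REVISION = 9  # mirrors get_current_db_revision() above

    def read_or_init_revision(path: str) -> int:
        # first run: stamp the current revision so no migrations run
        if not os.path.exists(path):
            with open(path, 'w') as f:
                f.write(str(CURRENT_DB_REVISION))
        with open(path, 'r') as f:
            return int(f.read().strip())

    old = read_or_init_revision('/tmp/db_revision')
    if old < CURRENT_DB_REVISION:
        pass  # run migrations old -> CURRENT_DB_REVISION, then rewrite the stamp
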
@@ -143,9 +142,9 @@ class HeadersComponent(Component):

    def __init__(self, component_manager):
        super().__init__(component_manager)
        self.headers_dir = os.path.join(conf.settings.wallet_dir, 'lbc_mainnet')
        self.headers_dir = os.path.join(self.conf.wallet_dir, 'lbc_mainnet')
        self.headers_file = os.path.join(self.headers_dir, 'headers')
        self.old_file = os.path.join(conf.settings.wallet_dir, 'blockchain_headers')
        self.old_file = os.path.join(self.conf.wallet_dir, 'blockchain_headers')
        self._downloading_headers = None
        self._headers_progress_percent = 0


@@ -204,8 +203,8 @@ class HeadersComponent(Component):
    async def get_remote_height(self):
        ledger = SimpleNamespace()
        ledger.config = {
            'default_servers': conf.settings['lbryum_servers'],
            'data_path': conf.settings.wallet_dir
            'default_servers': self.conf.lbryum_servers,
            'data_path': self.conf.wallet_dir
        }
        net = Network(ledger)
        first_connection = net.on_connected.first

@@ -216,10 +215,10 @@ class HeadersComponent(Component):
        return remote_height

    async def should_download_headers_from_s3(self):
        if conf.settings['blockchain_name'] != "lbrycrd_main":
        if self.conf.blockchain_name != "lbrycrd_main":
            return False
        self._check_header_file_integrity()
        s3_headers_depth = conf.settings['s3_headers_depth']
        s3_headers_depth = self.conf.s3_headers_depth
        if not s3_headers_depth:
            return False
        local_height = self.local_header_file_height()

@@ -231,10 +230,10 @@ class HeadersComponent(Component):

    def _check_header_file_integrity(self):
        # TODO: temporary workaround for usability. move to txlbryum and check headers instead of file integrity
        if conf.settings['blockchain_name'] != "lbrycrd_main":
        if self.conf.blockchain_name != "lbrycrd_main":
            return
        hashsum = sha256()
        checksum_height, checksum = conf.settings['HEADERS_FILE_SHA256_CHECKSUM']
        checksum_height, checksum = HEADERS_FILE_SHA256_CHECKSUM
        checksum_length_in_bytes = checksum_height * HEADER_SIZE
        if self.local_header_file_size() < checksum_length_in_bytes:
            return

@@ -252,7 +251,6 @@ class HeadersComponent(Component):
            headers_file.truncate(checksum_length_in_bytes)

    async def start(self):
        conf.settings.ensure_wallet_dir()
        if not os.path.exists(self.headers_dir):
            os.mkdir(self.headers_dir)
        if os.path.exists(self.old_file):
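The hunk above cuts off the hashing body of `_check_header_file_integrity`; only the setup and the final `truncate` survive. A hedged reconstruction of the idea, with hypothetical names: hash just the first `checksum_height * HEADER_SIZE` bytes of the headers file against the known checksum, and on mismatch truncate back to the verified prefix so only the tail re-syncs:

    from hashlib import sha256

    def prefix_checksum_ok(path: str, expected_hex: str, length_in_bytes: int) -> bool:
        # hash only the first `length_in_bytes` of the file, as the
        # integrity check above does for the checkpointed headers
        hashsum = sha256()
        with open(path, 'rb') as f:
            hashsum.update(f.read(length_in_bytes))
        return hashsum.hexdigest() == expected_hex
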
@@ -297,11 +295,10 @@ class WalletComponent(Component):
        }

    async def start(self):
        conf.settings.ensure_wallet_dir()
        log.info("Starting torba wallet")
        storage = self.component_manager.get_component(DATABASE_COMPONENT)
        lbrynet.schema.BLOCKCHAIN_NAME = conf.settings['blockchain_name']
        self.wallet_manager = await LbryWalletManager.from_lbrynet_config(conf.settings, storage)
        lbrynet.schema.BLOCKCHAIN_NAME = self.conf.blockchain_name
        self.wallet_manager = await LbryWalletManager.from_lbrynet_config(self.conf, storage)
        self.wallet_manager.old_db = storage
        await self.wallet_manager.start()


@@ -329,7 +326,7 @@ class BlobComponent(Component):
        dht_node = self.component_manager.get_component(DHT_COMPONENT)
        if dht_node:
            datastore = dht_node._dataStore
        self.blob_manager = DiskBlobManager(os.path.join(conf.settings.data_dir, "blobfiles"), storage, datastore)
        self.blob_manager = DiskBlobManager(os.path.join(self.conf.data_dir, "blobfiles"), storage, datastore)
        return self.blob_manager.setup()

    def stop(self):

@@ -359,15 +356,15 @@ class DHTComponent(Component):

    async def get_status(self):
        return {
            'node_id': binascii.hexlify(conf.settings.get_node_id()),
            'node_id': binascii.hexlify(self.component_manager.daemon.node_id),
            'peers_in_routing_table': 0 if not self.dht_node else len(self.dht_node.contacts)
        }

    async def start(self):
        self.upnp_component = self.component_manager.get_component(UPNP_COMPONENT)
        self.external_peer_port = self.upnp_component.upnp_redirects.get("TCP", conf.settings["peer_port"])
        self.external_udp_port = self.upnp_component.upnp_redirects.get("UDP", conf.settings["dht_node_port"])
        node_id = conf.settings.get_node_id()
        self.external_peer_port = self.upnp_component.upnp_redirects.get("TCP", self.conf.peer_port)
        self.external_udp_port = self.upnp_component.upnp_redirects.get("UDP", self.conf.dht_node_port)
        node_id = self.component_manager.daemon.node_id
        if node_id is None:
            node_id = generate_id()
        external_ip = self.upnp_component.external_ip

@@ -379,13 +376,13 @@ class DHTComponent(Component):

        self.dht_node = Node(
            node_id=node_id,
            udpPort=conf.settings['dht_node_port'],
            udpPort=self.conf.dht_node_port,
            externalUDPPort=self.external_udp_port,
            externalIP=external_ip,
            peerPort=self.external_peer_port
        )

        await d2f(self.dht_node.start(conf.settings['known_dht_nodes'], block_on_join=False))
        await d2f(self.dht_node.start(self.conf.known_dht_nodes, block_on_join=False))
        log.info("Started the dht")

    def stop(self):
|
|||
async def start(self):
|
||||
storage = self.component_manager.get_component(DATABASE_COMPONENT)
|
||||
dht_node = self.component_manager.get_component(DHT_COMPONENT)
|
||||
self.hash_announcer = DHTHashAnnouncer(dht_node, storage)
|
||||
self.hash_announcer = DHTHashAnnouncer(self.conf, dht_node, storage)
|
||||
self.hash_announcer.start()
|
||||
|
||||
def stop(self):
|
||||
|
@ -484,19 +481,22 @@ class FileManagerComponent(Component):
|
|||
sd_identifier = StreamDescriptorIdentifier()
|
||||
add_lbry_file_to_sd_identifier(sd_identifier)
|
||||
file_saver_factory = EncryptedFileSaverFactory(
|
||||
self.conf,
|
||||
self.component_manager.peer_finder,
|
||||
rate_limiter,
|
||||
blob_manager,
|
||||
storage,
|
||||
wallet,
|
||||
conf.settings.download_dir
|
||||
self.conf.download_dir
|
||||
)
|
||||
sd_identifier.add_stream_downloader_factory(EncryptedFileStreamType, file_saver_factory)
|
||||
|
||||
payment_rate_manager = self.component_manager.get_component(PAYMENT_RATE_COMPONENT)
|
||||
log.info('Starting the file manager')
|
||||
self.file_manager = EncryptedFileManager(self.component_manager.peer_finder, rate_limiter, blob_manager, wallet,
|
||||
payment_rate_manager, storage, sd_identifier)
|
||||
self.file_manager = EncryptedFileManager(
|
||||
self.conf, self.component_manager.peer_finder, rate_limiter,
|
||||
blob_manager, wallet, payment_rate_manager, storage, sd_identifier
|
||||
)
|
||||
return self.file_manager.setup()
|
||||
|
||||
def stop(self):
|
||||
|
@ -519,7 +519,7 @@ class PeerProtocolServerComponent(Component):
|
|||
async def start(self):
|
||||
wallet = self.component_manager.get_component(WALLET_COMPONENT)
|
||||
upnp = self.component_manager.get_component(UPNP_COMPONENT)
|
||||
peer_port = conf.settings['peer_port']
|
||||
peer_port = self.conf.peer_port
|
||||
query_handlers = {
|
||||
handler.get_primary_query_identifier(): handler for handler in [
|
||||
BlobRequestHandlerFactory(
|
||||
|
@ -560,7 +560,7 @@ class ReflectorComponent(Component):
|
|||
|
||||
def __init__(self, component_manager):
|
||||
super().__init__(component_manager)
|
||||
self.reflector_server_port = conf.settings['reflector_port']
|
||||
self.reflector_server_port = self.conf.reflector_port
|
||||
self.reflector_server = None
|
||||
|
||||
@property
|
||||
|
@ -591,9 +591,9 @@ class UPnPComponent(Component):
|
|||
|
||||
def __init__(self, component_manager):
|
||||
super().__init__(component_manager)
|
||||
self._int_peer_port = conf.settings['peer_port']
|
||||
self._int_dht_node_port = conf.settings['dht_node_port']
|
||||
self.use_upnp = conf.settings['use_upnp']
|
||||
self._int_peer_port = self.conf.peer_port
|
||||
self._int_dht_node_port = self.conf.dht_node_port
|
||||
self.use_upnp = self.conf.use_upnp
|
||||
self.upnp = None
|
||||
self.upnp_redirects = {}
|
||||
self.external_ip = None
|
||||
|
|
|
@@ -2,6 +2,7 @@ import os
import requests
import urllib
import textwrap
import random

from typing import Callable, Optional, List
from operator import itemgetter

@@ -45,6 +46,7 @@ from lbrynet.extras.daemon.ComponentManager import ComponentManager
from lbrynet.extras.looping_call_manager import LoopingCallManager
from lbrynet.p2p.Error import ComponentsNotStarted, ComponentStartConditionNotMet
from lbrynet.extras.daemon.json_response_encoder import JSONResponseEncoder
import base58

import asyncio
import logging

@@ -56,7 +58,7 @@ from twisted.internet import defer

from lbrynet import utils
from lbrynet.extras.daemon.undecorated import undecorated
from lbrynet import conf
from lbrynet.conf import Config, Setting, SLACK_WEBHOOK

from aiohttp import web


@@ -211,16 +213,6 @@ def sort_claim_results(claims):
    return claims


def is_first_run():
    if os.path.isfile(conf.settings.get_db_revision_filename()):
        return False
    if os.path.isfile(os.path.join(conf.settings.data_dir, 'lbrynet.sqlite')):
        return False
    if os.path.isfile(os.path.join(conf.settings.wallet_dir, 'blockchain_headers')):
        return False
    return True


DHT_HAS_CONTACTS = "dht_has_contacts"
WALLET_IS_UNLOCKED = "wallet_is_unlocked"

@@ -354,26 +346,29 @@ class Daemon(metaclass=JSONRPCServerType):

    allowed_during_startup = []

    def __init__(self, analytics_manager=None, component_manager=None):
        to_skip = conf.settings['components_to_skip']
        if 'reflector' not in to_skip and not conf.settings['run_reflector_server']:
    def __init__(self, conf: Config, component_manager: ComponentManager = None):
        self.conf = conf
        to_skip = conf.components_to_skip
        if 'reflector' not in to_skip and not conf.run_reflector_server:
            to_skip.append('reflector')
        looping_calls = {
            Checker.INTERNET_CONNECTION[0]: (LoopingCall(CheckInternetConnection(self)),
                                             Checker.INTERNET_CONNECTION[1])
        }
        self.analytics_manager = analytics_manager or analytics.Manager.new_instance()
        self._node_id = None
        self._installation_id = None
        self.session_id = base58.b58encode(utils.generate_id()).decode()
        self.analytics_manager = analytics.Manager(conf, self.installation_id, self.session_id)
        self.component_manager = component_manager or ComponentManager(
            analytics_manager=self.analytics_manager,
            skip_components=to_skip or [],
            conf, analytics_manager=self.analytics_manager, skip_components=to_skip or []
        )
        self.component_manager.daemon = self
        self.looping_call_manager = LoopingCallManager({n: lc for n, (lc, t) in (looping_calls or {}).items()})
        self._looping_call_times = {n: t for n, (lc, t) in (looping_calls or {}).items()}
        self.listening_port = None
        self._component_setup_task = None
        self.announced_startup = False
        self.sessions = {}
        self.is_first_run = is_first_run()

        # TODO: move this to a component
        self.connected_to_internet = True

@@ -402,17 +397,86 @@ class Daemon(metaclass=JSONRPCServerType):
        self.handler = self.app.make_handler()
        self.server = None

    async def start_listening(self):
    @classmethod
    def get_api_definitions(cls):
        groups = {}
        for method in dir(cls):
            if method.startswith('jsonrpc_'):
                parts = method.split('_', 2)
                group = command = parts[1]
                if len(parts) == 3:
                    command = parts[2]
                group_dict = {'doc': getattr(cls, f'{group.upper()}_DOC', ''), 'commands': []}
                groups.setdefault(group, group_dict)['commands'].append({
                    'name': command,
                    'doc': getattr(cls, method).__doc__
                })
        del groups['commands']
        del groups['help']
        return groups

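`get_api_definitions` derives the CLI's group/command tree purely from `jsonrpc_*` method names: the first underscore-separated token after the prefix is the group, anything after a second underscore is the command. A quick standalone illustration of the split:

    def split_api_method(method: str):
        # mirrors the parsing in get_api_definitions above
        parts = method.split('_', 2)
        group = command = parts[1]
        if len(parts) == 3:
            command = parts[2]
        return group, command

    assert split_api_method('jsonrpc_account_list') == ('account', 'list')
    assert split_api_method('jsonrpc_status') == ('status', 'status')
    assert split_api_method('jsonrpc_blob_reflect_all') == ('blob', 'reflect_all')
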
    @property
    def db_revision_file_path(self):
        return os.path.join(self.conf.data_dir, 'db_revision')

    @property
    def installation_id(self):
        install_id_filename = os.path.join(self.conf.data_dir, "install_id")
        if not self._installation_id:
            if os.path.isfile(install_id_filename):
                with open(install_id_filename, "r") as install_id_file:
                    self._installation_id = str(install_id_file.read()).strip()
        if not self._installation_id:
            self._installation_id = base58.b58encode(utils.generate_id()).decode()
            with open(install_id_filename, "w") as install_id_file:
                install_id_file.write(self._installation_id)
        return self._installation_id

    @property
    def node_id(self):
        node_id_filename = os.path.join(self.conf.data_dir, "node_id")
        if not self._node_id:
            if os.path.isfile(node_id_filename):
                with open(node_id_filename, "r") as node_id_file:
                    self._node_id = base58.b58decode(str(node_id_file.read()).strip())
        if not self._node_id:
            self._node_id = utils.generate_id()
            with open(node_id_filename, "w") as node_id_file:
                node_id_file.write(base58.b58encode(self._node_id).decode())
        return self._node_id

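Both properties above implement the same lazy, file-backed identity pattern (previously buried in `conf.settings`): read the ID from disk if present, otherwise mint one and persist it so it survives restarts. A condensed generic version with hypothetical names:

    import os

    def file_backed_id(path: str, generate) -> str:
        # return the persisted ID if present, otherwise mint one and save it
        if os.path.isfile(path):
            with open(path, 'r') as f:
                existing = f.read().strip()
            if existing:
                return existing
        new_id = generate()
        with open(path, 'w') as f:
            f.write(new_id)
        return new_id
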
    def ensure_data_dir(self):
        # although there is a risk of a race condition here we don't
        # expect there to be multiple processes accessing this
        # directory so the risk can be ignored
        if not os.path.isdir(self.conf.data_dir):
            os.makedirs(self.conf.data_dir)
        if not os.path.isdir(os.path.join(self.conf.data_dir, "blobfiles")):
            os.makedirs(os.path.join(self.conf.data_dir, "blobfiles"))
        return self.conf.data_dir

    def ensure_wallet_dir(self):
        if not os.path.isdir(self.conf.wallet_dir):
            os.makedirs(self.conf.wallet_dir)

    def ensure_download_dir(self):
        if not os.path.isdir(self.conf.download_dir):
            os.makedirs(self.conf.download_dir)

    async def start(self):
        self.ensure_data_dir()
        self.ensure_wallet_dir()
        self.ensure_download_dir()
        try:
            self.server = await asyncio.get_event_loop().create_server(
                self.handler, conf.settings['api_host'], conf.settings['api_port']
                self.handler, self.conf.api_host, self.conf.api_port
            )
            log.info('lbrynet API listening on TCP %s:%i', *self.server.sockets[0].getsockname()[:2])
            await self.setup()
            await self.analytics_manager.send_server_startup_success()
        except OSError:
            log.error('lbrynet API failed to bind TCP %s:%i for listening. Daemon is already running or this port is '
                      'already in use by another application.', conf.settings['api_host'], conf.settings['api_port'])
                      'already in use by another application.', self.conf.api_host, self.conf.api_port)
        except defer.CancelledError:
            log.info("shutting down before finished starting")
        except Exception as err:

@@ -673,10 +737,9 @@ class Daemon(metaclass=JSONRPCServerType):
        await self.analytics_manager.send_download_started(download_id, name, claim_dict)
        await self.analytics_manager.send_new_download_start(download_id, name, claim_dict)
        self.streams[sd_hash] = GetStream(
            self.file_manager.sd_identifier, self.wallet_manager, self.exchange_rate_manager, self.blob_manager,
            self.component_manager.peer_finder, self.rate_limiter, self.payment_rate_manager, self.storage,
            conf.settings['max_key_fee'], conf.settings['disable_max_key_fee'], conf.settings['data_rate'],
            timeout
            self.conf, self.file_manager.sd_identifier, self.wallet_manager, self.exchange_rate_manager,
            self.blob_manager, self.component_manager.peer_finder, self.rate_limiter, self.payment_rate_manager,
            self.storage, self.conf.max_key_fee, self.conf.disable_max_key_fee, self.conf.data_rate, timeout
        )
        try:
            lbry_file, finished_deferred = await d2f(self.streams[sd_hash].start(

@@ -713,8 +776,8 @@ class Daemon(metaclass=JSONRPCServerType):
            tx = await publisher.publish_stream(name, bid, claim_dict, stream_hash, claim_address)
        else:
            tx = await publisher.create_and_publish_stream(name, bid, claim_dict, file_path, claim_address)
        if conf.settings['reflect_uploads']:
            d = reupload.reflect_file(publisher.lbry_file)
        if self.conf.reflect_uploads:
            d = reupload.reflect_file(publisher.lbry_file, random.choice(self.conf.reflector_servers))
            d.addCallbacks(lambda _: log.info("Reflected new publication to lbry://%s", name),
                           log.exception)
        await self.analytics_manager.send_claim_action('publish')

@@ -734,8 +797,8 @@ class Daemon(metaclass=JSONRPCServerType):
            return self.blob_manager.get_blob(blob[0])
        return await d2f(download_sd_blob(
            sd_hash.decode(), self.blob_manager, self.component_manager.peer_finder, self.rate_limiter,
            self.payment_rate_manager, self.wallet_manager, timeout=conf.settings['peer_search_timeout'],
            download_mirrors=conf.settings['download_mirrors']
            self.payment_rate_manager, self.wallet_manager, timeout=self.conf.peer_search_timeout,
            download_mirrors=self.conf.download_mirrors
        ))

    def get_or_download_sd_blob(self, sd_hash):

@@ -763,7 +826,7 @@ class Daemon(metaclass=JSONRPCServerType):

        if self.payment_rate_manager.generous:
            return 0.0
        return size / (10 ** 6) * conf.settings['data_rate']
        return size / (10 ** 6) * self.conf.data_rate

    async def get_est_cost_using_known_size(self, uri, size):
        """

@@ -832,7 +895,7 @@ class Daemon(metaclass=JSONRPCServerType):
        key = hexlify(lbry_file.key) if lbry_file.key else None
        download_directory = lbry_file.download_directory
        if not os.path.exists(download_directory):
            download_directory = conf.settings.download_dir
            download_directory = self.conf.download_dir
        full_path = os.path.join(download_directory, lbry_file.file_name)
        mime_type = guess_media_type(lbry_file.file_name)
        if os.path.isfile(full_path):

@@ -925,15 +988,15 @@ class Daemon(metaclass=JSONRPCServerType):
        return field, direction

    def _get_single_peer_downloader(self):
        downloader = SinglePeerDownloader()
        downloader = SinglePeerDownloader(self.conf)
        downloader.setup(self.wallet_manager)
        return downloader

    async def _blob_availability(self, blob_hash, search_timeout, blob_timeout, downloader=None):
        if not downloader:
            downloader = self._get_single_peer_downloader()
        search_timeout = search_timeout or conf.settings['peer_search_timeout']
        blob_timeout = blob_timeout or conf.settings['sd_download_timeout']
        search_timeout = search_timeout or self.conf.peer_search_timeout
        blob_timeout = blob_timeout or self.conf.sd_download_timeout
        reachable_peers = []
        unreachable_peers = []
        try:

@@ -1000,7 +1063,6 @@ class Daemon(metaclass=JSONRPCServerType):
            {
                'installation_id': (str) installation id - base58,
                'is_running': (bool),
                'is_first_run': bool,
                'skipped_components': (list) [names of skipped components (str)],
                'startup_status': { Does not include components which have been skipped
                    'database': (bool),

@@ -1060,9 +1122,8 @@ class Daemon(metaclass=JSONRPCServerType):

        connection_code = CONNECTION_STATUS_CONNECTED if self.connected_to_internet else CONNECTION_STATUS_NETWORK
        response = {
            'installation_id': conf.settings.installation_id,
            'installation_id': self.installation_id,
            'is_running': all(self.component_manager.get_components_status().values()),
            'is_first_run': self.is_first_run,
            'skipped_components': self.component_manager.skip_components,
            'startup_status': self.component_manager.get_components_status(),
            'connection_status': {

@@ -1118,16 +1179,25 @@ class Daemon(metaclass=JSONRPCServerType):
        Returns:
            (bool) true if successful
        """

        platform_name = system_info.get_platform()['platform']
        report_bug_to_slack(
            message,
            conf.settings.installation_id,
            platform_name,
            __version__
        query = get_loggly_query_string(self.installation_id)
        requests.post(
            utils.deobfuscate(SLACK_WEBHOOK),
            json.dumps({
                "text": (
                    f"os: {platform_name}\n "
                    f"version: {__version__}\n"
                    f"<{query}|loggly>\n"
                    f"{message}"
                )
            })
        )
        return True

    SETTINGS_DOC = """
    Settings management.
    """

    def jsonrpc_settings_get(self):
        """
        Get daemon settings

@@ -1142,7 +1212,7 @@ class Daemon(metaclass=JSONRPCServerType):
            (dict) Dictionary of daemon settings
            See ADJUSTABLE_SETTINGS in lbrynet/conf.py for full list of settings
        """
        return conf.settings.get_adjustable_settings_dict()
        return self.conf.settings_dict

    def jsonrpc_settings_set(self, **kwargs):
        """

@@ -1194,42 +1264,11 @@ class Daemon(metaclass=JSONRPCServerType):
        Returns:
            (dict) Updated dictionary of daemon settings
        """

        # TODO: improve upon the current logic, it could be made better
        new_settings = kwargs

        setting_types = {
            'download_directory': str,
            'data_rate': float,
            'download_timeout': int,
            'peer_port': int,
            'max_key_fee': dict,
            'use_upnp': bool,
            'run_reflector_server': bool,
            'cache_time': int,
            'reflect_uploads': bool,
            'share_usage_data': bool,
            'disable_max_key_fee': bool,
            'peer_search_timeout': int,
            'sd_download_timeout': int,
            'auto_renew_claim_height_delta': int
        }

        for key, setting_type in setting_types.items():
            if key in new_settings:
                if isinstance(new_settings[key], setting_type):
                    conf.settings.update({key: new_settings[key]},
                                         data_types=(conf.TYPE_RUNTIME, conf.TYPE_PERSISTED))
                elif setting_type is dict and isinstance(new_settings[key], str):
                    decoded = json.loads(str(new_settings[key]))
                    conf.settings.update({key: decoded},
                                         data_types=(conf.TYPE_RUNTIME, conf.TYPE_PERSISTED))
                else:
                    converted = setting_type(new_settings[key])
                    conf.settings.update({key: converted},
                                         data_types=(conf.TYPE_RUNTIME, conf.TYPE_PERSISTED))
        conf.settings.save_conf_file_settings()
        return conf.settings.get_adjustable_settings_dict()
        with self.conf.update_config() as c:
            for key, value in kwargs.items():
                attr: Setting = getattr(type(c), key)
                setattr(c, key, attr.deserialize(value))
        return self.jsonrpc_settings_get()

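The rewritten `settings_set` leans on the new typed `Setting` descriptors: each incoming value is converted by the descriptor's own `deserialize`, replacing the old hand-maintained `setting_types` table, and `update_config()` persists everything on context exit. A hedged usage sketch against a running `Daemon` instance (setting names are examples; the exact deserialization rules live in the new conf module):

    # hypothetical call; each string is deserialized by its Setting descriptor
    daemon.jsonrpc_settings_set(
        download_timeout='180',  # -> int
        use_upnp='false',        # -> bool
    )
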
    def jsonrpc_help(self, command=None):
        """

@@ -1281,7 +1320,7 @@ class Daemon(metaclass=JSONRPCServerType):

    @deprecated("account_balance")
    def jsonrpc_wallet_balance(self, address=None):
        pass
        """ deprecated """

    @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
    async def jsonrpc_wallet_send(self, amount, address=None, claim_id=None, account_id=None):

@@ -1349,6 +1388,10 @@ class Daemon(metaclass=JSONRPCServerType):
        result = await self.jsonrpc_claim_tip(claim_id=claim_id, amount=amount, account_id=account_id)
        return result

    ACCOUNT_DOC = """
    Account management.
    """

    @requires("wallet")
    def jsonrpc_account_list(self, account_id=None, confirmations=6,
                             include_claims=False, show_seed=False):

@@ -1728,6 +1771,10 @@ class Daemon(metaclass=JSONRPCServerType):
        await self.analytics_manager.send_credits_sent()
        return result

    ADDRESS_DOC = """
    Address management.
    """

    @requires(WALLET_COMPONENT)
    def jsonrpc_address_is_mine(self, address, account_id=None):
        """

@@ -1789,6 +1836,10 @@ class Daemon(metaclass=JSONRPCServerType):
        """
        return self.get_account_or_default(account_id).receiving.get_or_create_usable_address()

    FILE_DOC = """
    File management.
    """

    @requires(FILE_MANAGER_COMPONENT)
    async def jsonrpc_file_list(self, sort=None, **kwargs):
        """

@@ -1882,6 +1933,10 @@ class Daemon(metaclass=JSONRPCServerType):
        except UnknownNameError:
            log.info('Name %s is not known', name)

    CLAIM_DOC = """
    Claim management.
    """

    @requires(WALLET_COMPONENT)
    async def jsonrpc_claim_show(self, txid=None, nout=None, claim_id=None):
        """

@@ -2061,7 +2116,7 @@ class Daemon(metaclass=JSONRPCServerType):
        }
        """

        timeout = timeout if timeout is not None else conf.settings['download_timeout']
        timeout = timeout if timeout is not None else self.conf.download_timeout

        parsed_uri = parse_lbry_uri(uri)
        if parsed_uri.is_channel:

@@ -2214,6 +2269,10 @@ class Daemon(metaclass=JSONRPCServerType):
        """
        return self.get_est_cost(uri, size)

    CHANNEL_DOC = """
    Channel management.
    """

    @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
    async def jsonrpc_channel_new(self, channel_name, amount, account_id=None):
        """

@@ -2849,6 +2908,10 @@ class Daemon(metaclass=JSONRPCServerType):
        results[u]['claims_in_channel'] = resolved[u].get('claims_in_channel', [])
        return results

    TRANSACTION_DOC = """
    Transaction management.
    """

    @requires(WALLET_COMPONENT)
    def jsonrpc_transaction_list(self, account_id=None, page=None, page_size=None):
        """

@@ -2932,6 +2995,10 @@ class Daemon(metaclass=JSONRPCServerType):
        """
        return self.wallet_manager.get_transaction(txid)

    UTXO_DOC = """
    Unspent transaction management.
    """

    @requires(WALLET_COMPONENT)
    def jsonrpc_utxo_list(self, account_id=None, page=None, page_size=None):
        """

@@ -3007,6 +3074,10 @@ class Daemon(metaclass=JSONRPCServerType):
        """
        return self.wallet_manager.get_block(blockhash, height)

    BLOB_DOC = """
    Blob management.
    """

    @requires(WALLET_COMPONENT, DHT_COMPONENT, BLOB_COMPONENT, RATE_LIMITER_COMPONENT, PAYMENT_RATE_COMPONENT,
              conditions=[WALLET_IS_UNLOCKED])
    async def jsonrpc_blob_get(self, blob_hash, timeout=None, encoding=None, payment_rate_manager=None):

@@ -3100,7 +3171,7 @@ class Daemon(metaclass=JSONRPCServerType):
            err.trap(defer.TimeoutError)
            return []

        finished_deferred.addTimeout(timeout or conf.settings['peer_search_timeout'], self.dht_node.clock)
        finished_deferred.addTimeout(timeout or self.conf.peer_search_timeout, self.dht_node.clock)
        finished_deferred.addErrback(trap_timeout)
        peers = await d2f(finished_deferred)
        results = [

@@ -3175,7 +3246,7 @@ class Daemon(metaclass=JSONRPCServerType):
        elif not lbry_files:
            raise Exception('No file found')
        return await d2f(reupload.reflect_file(
            lbry_files[0], reflector_server=kwargs.get('reflector', None)
            lbry_files[0], reflector_server=kwargs.get('reflector', random.choice(self.conf.reflector_servers))
        ))

    @requires(BLOB_COMPONENT, WALLET_COMPONENT)

@@ -3253,8 +3324,9 @@ class Daemon(metaclass=JSONRPCServerType):
        Returns:
            (list) reflected blob hashes
        """
        result = await d2f(reupload.reflect_blob_hashes(blob_hashes, self.blob_manager, reflector_server))
        return result
        return await d2f(reupload.reflect_blob_hashes(
            blob_hashes, self.blob_manager, reflector_server or random.choice(self.conf.reflector_servers)
        ))

    @requires(BLOB_COMPONENT)
    async def jsonrpc_blob_reflect_all(self):

@@ -3271,7 +3343,9 @@ class Daemon(metaclass=JSONRPCServerType):
            (bool) true if successful
        """
        blob_hashes = await d2f(self.blob_manager.get_all_verified_blobs())
        return await d2f(reupload.reflect_blob_hashes(blob_hashes, self.blob_manager))
        return await d2f(reupload.reflect_blob_hashes(
            blob_hashes, self.blob_manager, random.choice(self.conf.reflector_servers)
        ))

    @requires(DHT_COMPONENT)
    async def jsonrpc_peer_ping(self, node_id, address=None, port=None):

@@ -3427,8 +3501,8 @@ class Daemon(metaclass=JSONRPCServerType):
        }
        """

        search_timeout = search_timeout or conf.settings['peer_search_timeout']
        blob_timeout = blob_timeout or conf.settings['sd_download_timeout']
        search_timeout = search_timeout or self.conf.peer_search_timeout
        blob_timeout = blob_timeout or self.conf.sd_download_timeout

        response = {
            'is_available': False,

@@ -3440,7 +3514,7 @@ class Daemon(metaclass=JSONRPCServerType):
            'sd_blob_availability': {},
            'head_blob_hash': None,
            'head_blob_availability': {},
            'use_upnp': conf.settings['use_upnp'],
            'use_upnp': self.conf.use_upnp,
            'upnp_redirect_is_set': len(self.upnp.upnp_redirects),
            'error': None
        }

@@ -3557,21 +3631,6 @@ def get_loggly_query_string(installation_id):
    return base_loggly_search_url + data


def report_bug_to_slack(message, installation_id, platform_name, app_version):
    webhook = utils.deobfuscate(conf.settings['SLACK_WEBHOOK'])
    payload_template = "os: %s\n version: %s\n<%s|loggly>\n%s"
    payload_params = (
        platform_name,
        app_version,
        get_loggly_query_string(installation_id),
        message
    )
    payload = {
        "text": payload_template % payload_params
    }
    requests.post(webhook, json.dumps(payload))


def get_lbry_file_search_value(search_fields):
    for searchtype in FileID:
        value = search_fields.get(searchtype, None)
@@ -1,284 +0,0 @@
import sys
import code
import argparse
import asyncio
import logging.handlers
from twisted.internet import defer, reactor, threads
from aiohttp import client_exceptions

from lbrynet import utils, conf, log_support
from lbrynet.extras.daemon import analytics
from lbrynet.extras.daemon.Daemon import Daemon
import aiohttp
import logging
from urllib.parse import urlparse


log = logging.getLogger(__name__)
USER_AGENT = "AuthServiceProxy/0.1"
TWISTED_SECURE_SESSION = "TWISTED_SECURE_SESSION"
TWISTED_SESSION = "TWISTED_SESSION"
LBRY_SECRET = "LBRY_SECRET"
HTTP_TIMEOUT = 30


class JSONRPCException(Exception):
    def __init__(self, rpc_error):
        super().__init__()
        self.error = rpc_error


class UnAuthAPIClient:
    def __init__(self, host, port, session):
        self.host = host
        self.port = port
        self.session = session

    def __getattr__(self, method):
        async def f(*args, **kwargs):
            return await self.call(method, [args, kwargs])

        return f

    @classmethod
    async def from_url(cls, url):
        url_fragment = urlparse(url)
        host = url_fragment.hostname
        port = url_fragment.port
        connector = aiohttp.TCPConnector()
        session = aiohttp.ClientSession(connector=connector)
        return cls(host, port, session)

    async def call(self, method, params=None):
        message = {'method': method, 'params': params}
        async with self.session.get(conf.settings.get_api_connection_string(), json=message) as resp:
            return await resp.json()


class LBRYAPIClient:
    @staticmethod
    def get_client(conf_path=None):
        conf.conf_file = conf_path
        if not conf.settings:
            conf.initialize_settings()
        return UnAuthAPIClient.from_url(conf.settings.get_api_connection_string())


if sys.platform.startswith('darwin') or sys.platform.startswith('linux'):
    def color(msg, c="white"):
        _colors = {
            "normal": (0, 37),
            "underlined": (2, 37),
            "red": (1, 31),
            "green": (1, 32),
            "yellow": (1, 33),
            "blue": (1, 33),
            "magenta": (1, 34),
            "cyan": (1, 35),
            "white": (1, 36),
            "grey": (1, 37)
        }
        i, j = _colors[c]
        return "\033[%i;%i;40m%s\033[0m" % (i, j, msg)


    logo = """\
    [multi-line LBRY logo rendered in box-drawing characters; alignment not preserved by the diff extraction]
    """
else:
    def color(msg, c=None):
        return msg


    logo = """\
    [multi-line LBRY logo rendered in ASCII '+' characters; alignment not preserved by the diff extraction]
    """

welcometext = """\
For a list of available commands:
    >>>help()

To see the documentation for a given command:
    >>>help("resolve")

To exit:
    >>>exit()
"""

welcome = "{:*^60}\n".format(" Welcome to the lbrynet interactive console! ")
welcome += "\n".join([f"{w:<60}" for w in welcometext.splitlines()])
welcome += "\n%s" % ("*" * 60)
welcome = color(welcome, "grey")
banner = color(logo, "green") + color(welcome, "grey")


def get_methods(daemon):
    locs = {}

    def wrapped(name, fn):
        client = LBRYAPIClient.get_client()
        _fn = getattr(client, name)
        _fn.__doc__ = fn.__doc__
        return {name: _fn}

    for method_name, method in daemon.callable_methods.items():
        locs.update(wrapped(method_name, method))
    return locs


def run_terminal(callable_methods, started_daemon, quiet=False):
    locs = {}
    locs.update(callable_methods)

    def help(method_name=None):
        if not method_name:
            print("Available api functions: ")
            for name in callable_methods:
                print("\t%s" % name)
            return
        if method_name not in callable_methods:
            print("\"%s\" is not a recognized api function")
            return
        print(callable_methods[method_name].__doc__)
        return

    locs.update({'help': help})

    if started_daemon:
        def exit(status=None):
            if not quiet:
                print("Stopping lbrynet-daemon...")
            callable_methods['daemon_stop']()
            return sys.exit(status)

        locs.update({'exit': exit})
    else:
        def exit(status=None):
            try:
                reactor.callLater(0, reactor.stop)
            except Exception as err:
                print(f"error stopping reactor: {err}")
            return sys.exit(status)

        locs.update({'exit': exit})

    code.interact(banner if not quiet else "", local=locs)


@defer.inlineCallbacks
def start_server_and_listen(use_auth, analytics_manager, quiet):
    log_support.configure_console()
    logging.getLogger("lbrynet").setLevel(logging.CRITICAL)
    logging.getLogger("lbryum").setLevel(logging.CRITICAL)
    logging.getLogger("requests").setLevel(logging.CRITICAL)

    # TODO: turn this all into async. Until then this routine can't be called
    # analytics_manager.send_server_startup()
    yield Daemon().start_listening()


def threaded_terminal(started_daemon, quiet):
    callable_methods = get_methods(Daemon)
    d = threads.deferToThread(run_terminal, callable_methods, started_daemon, quiet)
    d.addErrback(lambda err: err.trap(SystemExit))
    d.addErrback(log.exception)


async def start_lbrynet_console(quiet, use_existing_daemon, useauth):
    if not utils.check_connection():
        print("Not connected to internet, unable to start")
        raise Exception("Not connected to internet, unable to start")
    if not quiet:
        print("Starting lbrynet-console...")
    try:
        await LBRYAPIClient.get_client().status()
        d = defer.succeed(False)
        if not quiet:
            print("lbrynet-daemon is already running, connecting to it...")
    except client_exceptions.ClientConnectorError:
        if not use_existing_daemon:
            if not quiet:
                print("Starting lbrynet-daemon...")
            analytics_manager = analytics.Manager.new_instance()
            d = start_server_and_listen(useauth, analytics_manager, quiet)
        else:
            raise Exception("cannot connect to an existing daemon instance, "
                            "and set to not start a new one")
    d.addCallback(threaded_terminal, quiet)
    d.addErrback(log.exception)


def main():
    conf.initialize_settings()
    parser = argparse.ArgumentParser(description="Launch lbrynet-daemon")
    parser.add_argument(
        "--use_existing_daemon",
        help="Start lbrynet-daemon if it isn't already running",
        action="store_true",
        default=False,
        dest="use_existing_daemon"
    )
    parser.add_argument(
        "--quiet", dest="quiet", action="store_true", default=False
    )
    parser.add_argument(
        "--http-auth", dest="useauth", action="store_true", default=conf.settings['use_auth_http']
    )
    args = parser.parse_args()
    if args.useauth:
        print('--http-auth is no longer supported; an alternative solution using IPC is forthcoming.')
        return
    loop = asyncio.get_event_loop()
    loop.run_until_complete(start_lbrynet_console(args.quiet, args.use_existing_daemon, args.useauth))
    reactor.run()


if __name__ == "__main__":
    main()

@@ -2,7 +2,7 @@ import logging
import os
from twisted.internet import defer

from lbrynet import conf
from lbrynet.conf import Config
from lbrynet.schema.fee import Fee

from lbrynet.p2p.Error import InsufficientFundsError, KeyFeeAboveMaxAllowed, InvalidStreamDescriptorError

@@ -31,17 +31,18 @@ log = logging.getLogger(__name__)


class GetStream:
    def __init__(self, sd_identifier, wallet, exchange_rate_manager, blob_manager, peer_finder, rate_limiter,
                 payment_rate_manager, storage, max_key_fee, disable_max_key_fee, data_rate=None, timeout=None,
                 reactor=None):
    def __init__(self, conf: Config, sd_identifier, wallet, exchange_rate_manager, blob_manager, peer_finder,
                 rate_limiter, payment_rate_manager, storage, max_key_fee, disable_max_key_fee, data_rate=None,
                 timeout=None, reactor=None):
        if not reactor:
            from twisted.internet import reactor
        self.conf = conf
        self.reactor = reactor
        self.timeout = timeout or conf.settings['download_timeout']
        self.data_rate = data_rate or conf.settings['data_rate']
        self.max_key_fee = max_key_fee or conf.settings['max_key_fee'][1]
        self.disable_max_key_fee = disable_max_key_fee or conf.settings['disable_max_key_fee']
        self.download_directory = conf.settings.download_dir
        self.timeout = timeout or conf.download_timeout
        self.data_rate = data_rate or conf.data_rate
        self.max_key_fee = max_key_fee or conf.max_key_fee
        self.disable_max_key_fee = disable_max_key_fee or conf.disable_max_key_fee
        self.download_directory = conf.download_dir
        self.timeout_counter = 0
        self.code = None
        self.sd_hash = None
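A small caution on the `value or conf.attr` defaulting pattern carried over above: `or` treats every falsy value as "not provided", so an explicit `data_rate=0` or `disable_max_key_fee=False` silently falls back to the config value. A tiny sketch of the stricter alternative:

    def pick(value, default):
        # keeps 0 / False as deliberate caller choices; only None falls back
        return value if value is not None else default

    assert pick(0, 5) == 0          # `0 or 5` would have given 5
    assert pick(False, True) is False
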
@@ -154,7 +155,7 @@ class GetStream:
    def _download_sd_blob(self):
        sd_blob = yield download_sd_blob(
            self.sd_hash, self.blob_manager, self.peer_finder, self.rate_limiter, self.payment_rate_manager,
            self.wallet, self.timeout, conf.settings['download_mirrors']
            self.wallet, self.timeout, self.conf.download_mirrors
        )
        defer.returnValue(sd_blob)


@@ -3,27 +3,29 @@ import logging

from twisted.internet import defer, task
from lbrynet.extras.compat import f2d
from lbrynet import utils, conf
from lbrynet import utils
from lbrynet.conf import Config

log = logging.getLogger(__name__)


class DHTHashAnnouncer:
    def __init__(self, dht_node, storage, concurrent_announcers=None):
    def __init__(self, conf: Config, dht_node, storage, concurrent_announcers=None):
        self.conf = conf
        self.dht_node = dht_node
        self.storage = storage
        self.clock = dht_node.clock
        self.peer_port = dht_node.peerPort
        self.hash_queue = []
        if concurrent_announcers is None:
            self.concurrent_announcers = conf.settings['concurrent_announcers']
            self.concurrent_announcers = conf.concurrent_announcers
        else:
            self.concurrent_announcers = concurrent_announcers
        self._manage_lc = None
        if self.concurrent_announcers:
            self._manage_lc = task.LoopingCall(self.manage)
            self._manage_lc.clock = self.clock
        self.sem = defer.DeferredSemaphore(self.concurrent_announcers or conf.settings['concurrent_announcers'] or 1)
        self.sem = defer.DeferredSemaphore(self.concurrent_announcers or conf.concurrent_announcers or 1)

    def start(self):
        if self._manage_lc:
@@ -2,7 +2,6 @@ import binascii
import logging

from twisted.internet import defer
from lbrynet import conf

log = logging.getLogger(__name__)


@@ -59,7 +58,7 @@ class DHTPeerFinder(DummyPeerFinder):
    def _execute_peer_search(self, dht_node, blob_hash, timeout):
        bin_hash = binascii.unhexlify(blob_hash)
        finished_deferred = dht_node.iterativeFindValue(bin_hash, exclude=self.peers[blob_hash])
        timeout = timeout or conf.settings['peer_search_timeout']
        timeout = timeout or self.component_manager.conf.peer_search_timeout
        if timeout:
            finished_deferred.addTimeout(timeout, dht_node.clock)
        try:
@@ -1,12 +1,12 @@
import asyncio
import collections
import logging

import asyncio
import aiohttp

from lbrynet import conf, utils
from lbrynet import utils
from lbrynet.conf import Config, ANALYTICS_ENDPOINT, ANALYTICS_TOKEN
from lbrynet.extras import system_info
from lbrynet.extras.daemon.storage import looping_call

# Things We Track
SERVER_STARTUP = 'Server Startup'

@@ -28,22 +28,64 @@ log = logging.getLogger(__name__)


class Manager:
    def __init__(self, analytics_api, context=None, installation_id=None, session_id=None):
        self.analytics_api = analytics_api

    def __init__(self, conf: Config, installation_id: str, session_id: str):
        self.cookies = {}
        self.url = ANALYTICS_ENDPOINT
        self._write_key = utils.deobfuscate(ANALYTICS_TOKEN)
        self._enabled = conf.share_usage_data
        self._tracked_data = collections.defaultdict(list)
        self.looping_tasks = {}
        self.context = context or self._make_context(
            system_info.get_platform(), conf.settings['wallet'])
        self.installation_id = installation_id or conf.settings.installation_id
        self.session_id = session_id or conf.settings.get_session_id()
        self.is_started = False
        self.context = self._make_context(system_info.get_platform(), 'torba')
        self.installation_id = installation_id
        self.session_id = session_id
        self.task: asyncio.Task = None

    @classmethod
    def new_instance(cls, enabled=None):
        api = Api.new_instance(enabled)
        return cls(api)
    def start(self):
        if self._enabled and self.task is None:
            self.task = asyncio.create_task(self.run())
        log.info("Start")

    async def run(self):
        while True:
            await self._send_heartbeat()
            await asyncio.sleep(1800)

    def stop(self):
        if self.task is not None and not self.task.done():
            self.task.cancel()

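The new `Manager` replaces the looping-call plumbing with a single cancellable asyncio task: `start()` spawns `run()`, which heartbeats every 30 minutes until `stop()` cancels it. A minimal standalone sketch of that lifecycle, with the interval shortened and all names illustrative:

    import asyncio

    class Heartbeat:
        def __init__(self, enabled=True):
            self.enabled = enabled
            self.task = None

        def start(self):
            # idempotent: only one task, only when enabled
            if self.enabled and self.task is None:
                self.task = asyncio.create_task(self.run())

        async def run(self):
            while True:
                print("heartbeat")
                await asyncio.sleep(0.1)

        def stop(self):
            if self.task is not None and not self.task.done():
                self.task.cancel()

    async def demo():
        hb = Heartbeat()
        hb.start()
        await asyncio.sleep(0.35)  # ~3 heartbeats
        hb.stop()

    asyncio.run(demo())
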
async def _post(self, endpoint, data):
|
||||
# there is an issue with a timing condition with keep-alive
|
||||
# that is best explained here: https://github.com/mikem23/keepalive-race
|
||||
#
|
||||
# If you make a request, wait just the right amount of time,
|
||||
# then make another request, the requests module may opt to
|
||||
# reuse the connection, but by the time the server gets it the
|
||||
# timeout will have expired.
|
||||
#
|
||||
# by forcing the connection to close, we will disable the keep-alive.
|
||||
|
||||
assert endpoint[0] == '/'
|
||||
request_kwargs = {
|
||||
'method': 'POST',
|
||||
'url': self.url + endpoint,
|
||||
'headers': {'Connection': 'Close'},
|
||||
'auth': aiohttp.BasicAuth(self._write_key, ''),
|
||||
'json': data,
|
||||
'cookies': self.cookies
|
||||
}
|
||||
try:
|
||||
async with aiohttp.request(**request_kwargs) as response:
|
||||
self.cookies.update(response.cookies)
|
||||
except Exception as e:
|
||||
log.exception('Encountered an exception while POSTing to %s: ', self.url + endpoint, exc_info=e)
|
||||

    async def track(self, event):
        """Send a single tracking event"""
        if self._enabled:
            log.debug('Sending track event: %s', event)
            await self._post('/track', event)

    # Things We Track
    async def send_new_download_start(self, download_id, name, claim_dict):
        await self._send_new_download_stats("start", download_id, name, claim_dict)

@@ -57,7 +99,7 @@ class Manager:
        })

    async def _send_new_download_stats(self, action, download_id, name, claim_dict, e=None):
        await self.analytics_api.track({
        await self.track({
            'userId': 'lbry',  # required, see https://segment.com/docs/sources/server/http/#track
            'event': NEW_DOWNLOAD_STAT,
            'properties': self._event_properties({

@@ -72,7 +114,7 @@ class Manager:
        })

    async def send_upnp_setup_success_fail(self, success, status):
        await self.analytics_api.track(
        await self.track(
            self._event(UPNP_SETUP, {
                'success': success,
                'status': status,

@@ -80,71 +122,44 @@ class Manager:
        )

    async def send_server_startup(self):
        await self.analytics_api.track(self._event(SERVER_STARTUP))
        await self.track(self._event(SERVER_STARTUP))

    async def send_server_startup_success(self):
        await self.analytics_api.track(self._event(SERVER_STARTUP_SUCCESS))
        await self.track(self._event(SERVER_STARTUP_SUCCESS))

    async def send_server_startup_error(self, message):
        await self.analytics_api.track(self._event(SERVER_STARTUP_ERROR, {'message': message}))
        await self.track(self._event(SERVER_STARTUP_ERROR, {'message': message}))

    async def send_download_started(self, id_, name, claim_dict=None):
        await self.analytics_api.track(
        await self.track(
            self._event(DOWNLOAD_STARTED, self._download_properties(id_, name, claim_dict))
        )

    async def send_download_errored(self, err, id_, name, claim_dict, report):
        download_error_properties = self._download_error_properties(err, id_, name, claim_dict,
                                                                    report)
        await self.analytics_api.track(self._event(DOWNLOAD_ERRORED, download_error_properties))
        await self.track(self._event(DOWNLOAD_ERRORED, download_error_properties))

    async def send_download_finished(self, id_, name, report, claim_dict=None):
        download_properties = self._download_properties(id_, name, claim_dict, report)
        await self.analytics_api.track(self._event(DOWNLOAD_FINISHED, download_properties))
        await self.track(self._event(DOWNLOAD_FINISHED, download_properties))

    async def send_claim_action(self, action):
        await self.analytics_api.track(self._event(CLAIM_ACTION, {'action': action}))
        await self.track(self._event(CLAIM_ACTION, {'action': action}))

    async def send_new_channel(self):
        await self.analytics_api.track(self._event(NEW_CHANNEL))
        await self.track(self._event(NEW_CHANNEL))

    async def send_credits_sent(self):
        await self.analytics_api.track(self._event(CREDITS_SENT))
        await self.track(self._event(CREDITS_SENT))

    async def _send_heartbeat(self):
        await self.analytics_api.track(self._event(HEARTBEAT))
        await self.track(self._event(HEARTBEAT))

    async def _update_tracked_metrics(self):
        should_send, value = self.summarize_and_reset(BLOB_BYTES_UPLOADED)
        if should_send:
            await self.analytics_api.track(self._metric_event(BLOB_BYTES_UPLOADED, value))

    # Setup / Shutdown

    def start(self):
        if not self.is_started:
            for name, fn, secs in self._get_looping_calls():
                self.looping_tasks[name] = asyncio.create_task(looping_call(secs, fn))
            self.is_started = True
            log.info("Start")

    def shutdown(self):
        if self.is_started:
            try:
                for name, task in self.looping_tasks.items():
                    if task:
                        task.cancel()
                        self.looping_tasks[name] = None
                log.info("Stopped analytics looping calls")
                self.is_started = False
            except Exception as e:
                log.exception('Got exception when trying to cancel tasks in analytics: ', exc_info=e)

    def _get_looping_calls(self) -> list:
        return [
            ('send_heartbeat', self._send_heartbeat, 300),
            ('update_tracked_metrics', self._update_tracked_metrics, 600),
        ]
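    # `looping_call` is imported from elsewhere in the package; a minimal sketch
    # of such a helper (an assumption, not necessarily the project's actual
    # implementation) to show what the removed start() above was scheduling:
    #
    #   import asyncio
    #
    #   async def looping_call(interval, fn):
    #       # invoke `fn` immediately, then every `interval` seconds
    #       while True:
    #           await fn()
    #           await asyncio.sleep(interval)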
            await self.track(self._metric_event(BLOB_BYTES_UPLOADED, value))

    def add_observation(self, metric, value):
        self._tracked_data[metric].append(value)

@@ -229,57 +244,3 @@ class Manager:
        context['os']['desktop'] = platform['desktop']
        context['os']['distro'] = platform['distro']
        return context


class Api:
    def __init__(self, cookies, url, write_key, enabled):
        self.cookies = cookies
        self.url = url
        self._write_key = write_key
        self._enabled = enabled

    async def _post(self, endpoint, data):
        # there is an issue with a timing condition with keep-alive
        # that is best explained here: https://github.com/mikem23/keepalive-race
        #
        # If you make a request, wait just the right amount of time,
        # then make another request, the requests module may opt to
        # reuse the connection, but by the time the server gets it the
        # timeout will have expired.
        #
        # by forcing the connection to close, we will disable the keep-alive.

        assert endpoint[0] == '/'
        request_kwargs = {
            'method': 'POST',
            'url': self.url + endpoint,
            'headers': {'Connection': 'Close'},
            'auth': aiohttp.BasicAuth(self._write_key, ''),
            'json': data,
            'cookies': self.cookies
        }
        try:
            async with aiohttp.request(**request_kwargs) as response:
                self.cookies.update(response.cookies)
        except Exception as e:
            log.exception('Encountered an exception while POSTing to %s: ', self.url + endpoint, exc_info=e)

    async def track(self, event):
        """Send a single tracking event"""
        if not self._enabled:
            return 'Analytics disabled'

        log.debug('Sending track event: %s', event)
        await self._post('/track', event)

    @classmethod
    def new_instance(cls, enabled=None):
        """Initialize an instance using values from the configuration"""
        if enabled is None:
            enabled = conf.settings['share_usage_data']
        return cls(
            {},
            conf.settings['ANALYTICS_ENDPOINT'],
            utils.deobfuscate(conf.settings['ANALYTICS_TOKEN']),
            enabled,
        )

@@ -6,6 +6,9 @@ import traceback
from lbrynet import utils, __version__


LOGGLY_TOKEN = 'BQEzZmMzLJHgAGxkBF00LGD0YGuyATVgAmqxAQEuAQZ2BQH4'


class JsonFormatter(logging.Formatter):
    """Format log records using json serialization"""

@@ -55,7 +58,7 @@ class HTTPSLogglyHandler(logging.Handler):
        asyncio.ensure_future(self._emit(record))


def get_loggly_handler(loggly_token):
def get_loggly_handler(loggly_token=LOGGLY_TOKEN):
    handler = HTTPSLogglyHandler(loggly_token)
    handler.setFormatter(JsonFormatter())
    return handler

@@ -3,7 +3,7 @@ import logging
log = logging.getLogger(__name__)


def migrate_db(db_dir, start, end):
def migrate_db(conf, start, end):
    current = start
    while current < end:
        if current == 1:

@@ -25,7 +25,7 @@ def migrate_db(db_dir, start, end):
        else:
            raise Exception("DB migration of version {} to {} is not available".format(current,
                                                                                       current+1))
        do_migration(db_dir)
        do_migration(conf)
        current += 1
        log.info("successfully migrated the database from revision %i to %i", current - 1, current)
    return None
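# A hedged usage sketch of the new conf-based signature (revision numbers are
# illustrative): each loop iteration dispatches to the matching migrator's
# do_migration and advances one revision at a time:
#
#   from lbrynet.conf import Config
#   migrate_db(Config(), 5, 7)  # runs migrations 5->6 and 6->7 in order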

@@ -5,9 +5,9 @@ import logging
log = logging.getLogger(__name__)
UNSET_NOUT = -1

def do_migration(db_dir):
def do_migration(conf):
    log.info("Doing the migration")
    migrate_blockchainname_db(db_dir)
    migrate_blockchainname_db(conf.data_dir)
    log.info("Migration succeeded")

@@ -5,9 +5,9 @@ import logging
log = logging.getLogger(__name__)


def do_migration(db_dir):
def do_migration(conf):
    log.info("Doing the migration")
    migrate_blockchainname_db(db_dir)
    migrate_blockchainname_db(conf.data_dir)
    log.info("Migration succeeded")

@@ -5,9 +5,9 @@ import logging
log = logging.getLogger(__name__)


def do_migration(db_dir):
def do_migration(conf):
    log.info("Doing the migration")
    migrate_blobs_db(db_dir)
    migrate_blobs_db(conf.data_dir)
    log.info("Migration succeeded")

@@ -5,9 +5,9 @@ import logging
log = logging.getLogger(__name__)


def do_migration(db_dir):
def do_migration(conf):
    log.info("Doing the migration")
    add_lbry_file_metadata(db_dir)
    add_lbry_file_metadata(conf.data_dir)
    log.info("Migration succeeded")

@@ -3,7 +3,6 @@ import os
import json
import logging
from binascii import hexlify
from lbrynet import conf
from lbrynet.schema.decode import smart_decode

log = logging.getLogger(__name__)

@@ -104,13 +103,13 @@ def verify_sd_blob(sd_hash, blob_dir):
    return decoded, sd_length


def do_migration(db_dir):
    new_db_path = os.path.join(db_dir, "lbrynet.sqlite")
def do_migration(conf):
    new_db_path = os.path.join(conf.data_dir, "lbrynet.sqlite")
    connection = sqlite3.connect(new_db_path)

    metadata_db = sqlite3.connect(os.path.join(db_dir, "blockchainname.db"))
    lbryfile_db = sqlite3.connect(os.path.join(db_dir, 'lbryfile_info.db'))
    blobs_db = sqlite3.connect(os.path.join(db_dir, 'blobs.db'))
    metadata_db = sqlite3.connect(os.path.join(conf.data_dir, "blockchainname.db"))
    lbryfile_db = sqlite3.connect(os.path.join(conf.data_dir, 'lbryfile_info.db'))
    blobs_db = sqlite3.connect(os.path.join(conf.data_dir, 'blobs.db'))

    name_metadata_cursor = metadata_db.cursor()
    lbryfile_cursor = lbryfile_db.cursor()

@@ -186,7 +185,7 @@ def do_migration(db_dir):
            (stream_hash, blob_hash, position, iv)
        )

    download_dir = conf.settings.download_dir
    download_dir = conf.download_dir
    if not isinstance(download_dir, bytes):
        download_dir = download_dir.encode()

@@ -278,7 +277,7 @@ def do_migration(db_dir):

    # recover damaged streams
    if damaged_stream_sds:
        blob_dir = os.path.join(db_dir, "blobfiles")
        blob_dir = os.path.join(conf.data_dir, "blobfiles")
        damaged_sds_on_disk = [] if not os.path.isdir(blob_dir) else list({p for p in os.listdir(blob_dir)
                                                                           if p in damaged_stream_sds})
        for damaged_sd in damaged_sds_on_disk:

@@ -316,7 +315,7 @@ def do_migration(db_dir):
            log.warning("detected a failed previous migration to revision 6, repairing it")
            connection.close()
            os.remove(new_db_path)
            return do_migration(db_dir)
            return do_migration(conf)
        raise err

    connection.close()

@@ -2,8 +2,8 @@ import sqlite3
import os


def do_migration(db_dir):
    db_path = os.path.join(db_dir, "lbrynet.sqlite")
def do_migration(conf):
    db_path = os.path.join(conf.data_dir, "lbrynet.sqlite")
    connection = sqlite3.connect(db_path)
    cursor = connection.cursor()
    cursor.executescript("alter table blob add last_announced_time integer;")

@@ -2,8 +2,8 @@ import sqlite3
import os


def do_migration(db_dir):
    db_path = os.path.join(db_dir, "lbrynet.sqlite")
def do_migration(conf):
    db_path = os.path.join(conf.data_dir, "lbrynet.sqlite")
    connection = sqlite3.connect(db_path)
    cursor = connection.cursor()

@@ -9,9 +9,9 @@ from lbrynet.blob.CryptBlob import CryptBlobInfo
log = logging.getLogger(__name__)


def do_migration(db_dir):
    db_path = os.path.join(db_dir, "lbrynet.sqlite")
    blob_dir = os.path.join(db_dir, "blobfiles")
def do_migration(conf):
    db_path = os.path.join(conf.data_dir, "lbrynet.sqlite")
    blob_dir = os.path.join(conf.data_dir, "blobfiles")
    connection = sqlite3.connect(db_path)
    cursor = connection.cursor()

@@ -5,7 +5,7 @@ import traceback
import typing
from binascii import hexlify, unhexlify
from lbrynet.extras.wallet.dewies import dewies_to_lbc, lbc_to_dewies
from lbrynet import conf
from lbrynet.conf import Config
from lbrynet.schema.claim import ClaimDict
from lbrynet.schema.decode import smart_decode
from lbrynet.blob.CryptBlob import CryptBlobInfo

@@ -132,15 +132,16 @@ class SQLiteStorage(SQLiteMixin):
    );
    """

    def __init__(self, path, loop=None):
    def __init__(self, conf: Config, path, loop=None):
        super().__init__(path)
        self.conf = conf
        self.content_claim_callbacks = {}
        self.check_should_announce_lc = None
        self.loop = loop or asyncio.get_event_loop()

    async def open(self):
        await super().open()
        if 'reflector' not in conf.settings['components_to_skip']:
        if 'reflector' not in self.conf.components_to_skip:
            self.check_should_announce_lc = looping_call(
                600, self.verify_will_announce_all_head_and_sd_blobs
            )

@@ -236,7 +237,7 @@ class SQLiteStorage(SQLiteMixin):
    def get_blobs_to_announce(self):
        def get_and_update(transaction):
            timestamp = self.loop.time()
            if conf.settings['announce_head_blobs_only']:
            if self.conf.announce_head_blobs_only:
                r = transaction.execute(
                    "select blob_hash from blob "
                    "where blob_hash is not null and "

@@ -797,7 +798,7 @@ class SQLiteStorage(SQLiteMixin):
            "select s.sd_hash from stream s "
            "left outer join reflected_stream r on s.sd_hash=r.sd_hash "
            "where r.timestamp is null or r.timestamp < ?",
            self.loop.time() - conf.settings['auto_re_reflect_interval']
            self.loop.time() - self.conf.auto_re_reflect_interval
        )
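# Construction under the new signature, as a hedged sketch (the in-memory path
# matches what the tests later in this commit actually pass):
#
#   from lbrynet.conf import Config
#   storage = SQLiteStorage(Config(), ':memory:')
#   await storage.open()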

@@ -1,7 +1,4 @@
import random

from twisted.internet import reactor, defer
from lbrynet import conf
from lbrynet.extras.reflector.client.client import EncryptedFileReflectorClientFactory
from lbrynet.extras.reflector.client.blob import BlobReflectorClientFactory

@@ -48,40 +45,19 @@ def _reflect_blobs(blob_manager, blob_hashes, reflector_server):
    return result


def reflect_file(lbry_file, reflector_server=None):
    if reflector_server:
        if len(reflector_server.split(":")) == 2:
            host, port = tuple(reflector_server.split(":"))
            reflector_server = host, int(port)
        else:
            reflector_server = reflector_server, 5566
def reflect_file(lbry_file, reflector_server):
    if len(reflector_server.split(":")) == 2:
        host, port = tuple(reflector_server.split(":"))
        reflector_server = host, int(port)
    else:
        reflector_server = random.choice(conf.settings['reflector_servers'])
        reflector_server = reflector_server, 5566
    return _reflect_file(lbry_file, reflector_server)


@defer.inlineCallbacks
def reflect_stream(blob_manager, stream_hash, reflector_server=None):
    if reflector_server:
        if len(reflector_server.split(":")) == 2:
            host, port = tuple(reflector_server.split(":"))
            reflector_server = host, int(port)
        else:
            reflector_server = reflector_server, 5566
def reflect_blob_hashes(blob_hashes, blob_manager, reflector_server):
    if len(reflector_server.split(":")) == 2:
        host, port = tuple(reflector_server.split(":"))
        reflector_server = host, int(port)
    else:
        reflector_server = random.choice(conf.settings['reflector_servers'])
    sd_hash = yield blob_manager.storage.get_sd_blob_hash_for_stream(stream_hash)
    result = yield _reflect_stream(blob_manager, stream_hash, sd_hash, reflector_server)
    defer.returnValue(result)


def reflect_blob_hashes(blob_hashes, blob_manager, reflector_server=None):
    if reflector_server:
        if len(reflector_server.split(":")) == 2:
            host, port = tuple(reflector_server.split(":"))
            reflector_server = host, int(port)
        else:
            reflector_server = reflector_server, 5566
    else:
        reflector_server = random.choice(conf.settings['reflector_servers'])
        reflector_server = reflector_server, 5566
    return _reflect_blobs(blob_manager, blob_hashes, reflector_server)
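# The "host:port" parsing convention used above, isolated as a hedged sketch
# (the helper name is illustrative, not part of this module): an explicit port
# is honored, otherwise the default reflector port 5566 is assumed:
#
#   def parse_reflector_server(server: str, default_port: int = 5566) -> tuple:
#       if len(server.split(":")) == 2:
#           host, port = server.split(":")
#           return host, int(port)
#       return server, default_port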

@@ -1,13 +1,11 @@
from decimal import Decimal
from lbrynet import conf
from lbrynet.p2p.Strategy import get_default_strategy, OnlyFreeStrategy


class BasePaymentRateManager:
    def __init__(self, rate=None, info_rate=None):
        self.min_blob_data_payment_rate = rate if rate is not None else conf.settings['data_rate']
        self.min_blob_info_payment_rate = (
            info_rate if info_rate is not None else conf.settings['min_info_rate'])
    def __init__(self, rate, info_rate):
        self.min_blob_data_payment_rate = rate
        self.min_blob_info_payment_rate = info_rate

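# With the conf.settings fallbacks removed, callers now pass both rates
# explicitly; a hedged construction sketch using the new Config object (the
# attribute names match those used by the tests in this commit):
#
#   from lbrynet.conf import Config
#   conf = Config()
#   base_prm = BasePaymentRateManager(conf.data_rate, conf.min_info_rate)
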
class PaymentRateManager:

@@ -37,7 +35,7 @@ class PaymentRateManager:


class NegotiatedPaymentRateManager:
    def __init__(self, base, availability_tracker, generous=None):
    def __init__(self, base, availability_tracker, generous):
        """
        @param base: a BasePaymentRateManager
        @param availability_tracker: a BlobAvailabilityTracker

@@ -48,10 +46,10 @@ class NegotiatedPaymentRateManager:
        self.min_blob_data_payment_rate = base.min_blob_data_payment_rate
        self.points_paid = 0.0
        self.blob_tracker = availability_tracker
        self.generous = generous if generous is not None else conf.settings['is_generous_host']
        self.strategy = get_default_strategy(self.blob_tracker,
                                             base_price=self.base.min_blob_data_payment_rate,
                                             is_generous=generous)
        self.generous = generous
        self.strategy = get_default_strategy(
            self.blob_tracker, self.base.min_blob_data_payment_rate, generous
        )

    def get_rate_blob_data(self, peer, blobs):
        response = self.strategy.make_offer(peer, blobs)

@@ -1,5 +1,4 @@
from decimal import Decimal
from lbrynet import conf


def get_default_price_model(blob_tracker, base_price, **kwargs):

@@ -26,9 +25,8 @@ class MeanAvailabilityWeightedPrice:

    """

    def __init__(self, tracker, base_price=None, alpha=1.0):
    def __init__(self, tracker, base_price, alpha=1.0):
        self.blob_tracker = tracker
        base_price = base_price if base_price is not None else conf.settings['data_rate']
        self.base_price = Decimal(base_price)
        self.alpha = Decimal(alpha)

@@ -14,6 +14,7 @@ from lbrynet.p2p.client.StandaloneBlobDownloader import StandaloneBlobDownloader
from lbrynet.p2p.client.ConnectionManager import ConnectionManager
from lbrynet.extras.daemon.storage import SQLiteStorage
from lbrynet.extras.daemon.PeerFinder import DummyPeerFinder
from lbrynet.conf import Config


log = logging.getLogger(__name__)

@@ -60,7 +61,8 @@ class SingleBlobDownloadManager:


class SinglePeerDownloader:
    def __init__(self):
    def __init__(self, conf: Config):
        self.conf = conf
        self._payment_rate_manager = OnlyFreePaymentsManager()
        self._rate_limiter = DummyRateLimiter()
        self._wallet = None

@@ -81,7 +83,7 @@ class SinglePeerDownloader:
        peer_finder = SinglePeerFinder(peer)
        requester = BlobRequester(blob_manager, peer_finder, self._payment_rate_manager,
                                  self._wallet, download_manager)
        downloader = StandaloneBlobDownloader(blob_hash, blob_manager, peer_finder,
        downloader = StandaloneBlobDownloader(self.conf, blob_hash, blob_manager, peer_finder,
                                              self._rate_limiter, self._payment_rate_manager,
                                              self._wallet, timeout=timeout)
        info_exchanger = self._wallet.get_info_exchanger()

@@ -96,7 +98,7 @@ class SinglePeerDownloader:
        defer.returnValue(result)

    async def download_temp_blob_from_peer(self, peer, timeout, blob_hash):
        tmp_storage = SQLiteStorage(':memory:')
        tmp_storage = SQLiteStorage(Config(), ':memory:')
        await tmp_storage.open()
        tmp_dir = tempfile.mkdtemp()
        tmp_blob_manager = DiskBlobManager(tmp_dir, tmp_storage)

@@ -1,11 +1,10 @@
from decimal import Decimal
from lbrynet import conf
from lbrynet.p2p.Offer import Offer
from lbrynet.p2p.PriceModel import MeanAvailabilityWeightedPrice, ZeroPrice


def get_default_strategy(blob_tracker, **kwargs):
    return BasicAvailabilityWeightedStrategy(blob_tracker, **kwargs)
def get_default_strategy(blob_tracker, base_price, is_generous, **kwargs):
    return BasicAvailabilityWeightedStrategy(blob_tracker, base_price, is_generous, **kwargs)


class Strategy:

@@ -13,10 +12,9 @@ class Strategy:
    Base for negotiation strategies
    """

    def __init__(self, price_model, max_rate, min_rate, is_generous=None):
    def __init__(self, price_model, max_rate, min_rate, is_generous):
        self.price_model = price_model
        self.is_generous = (
            is_generous if is_generous is not None else conf.settings['is_generous_host'])
        self.is_generous = is_generous
        self.accepted_offers = {}
        self.pending_sent_offers = {}
        self.offers_sent = {}

@@ -98,13 +96,11 @@ class BasicAvailabilityWeightedStrategy(Strategy):

    """

    def __init__(self, blob_tracker, acceleration=1.25,
                 deceleration=0.9, max_rate=None,
                 min_rate=0.0,
                 is_generous=None,
                 base_price=0.0001, alpha=1.0):
    def __init__(self, blob_tracker, base_price, is_generous,
                 acceleration=1.25, deceleration=0.9, max_rate=None,
                 min_rate=0.0, alpha=1.0):
        price_model = MeanAvailabilityWeightedPrice(
            blob_tracker, base_price=base_price, alpha=alpha)
            blob_tracker, base_price, alpha=alpha)
        super().__init__(price_model, max_rate, min_rate, is_generous)
        self._acceleration = Decimal(acceleration)  # rate of how quickly to ramp offer
        self._deceleration = Decimal(deceleration)

@@ -5,7 +5,8 @@ from twisted.internet import error, defer
from twisted.internet.protocol import Protocol, ClientFactory
from twisted.protocols.policies import TimeoutMixin
from twisted.python import failure
from lbrynet import conf, utils
from lbrynet import utils
from lbrynet.conf import MAX_RESPONSE_INFO_SIZE
from lbrynet.p2p.Error import ConnectionClosedBeforeResponseError, NoResponseError
from lbrynet.p2p.Error import DownloadCanceledError, MisbehavingPeerError
from lbrynet.p2p.Error import RequestCanceledError

@@ -52,7 +53,7 @@ class ClientProtocol(Protocol, TimeoutMixin):
            self._blob_download_request.write(data)
        else:
            self._response_buff += data
            if len(self._response_buff) > conf.settings['MAX_RESPONSE_INFO_SIZE']:
            if len(self._response_buff) > MAX_RESPONSE_INFO_SIZE:
                log.warning("Response is too large from %s. Size %s",
                            self.peer, len(self._response_buff))
                self.transport.loseConnection()
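# Note on the change above: MAX_RESPONSE_INFO_SIZE is now a module-level
# constant imported from lbrynet.conf rather than a conf.settings entry, so
# the response-size guard no longer needs a settings object in scope.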

@@ -1,7 +1,8 @@
import random
import logging
from twisted.internet import defer, reactor
from lbrynet import utils, conf
from lbrynet import utils
from lbrynet.conf import Config
from lbrynet.p2p.client.ClientProtocol import ClientProtocolFactory
from lbrynet.p2p.Error import InsufficientFundsError

@@ -20,11 +21,11 @@ class ConnectionManager:
    MANAGE_CALL_INTERVAL_SEC = 5
    TCP_CONNECT_TIMEOUT = 15

    def __init__(self, downloader, rate_limiter,
                 primary_request_creators, secondary_request_creators):
    def __init__(self, downloader, rate_limiter, primary_request_creators, secondary_request_creators):

        self.seek_head_blob_first = conf.settings['seek_head_blob_first']
        self.max_connections_per_stream = conf.settings['max_connections_per_stream']
        self.conf: Config = downloader.conf
        self.seek_head_blob_first = self.conf.seek_head_blob_first
        self.max_connections_per_stream = self.conf.max_connections_per_stream

        self.downloader = downloader
        self.rate_limiter = rate_limiter

@@ -1,4 +1,5 @@
import logging
from lbrynet.conf import Config
from lbrynet.p2p.BlobInfo import BlobInfo
from lbrynet.p2p.client.BlobRequester import BlobRequester
from lbrynet.p2p.client.ConnectionManager import ConnectionManager

@@ -82,9 +83,10 @@ class DummyBlobHandler:


class StandaloneBlobDownloader:
    def __init__(self, blob_hash, blob_manager, peer_finder,
    def __init__(self, conf: Config, blob_hash, blob_manager, peer_finder,
                 rate_limiter, payment_rate_manager, wallet,
                 timeout=None):
        self.conf = conf
        self.blob_hash = blob_hash
        self.blob_manager = blob_manager
        self.peer_finder = peer_finder

@@ -4,6 +4,7 @@ import tempfile
from hashlib import md5
from twisted.trial.unittest import TestCase
from twisted.internet import defer, threads
from lbrynet.conf import Config
from lbrynet.p2p.StreamDescriptor import StreamDescriptorIdentifier
from lbrynet.p2p.BlobManager import DiskBlobManager
from lbrynet.p2p.StreamDescriptor import get_sd_info

@@ -31,12 +32,12 @@ class TestStreamify(TestCase):

    @defer.inlineCallbacks
    def setUp(self):
        mocks.mock_conf_settings(self)
        self.session = None
        self.lbry_file_manager = None
        self.is_generous = True
        self.db_dir = tempfile.mkdtemp()
        self.blob_dir = os.path.join(self.db_dir, "blobfiles")
        conf = Config(data_dir=self.blob_dir)
        os.mkdir(self.blob_dir)
        self.dht_node = FakeNode()
        self.wallet = FakeWallet()

@@ -44,11 +45,11 @@ class TestStreamify(TestCase):
        self.peer_finder = FakePeerFinder(5553, self.peer_manager, 2)
        self.rate_limiter = DummyRateLimiter()
        self.sd_identifier = StreamDescriptorIdentifier()
        self.storage = SQLiteStorage(':memory:')
        self.storage = SQLiteStorage(conf, ':memory:')
        self.blob_manager = DiskBlobManager(self.blob_dir, self.storage, self.dht_node._dataStore)
        self.prm = OnlyFreePaymentsManager()
        self.lbry_file_manager = EncryptedFileManager(
            self.peer_finder, self.rate_limiter, self.blob_manager, self.wallet, self.prm, self.storage,
            conf, self.peer_finder, self.rate_limiter, self.blob_manager, self.wallet, self.prm, self.storage,
            self.sd_identifier
        )
        yield f2d(self.storage.open())

@@ -8,7 +8,7 @@ from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives import serialization
from twisted.internet import defer
from twisted.python.failure import Failure
from lbrynet import conf
from lbrynet.conf import Config
from lbrynet.p2p.client.ClientRequest import ClientRequest
from lbrynet.p2p.Error import RequestCanceledError
from lbrynet.p2p import BlobAvailability

@@ -472,25 +472,3 @@ create_stream_sd_file = {
    'stream_hash': '6d27fbe10c86d81aacfb897c7a426d0a2214f5a299455a6d'
                   '315c0f998c4b3545c2dc60906122d94653c23b1898229e3f'
}


def mock_conf_settings(obj, settings={}):
    conf.settings = None
    settings.setdefault('download_mirrors', [])
    conf.initialize_settings(False)
    original_settings = conf.settings
    conf.settings = conf.Config(conf.FIXED_SETTINGS, conf.ADJUSTABLE_SETTINGS)
    conf.settings['data_dir'] = settings.get('data_dir') or conf.settings.data_dir \
        or conf.settings.default_data_dir
    conf.settings['download_directory'] = settings.get('download_directory') or conf.settings.download_dir \
        or conf.settings.default_download_dir
    conf.settings['wallet_dir'] = settings.get('wallet_dir') or conf.settings.wallet_dir or \
        conf.settings.default_wallet_dir
    conf.settings.installation_id = conf.settings.get_installation_id()
    conf.settings.node_id = conf.settings.get_node_id()
    conf.settings.update(settings)

    def _reset_settings():
        conf.settings = original_settings

    obj.addCleanup(_reset_settings)

@@ -2,19 +2,18 @@ import asyncio
from unittest import TestCase
from torba.testcase import AdvanceTimeTestCase

from tests import mocks
from lbrynet.conf import Config
from lbrynet.extras.daemon.ComponentManager import ComponentManager
from lbrynet.extras.daemon.Components import DATABASE_COMPONENT, DHT_COMPONENT
from lbrynet.extras.daemon.Components import HASH_ANNOUNCER_COMPONENT, REFLECTOR_COMPONENT, UPNP_COMPONENT
from lbrynet.extras.daemon.Components import PEER_PROTOCOL_SERVER_COMPONENT, EXCHANGE_RATE_MANAGER_COMPONENT
from lbrynet.extras.daemon.Components import RATE_LIMITER_COMPONENT, HEADERS_COMPONENT, PAYMENT_RATE_COMPONENT
from lbrynet.extras.daemon import Components
from tests import mocks


class TestComponentManager(TestCase):
    def setUp(self):
        mocks.mock_conf_settings(self)

        self.default_components_sort = [
            [
                Components.HeadersComponent,

@@ -38,7 +37,7 @@ class TestComponentManager(TestCase):
                Components.ReflectorComponent
            ]
        ]
        self.component_manager = ComponentManager()
        self.component_manager = ComponentManager(Config())

    def tearDown(self):
        pass

@@ -62,9 +61,6 @@ class TestComponentManager(TestCase):


class TestComponentManagerOverrides(TestCase):
    def setUp(self):
        mocks.mock_conf_settings(self)

    def test_init_with_overrides(self):
        class FakeWallet:
            component_name = "wallet"

@@ -77,7 +73,7 @@ class TestComponentManagerOverrides(TestCase):
        def component(self):
            return self

        new_component_manager = ComponentManager(wallet=FakeWallet)
        new_component_manager = ComponentManager(Config(), wallet=FakeWallet)
        fake_wallet = new_component_manager.get_component("wallet")
        # wallet should be an instance of FakeWallet and not WalletComponent from Components.py
        self.assertIsInstance(fake_wallet, FakeWallet)

@@ -89,14 +85,14 @@ class TestComponentManagerOverrides(TestCase):
            depends_on = []

        with self.assertRaises(SyntaxError):
            ComponentManager(randomComponent=FakeRandomComponent)
            ComponentManager(Config(), randomComponent=FakeRandomComponent)


class TestComponentManagerProperStart(AdvanceTimeTestCase):

    def setUp(self):
        mocks.mock_conf_settings(self)
        self.component_manager = ComponentManager(
            Config(),
            skip_components=[DATABASE_COMPONENT, DHT_COMPONENT, HASH_ANNOUNCER_COMPONENT,
                             PEER_PROTOCOL_SERVER_COMPONENT, REFLECTOR_COMPONENT, UPNP_COMPONENT,
                             HEADERS_COMPONENT, PAYMENT_RATE_COMPONENT, RATE_LIMITER_COMPONENT,

@@ -11,6 +11,7 @@ from lbrynet.p2p.RateLimiter import RateLimiter
from lbrynet.p2p.Peer import Peer
from lbrynet.p2p.Error import NoResponseError
from lbrynet.extras.daemon.PeerManager import PeerManager
from lbrynet.conf import Config

PEER_PORT = 5551
LOCAL_HOST = '127.0.0.1'

@@ -118,10 +119,11 @@ class TestIntegrationConnectionManager(TestCase):

    def setUp(self):

        conf.initialize_settings(False)
        conf = Config()

        self.TEST_PEER = Peer(LOCAL_HOST, PEER_PORT)
        self.downloader = MocDownloader()
        self.downloader.conf = conf
        self.rate_limiter = RateLimiter()
        self.primary_request_creator = MocRequestCreator([self.TEST_PEER])
        self.clock = task.Clock()

@@ -7,15 +7,17 @@ from twisted.trial import unittest

from lbrynet.p2p import Peer
from lbrynet.p2p.PaymentRateManager import NegotiatedPaymentRateManager, BasePaymentRateManager
from tests.mocks import BlobAvailabilityTracker as DummyBlobAvailabilityTracker, mock_conf_settings
from lbrynet.conf import Config
from tests.mocks import BlobAvailabilityTracker as DummyBlobAvailabilityTracker


class TestBlobRequestHandlerQueries(unittest.TestCase):
    def setUp(self):
        mock_conf_settings(self)
        conf = Config()
        self.blob_manager = mock.Mock()
        self.payment_rate_manager = NegotiatedPaymentRateManager(
            BasePaymentRateManager(0.001), DummyBlobAvailabilityTracker())
            BasePaymentRateManager(0.001, conf.min_info_rate), DummyBlobAvailabilityTracker(), conf.is_generous_host
        )
        from lbrynet.p2p.server import BlobRequestHandler
        self.handler = BlobRequestHandler.BlobRequestHandler(
            self.blob_manager, None, self.payment_rate_manager, None)

@@ -11,18 +11,17 @@ from lbrynet.p2p.BlobManager import DiskBlobManager
from lbrynet.extras.compat import f2d
from lbrynet.extras.daemon.storage import SQLiteStorage
from lbrynet.p2p.Peer import Peer
from lbrynet import conf
from lbrynet.cryptoutils import get_lbry_hash_obj
from lbrynet.conf import Config


class BlobManagerTest(unittest.TestCase):

    @defer.inlineCallbacks
    def setUp(self):
        conf.initialize_settings(False)
        self.blob_dir = tempfile.mkdtemp()
        self.db_dir = tempfile.mkdtemp()
        self.bm = DiskBlobManager(self.blob_dir, SQLiteStorage(':memory:'))
        self.bm = DiskBlobManager(self.blob_dir, SQLiteStorage(Config(data_dir=self.blob_dir), ':memory:'))
        self.peer = Peer('somehost', 22)
        yield f2d(self.bm.storage.open())

@@ -7,8 +7,8 @@ from twisted.trial import unittest
from lbrynet.p2p.PaymentRateManager import NegotiatedPaymentRateManager, BasePaymentRateManager
from lbrynet.p2p.Strategy import BasicAvailabilityWeightedStrategy
from lbrynet.p2p.Offer import Offer
from tests.mocks\
    import BlobAvailabilityTracker as DummyBlobAvailabilityTracker, mock_conf_settings
from lbrynet.conf import Config
from tests.mocks import BlobAvailabilityTracker as DummyBlobAvailabilityTracker

MAX_NEGOTIATION_TURNS = 10
random.seed(12345)

@@ -52,14 +52,16 @@ def calculate_negotation_turns(client_base, host_base, host_is_generous=True,
    client = mock.Mock()
    client.host = "1.2.3.5"

    client_base_prm = BasePaymentRateManager(client_base)
    conf = Config()

    client_base_prm = BasePaymentRateManager(client_base, conf.min_info_rate)
    client_prm = NegotiatedPaymentRateManager(client_base_prm,
                                              DummyBlobAvailabilityTracker(),
                                              generous=client_is_generous)
    host_base_prm = BasePaymentRateManager(host_base)
                                              client_is_generous)
    host_base_prm = BasePaymentRateManager(host_base, conf.min_info_rate)
    host_prm = NegotiatedPaymentRateManager(host_base_prm,
                                            DummyBlobAvailabilityTracker(),
                                            generous=host_is_generous)
                                            host_is_generous)
    blobs_to_query = get_random_sample(blobs)
    accepted = False
    turns = 0

@@ -72,11 +74,12 @@ def calculate_negotation_turns(client_base, host_base, host_is_generous=True,


class AvailabilityWeightedStrategyTests(unittest.TestCase):
    def setUp(self):
        mock_conf_settings(self)

    def test_first_offer_is_zero_and_second_is_not_if_offer_not_accepted(self):
        strategy = BasicAvailabilityWeightedStrategy(DummyBlobAvailabilityTracker())
        conf = Config()
        strategy = BasicAvailabilityWeightedStrategy(
            DummyBlobAvailabilityTracker(), conf.data_rate, conf.is_generous_host
        )
        peer = "1.1.1.1"

        blobs = strategy.price_model.blob_tracker.availability.keys()

@@ -88,8 +91,13 @@ class AvailabilityWeightedStrategyTests(unittest.TestCase):
        self.assertNotEqual(offer2.rate, 0.0)

    def test_accept_zero_and_persist_if_accepted(self):
        host_strategy = BasicAvailabilityWeightedStrategy(DummyBlobAvailabilityTracker())
        client_strategy = BasicAvailabilityWeightedStrategy(DummyBlobAvailabilityTracker())
        conf = Config()
        host_strategy = BasicAvailabilityWeightedStrategy(
            DummyBlobAvailabilityTracker(), conf.data_rate, conf.is_generous_host
        )
        client_strategy = BasicAvailabilityWeightedStrategy(
            DummyBlobAvailabilityTracker(), conf.data_rate, conf.is_generous_host
        )

        client = "1.1.1.1"
        host = "1.1.1.2"

@@ -3,8 +3,6 @@ from twisted.internet import defer
from lbrynet.blob import CryptBlob
from lbrynet.blob.blob_file import MAX_BLOB_SIZE

from tests.mocks import mock_conf_settings

from cryptography.hazmat.primitives.ciphers.algorithms import AES
import random
import string

@@ -39,9 +37,6 @@ def random_string(length):


class TestCryptBlob(unittest.TestCase):
    def setUp(self):
        mock_conf_settings(self)

    @defer.inlineCallbacks
    def _test_encrypt_decrypt(self, size_of_data):

@@ -5,7 +5,7 @@ import logging
from copy import deepcopy
from twisted.internet import defer
from twisted.trial import unittest
from lbrynet import conf
from lbrynet.conf import Config
from lbrynet.extras.compat import f2d
from lbrynet.extras.daemon.storage import SQLiteStorage, open_file_for_writing
from lbrynet.blob.EncryptedFileDownloader import ManagedEncryptedFileDownloader

@@ -84,9 +84,8 @@ class StorageTest(unittest.TestCase):

    @defer.inlineCallbacks
    def setUp(self):
        conf.initialize_settings(False)
        self.db_dir = tempfile.mkdtemp()
        self.storage = SQLiteStorage(':memory:')
        self.storage = SQLiteStorage(Config(data_dir=self.db_dir), ':memory:')
        yield f2d(self.storage.open())

    @defer.inlineCallbacks

@@ -1,9 +1,9 @@
from twisted.trial import unittest
from twisted.internet import defer, task
from lbrynet import utils
from lbrynet.conf import Config
from lbrynet.extras.daemon.HashAnnouncer import DHTHashAnnouncer
from tests.test_utils import random_lbry_hash
from tests.mocks import mock_conf_settings


class MocDHTNode:

@@ -38,7 +38,7 @@ class MocStorage:
class DHTHashAnnouncerTest(unittest.TestCase):

    def setUp(self):
        mock_conf_settings(self)
        conf = Config()
        self.num_blobs = 10
        self.blobs_to_announce = []
        for i in range(0, self.num_blobs):

@@ -47,7 +47,7 @@ class DHTHashAnnouncerTest(unittest.TestCase):
        self.clock = self.dht_node.clock
        utils.call_later = self.clock.callLater
        self.storage = MocStorage(self.blobs_to_announce)
        self.announcer = DHTHashAnnouncer(self.dht_node, self.storage)
        self.announcer = DHTHashAnnouncer(conf, self.dht_node, self.storage)

    @defer.inlineCallbacks
    def test_immediate_announce(self):

@@ -3,6 +3,7 @@ from twisted.trial import unittest
from twisted.internet import defer

from cryptography.hazmat.primitives.ciphers.algorithms import AES
from lbrynet.conf import Config
from lbrynet.extras.compat import f2d
from lbrynet.extras.daemon.PeerManager import PeerManager
from lbrynet.p2p.StreamDescriptor import get_sd_info, BlobStreamDescriptorReader

@@ -38,18 +39,20 @@ class CreateEncryptedFileTest(unittest.TestCase):
    timeout = 5

    def setUp(self):
        mocks.mock_conf_settings(self)
        self.tmp_db_dir, self.tmp_blob_dir = mk_db_and_blob_dir()
        conf = Config(data_dir=self.tmp_blob_dir)
        self.wallet = FakeWallet()
        self.peer_manager = PeerManager()
        self.peer_finder = FakePeerFinder(5553, self.peer_manager, 2)
        self.rate_limiter = DummyRateLimiter()
        self.sd_identifier = StreamDescriptorIdentifier()
        self.storage = SQLiteStorage(':memory:')
        self.storage = SQLiteStorage(conf, ':memory:')
        self.blob_manager = DiskBlobManager(self.tmp_blob_dir, self.storage)
        self.prm = OnlyFreePaymentsManager()
        self.lbry_file_manager = EncryptedFileManager(self.peer_finder, self.rate_limiter, self.blob_manager,
                                                      self.wallet, self.prm, self.storage, self.sd_identifier)
        self.lbry_file_manager = EncryptedFileManager(
            conf, self.peer_finder, self.rate_limiter, self.blob_manager,
            self.wallet, self.prm, self.storage, self.sd_identifier
        )
        d = f2d(self.storage.open())
        d.addCallback(lambda _: f2d(self.lbry_file_manager.setup()))
        return d

@@ -1,107 +0,0 @@
from unittest import mock, skip

from twisted.internet import reactor
from twisted.trial import unittest

from lbrynet import conf
#from lbrynet.extras.daemon.auth import server
from tests.mocks import mock_conf_settings


@skip
class AuthJSONRPCServerTest(unittest.TestCase):
    # TODO: move to using a base class for tests
    # and add useful general utilities like this
    # onto it.
    def setUp(self):
        conf.initialize_settings(False)
        self.server = server.AuthJSONRPCServer(True, use_authentication=False)

    def test_listen_auth_https(self):
        self.server._use_https = True
        self.server._use_authentication = True
        factory = self.server.get_server_factory()
        listening_port = reactor.listenSSL(
            conf.settings['api_port'], factory, factory.options, interface="localhost"
        )
        listening_port.stopListening()

    def test_listen_no_auth_https(self):
        self.server._use_https = True
        self.server._use_authentication = False
        factory = self.server.get_server_factory()
        listening_port = reactor.listenSSL(
            conf.settings['api_port'], factory, factory.options, interface="localhost"
        )
        listening_port.stopListening()

    def test_listen_auth_http(self):
        self.server._use_https = False
        self.server._use_authentication = True
        factory = self.server.get_server_factory()
        listening_port = reactor.listenTCP(
            conf.settings['api_port'], factory, interface="localhost"
        )
        listening_port.stopListening()

    def test_listen_no_auth_http(self):
        self.server._use_https = False
        self.server._use_authentication = False
        factory = self.server.get_server_factory()
        listening_port = reactor.listenTCP(
            conf.settings['api_port'], factory, interface="localhost"
        )
        listening_port.stopListening()

    def test_get_server_port(self):
        self.assertSequenceEqual(
            ('example.com', 80), self.server.get_server_port('http://example.com'))
        self.assertSequenceEqual(
            ('example.com', 1234), self.server.get_server_port('http://example.com:1234'))

    def test_foreign_origin_is_rejected(self):
        mock_conf_settings(self)  # have to call this to generate Config mock
        request = mock.Mock(['getHeader'])
        request.getHeader = mock.Mock(return_value='http://example.com')
        self.assertFalse(self.server._check_header_source(request, 'Origin'))

    def test_wrong_port_is_rejected(self):
        mock_conf_settings(self, {'api_port': 1234})
        request = mock.Mock(['getHeader'])
        request.getHeader = mock.Mock(return_value='http://localhost:9999')
        self.assertFalse(self.server._check_header_source(request, 'Origin'))

    def test_matching_origin_is_allowed(self):
        mock_conf_settings(self, {'api_host': 'example.com', 'api_port': 1234})
        request = mock.Mock(['getHeader'])
        request.getHeader = mock.Mock(return_value='http://example.com:1234')
        self.assertTrue(self.server._check_header_source(request, 'Origin'))

    def test_any_origin_is_allowed(self):
        mock_conf_settings(self, {'api_host': '0.0.0.0', 'api_port': 80})
        request = mock.Mock(['getHeader'])
        request.getHeader = mock.Mock(return_value='http://example.com')
        self.assertTrue(self.server._check_header_source(request, 'Origin'))
        request = mock.Mock(['getHeader'])
        request.getHeader = mock.Mock(return_value='http://another-example.com')
        self.assertTrue(self.server._check_header_source(request, 'Origin'))

    def test_matching_referer_is_allowed(self):
        mock_conf_settings(self, {'api_host': 'the_api', 'api_port': 1111})
        request = mock.Mock(['getHeader'])
        request.getHeader = mock.Mock(return_value='http://the_api:1111?settings')
        self.assertTrue(self.server._check_header_source(request, 'Referer'))
        request.getHeader.assert_called_with('Referer')

    def test_request_is_allowed_when_matching_allowed_origin_setting(self):
        mock_conf_settings(self, {'allowed_origin': 'http://example.com:1234'})
        request = mock.Mock(['getHeader'])
        request.getHeader = mock.Mock(return_value='http://example.com:1234')
        self.assertTrue(self.server._check_header_source(request, 'Origin'))

    def test_request_is_rejected_when_not_matching_allowed_origin_setting(self):
        mock_conf_settings(self, {'allowed_origin': 'http://example.com:1234'})
        request = mock.Mock(['getHeader'])
        # note the ports don't match
        request.getHeader = mock.Mock(return_value='http://example.com:1235')
        self.assertFalse(self.server._check_header_source(request, 'Origin'))

@@ -20,28 +20,29 @@ from lbrynet.blob.EncryptedFileStatusReport import EncryptedFileStatusReport
from lbrynet.extras.wallet import LbryWalletManager
from torba.client.wallet import Wallet

from lbrynet.conf import Config
from lbrynet.p2p.PaymentRateManager import OnlyFreePaymentsManager
from tests import test_utils
from tests.mocks import mock_conf_settings, FakeNetwork, FakeFileManager
from tests.mocks import FakeNetwork, FakeFileManager
from tests.mocks import ExchangeRateManager as DummyExchangeRateManager
from tests.mocks import BTCLBCFeed, USDBTCFeed
from tests.test_utils import is_android


def get_test_daemon(data_rate=None, generous=True, with_fee=False):
    if data_rate is None:
        data_rate = conf.ADJUSTABLE_SETTINGS['data_rate'][1]

def get_test_daemon(conf: Config, with_fee=False):
    rates = {
        'BTCLBC': {'spot': 3.0, 'ts': test_utils.DEFAULT_ISO_TIME + 1},
        'USDBTC': {'spot': 2.0, 'ts': test_utils.DEFAULT_ISO_TIME + 2}
    }
    component_manager = ComponentManager(
        skip_components=[DATABASE_COMPONENT, DHT_COMPONENT, WALLET_COMPONENT, UPNP_COMPONENT,
                         PEER_PROTOCOL_SERVER_COMPONENT, REFLECTOR_COMPONENT, HASH_ANNOUNCER_COMPONENT,
                         EXCHANGE_RATE_MANAGER_COMPONENT, BLOB_COMPONENT,
                         HEADERS_COMPONENT, RATE_LIMITER_COMPONENT],
        conf, skip_components=[
            DATABASE_COMPONENT, DHT_COMPONENT, WALLET_COMPONENT, UPNP_COMPONENT,
            PEER_PROTOCOL_SERVER_COMPONENT, REFLECTOR_COMPONENT, HASH_ANNOUNCER_COMPONENT,
            EXCHANGE_RATE_MANAGER_COMPONENT, BLOB_COMPONENT,
            HEADERS_COMPONENT, RATE_LIMITER_COMPONENT],
        file_manager=FakeFileManager
    )
    daemon = LBRYDaemon(component_manager=component_manager)
    daemon = LBRYDaemon(conf, component_manager=component_manager)
    daemon.payment_rate_manager = OnlyFreePaymentsManager()
    daemon.wallet_manager = mock.Mock(spec=LbryWalletManager)
    daemon.wallet_manager.wallet = mock.Mock(spec=Wallet)

@@ -80,24 +81,23 @@ def get_test_daemon(data_rate=None, generous=True, with_fee=False):
class TestCostEst(unittest.TestCase):

    def setUp(self):
        mock_conf_settings(self)
        test_utils.reset_time(self)

    @defer.inlineCallbacks
    def test_fee_and_generous_data(self):
        size = 10000000
        correct_result = 4.5
        daemon = get_test_daemon(generous=True, with_fee=True)
        daemon = get_test_daemon(Config(is_generous_host=True), with_fee=True)
        result = yield f2d(daemon.get_est_cost("test", size))
        self.assertEqual(result, correct_result)

    @defer.inlineCallbacks
    def test_fee_and_ungenerous_data(self):
        conf = Config(is_generous_host=False)
        size = 10000000
        fake_fee_amount = 4.5
        data_rate = conf.ADJUSTABLE_SETTINGS['data_rate'][1]
        correct_result = size / 10 ** 6 * data_rate + fake_fee_amount
        daemon = get_test_daemon(generous=False, with_fee=True)
        correct_result = size / 10 ** 6 * conf.data_rate + fake_fee_amount
        daemon = get_test_daemon(conf, with_fee=True)
        result = yield f2d(daemon.get_est_cost("test", size))
        self.assertEqual(result, round(correct_result, 1))

@@ -105,16 +105,16 @@ class TestCostEst(unittest.TestCase):
    def test_generous_data_and_no_fee(self):
        size = 10000000
        correct_result = 0.0
        daemon = get_test_daemon(generous=True)
        daemon = get_test_daemon(Config(is_generous_host=True))
        result = yield f2d(daemon.get_est_cost("test", size))
        self.assertEqual(result, correct_result)

    @defer.inlineCallbacks
    def test_ungenerous_data_and_no_fee(self):
        conf = Config(is_generous_host=False)
        size = 10000000
        data_rate = conf.ADJUSTABLE_SETTINGS['data_rate'][1]
        correct_result = size / 10 ** 6 * data_rate
        daemon = get_test_daemon(generous=False)
        correct_result = size / 10 ** 6 * conf.data_rate
        daemon = get_test_daemon(conf)
        result = yield f2d(daemon.get_est_cost("test", size))
        self.assertEqual(result, round(correct_result, 1))

@@ -125,10 +125,8 @@ class TestJsonRpc(unittest.TestCase):
        def noop():
            return None

        mock_conf_settings(self)
        test_utils.reset_time(self)
        self.test_daemon = get_test_daemon()
        self.test_daemon.wallet_manager.is_first_run = False
        self.test_daemon = get_test_daemon(Config())
        self.test_daemon.wallet_manager.get_best_blockhash = noop

    @defer.inlineCallbacks

@@ -147,9 +145,8 @@ class TestJsonRpc(unittest.TestCase):
class TestFileListSorting(unittest.TestCase):

    def setUp(self):
        mock_conf_settings(self)
        test_utils.reset_time(self)
        self.test_daemon = get_test_daemon()
        self.test_daemon = get_test_daemon(Config())
        self.test_daemon.file_manager.lbry_files = self._get_fake_lbry_files()

        self.test_points_paid = [

@@ -17,8 +17,7 @@ from lbrynet.extras.daemon.PeerFinder import DummyPeerFinder
from lbrynet.blob.EncryptedFileStatusReport import EncryptedFileStatusReport
from lbrynet.blob.EncryptedFileDownloader import ManagedEncryptedFileDownloader
from lbrynet.extras.wallet import LbryWalletManager

from tests.mocks import mock_conf_settings
from lbrynet.conf import Config


class MocDownloader:

@@ -70,7 +69,7 @@ def moc_pay_key_fee(d):
class GetStreamTests(unittest.TestCase):

    def init_getstream_with_mocs(self):
        mock_conf_settings(self)
        conf = Config()

        sd_identifier = mock.Mock(spec=StreamDescriptorIdentifier)
        wallet = mock.Mock(spec=LbryWalletManager)

@@ -83,7 +82,7 @@ class GetStreamTests(unittest.TestCase):
        disable_max_key_fee = False
        data_rate = {'currency': "LBC", 'amount': 0, 'address': ''}
        getstream = Downloader.GetStream(
            sd_identifier, wallet, exchange_rate_manager, blob_manager, peer_finder, DummyRateLimiter(), prm,
            conf, sd_identifier, wallet, exchange_rate_manager, blob_manager, peer_finder, DummyRateLimiter(), prm,
            storage, max_key_fee, disable_max_key_fee, timeout=3, data_rate=data_rate
        )
        getstream.download_manager = mock.Mock(spec=DownloadManager)

@@ -1,62 +1,62 @@
import os
import json
import sys
import types
import tempfile
import shutil
import unittest
import argparse
from lbrynet import conf
from lbrynet.conf import Config, BaseConfig, String, Integer, Toggle, Servers, NOT_SET
from lbrynet.p2p.Error import InvalidCurrencyError


class TestConfig(conf.Configuration):
    test = conf.String('the default')
    test_int = conf.Integer(9)
    test_toggle = conf.Toggle(False)
    servers = conf.Servers([('localhost', 80)])
class TestConfig(BaseConfig):
    test_str = String('str help', 'the default', previous_names=['old_str'])
    test_int = Integer('int help', 9)
    test_toggle = Toggle('toggle help', False)
    servers = Servers('servers help', [('localhost', 80)])


class ConfigurationTests(unittest.TestCase):

    @unittest.skipIf('linux' not in sys.platform, 'skipping linux only test')
    def test_linux_defaults(self):
        c = TestConfig()
        c = Config()
        self.assertEqual(c.data_dir, os.path.expanduser('~/.local/share/lbry/lbrynet'))
        self.assertEqual(c.wallet_dir, os.path.expanduser('~/.local/share/lbry/lbryum'))
        self.assertEqual(c.download_dir, os.path.expanduser('~/Downloads'))
        self.assertEqual(c.config, os.path.expanduser('~/.local/share/lbry/lbrynet/daemon_settings.yml'))
        self.assertEqual(c.api_connection_url, 'http://localhost:5279/lbryapi')
        self.assertEqual(c.log_file_path, os.path.expanduser('~/.local/share/lbry/lbrynet/lbrynet.log'))

    def test_search_order(self):
        c = TestConfig()
        c.runtime = {'test': 'runtime'}
        c.arguments = {'test': 'arguments'}
        c.environment = {'test': 'environment'}
        c.persisted = {'test': 'persisted'}
        self.assertEqual(c.test, 'runtime')
        c.runtime = {'test_str': 'runtime'}
        c.arguments = {'test_str': 'arguments'}
        c.environment = {'test_str': 'environment'}
        c.persisted = {'test_str': 'persisted'}
        self.assertEqual(c.test_str, 'runtime')
        c.runtime = {}
        self.assertEqual(c.test, 'arguments')
        self.assertEqual(c.test_str, 'arguments')
        c.arguments = {}
        self.assertEqual(c.test, 'environment')
        self.assertEqual(c.test_str, 'environment')
        c.environment = {}
        self.assertEqual(c.test, 'persisted')
        self.assertEqual(c.test_str, 'persisted')
        c.persisted = {}
        self.assertEqual(c.test, 'the default')
        self.assertEqual(c.test_str, 'the default')

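    # The lookup order exercised above, highest precedence first:
    # runtime > arguments > environment > persisted > hard-coded default.
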
    def test_arguments(self):
        parser = argparse.ArgumentParser()
        parser.add_argument("--test")
        args = parser.parse_args(['--test', 'blah'])
        parser.add_argument("--test-str")
        args = parser.parse_args(['--test-str', 'blah'])
        c = TestConfig.create_from_arguments(args)
        self.assertEqual(c.test, 'blah')
        self.assertEqual(c.test_str, 'blah')
        c.arguments = {}
        self.assertEqual(c.test, 'the default')
        self.assertEqual(c.test_str, 'the default')

    def test_environment(self):
        c = TestConfig()
        self.assertEqual(c.test, 'the default')
        c.set_environment({'LBRY_TEST': 'from environ'})
        self.assertEqual(c.test, 'from environ')
        self.assertEqual(c.test_str, 'the default')
        c.set_environment({'LBRY_TEST_STR': 'from environ'})
        self.assertEqual(c.test_str, 'from environ')

    def test_persisted(self):
        with tempfile.TemporaryDirectory() as temp_dir:

@@ -67,50 +67,63 @@ class ConfigurationTests(unittest.TestCase):

            # settings.yml doesn't exist on file system
            self.assertFalse(c.persisted.exists)
            self.assertEqual(c.test, 'the default')
            self.assertEqual(c.test_str, 'the default')

            self.assertEqual(c.modify_order, [c.runtime])
            with c.update_config():
                self.assertEqual(c.modify_order, [c.runtime, c.persisted])
                c.test = 'new value'
                c.test_str = 'original'
            self.assertEqual(c.modify_order, [c.runtime])

            # share_usage_data has been saved to settings file
            self.assertTrue(c.persisted.exists)
            with open(c.config, 'r') as fd:
                self.assertEqual(fd.read(), 'test: new value\n')
                self.assertEqual(fd.read(), 'test_str: original\n')

            # load the settings file and check share_usage_data is false
            c = TestConfig.create_from_arguments(
                types.SimpleNamespace(config=os.path.join(temp_dir, 'settings.yml'))
            )
            self.assertTrue(c.persisted.exists)
            self.assertEqual(c.test, 'new value')
            self.assertEqual(c.test_str, 'original')

            # setting in runtime overrides config
            self.assertNotIn('test', c.runtime)
            c.test = 'from runtime'
            self.assertIn('test', c.runtime)
            self.assertEqual(c.test, 'from runtime')
            self.assertNotIn('test_str', c.runtime)
            c.test_str = 'from runtime'
            self.assertIn('test_str', c.runtime)
            self.assertEqual(c.test_str, 'from runtime')

            # NOT_SET only clears it in runtime location
            c.test = conf.NOT_SET
            self.assertNotIn('test', c.runtime)
            self.assertEqual(c.test, 'new value')
            # without context manager NOT_SET only clears it in runtime location
            c.test_str = NOT_SET
            self.assertNotIn('test_str', c.runtime)
            self.assertEqual(c.test_str, 'original')

            # clear it in persisted as well
            self.assertIn('test', c.persisted)
            # clear it in persisted as well by using context manager
            self.assertIn('test_str', c.persisted)
            with c.update_config():
                c.test = conf.NOT_SET
            self.assertNotIn('test', c.persisted)
            self.assertEqual(c.test, 'the default')
                c.test_str = NOT_SET
            self.assertNotIn('test_str', c.persisted)
            self.assertEqual(c.test_str, 'the default')
            with open(c.config, 'r') as fd:
                self.assertEqual(fd.read(), '{}\n')

    def test_persisted_upgrade(self):
        with tempfile.TemporaryDirectory() as temp_dir:
            config = os.path.join(temp_dir, 'settings.yml')
            with open(config, 'w') as fd:
                fd.write('old_str: old stuff\n')
            c = TestConfig.create_from_arguments(
                types.SimpleNamespace(config=config)
            )
            self.assertEqual(c.test_str, 'old stuff')
            self.assertNotIn('old_str', c.persisted)
            with open(config, 'w') as fd:
                fd.write('test_str: old stuff\n')

    def test_validation(self):
        c = TestConfig()
        with self.assertRaisesRegex(AssertionError, 'must be a string'):
            c.test = 9
            c.test_str = 9
        with self.assertRaisesRegex(AssertionError, 'must be an integer'):
            c.test_int = 'hi'
        with self.assertRaisesRegex(AssertionError, 'must be a true/false'):

@@ -143,7 +156,7 @@ class ConfigurationTests(unittest.TestCase):
            config = os.path.join(temp_dir, 'settings.yml')
            with open(config, 'w') as fd:
                fd.write('max_key_fee: \'{"currency":"USD", "amount":1}\'\n')
            c = conf.ServerConfiguration.create_from_arguments(
            c = Config.create_from_arguments(
                types.SimpleNamespace(config=config)
            )
            self.assertEqual(c.max_key_fee['currency'], 'USD')