Merge pull request #348 from lbryio/remove-old-settings

remove unqlite requirement and old Settings.py
This commit is contained in:
Jack Robison 2016-12-19 14:38:31 -05:00 committed by GitHub
commit 3da82ce895
10 changed files with 47 additions and 223 deletions

View file

@@ -80,7 +80,6 @@ class BlobAvailabilityTracker(object):
d = self._get_most_popular()
d.addCallback(lambda _: self._set_mean_peers())
def _update_mine(self):
def _get_peers(blobs):
dl = []
@@ -89,7 +88,7 @@ class BlobAvailabilityTracker(object):
return defer.DeferredList(dl)
def sample(blobs):
return random.sample(blobs, 100)
return random.sample(blobs, min(len(blobs), 100))
start = time.time()
log.debug('==> Updating the peers for my blobs')

View file

@@ -1,6 +1,5 @@
import logging
import miniupnpc
from lbrynet.core.PTCWallet import PTCWallet
from lbrynet.core.BlobManager import DiskBlobManager, TempBlobManager
from lbrynet.dht import node
from lbrynet.core.PeerManager import PeerManager
@@ -147,6 +146,7 @@ class Session(object):
self.lbryid = generate_id()
if self.wallet is None:
from lbrynet.core.PTCWallet import PTCWallet
self.wallet = PTCWallet(self.db_dir)
if self.peer_manager is None:

View file

@@ -919,7 +919,6 @@ class LBRYumWallet(Wallet):
func = getattr(self.cmd_runner, cmd.name)
return defer.succeed(func(*args))
# run commands as a deferToThread, lbryum commands that only make
# queries to lbryum server should be run this way
def _run_cmd_as_defer_to_thread(self, command_name, *args):
@@ -1055,7 +1054,6 @@ class LBRYumWallet(Wallet):
return defer.succeed(val)
class LBRYcrdAddressRequester(object):
implements([IRequestCreator])

View file

@@ -33,7 +33,6 @@ from lbrynet.lbryfile.EncryptedFileMetadataManager import DBEncryptedFileMetadat
from lbrynet.lbryfile.EncryptedFileMetadataManager import TempEncryptedFileMetadataManager
from lbrynet.lbryfile.StreamDescriptor import EncryptedFileStreamType
from lbrynet.lbryfilemanager.EncryptedFileManager import EncryptedFileManager
from lbrynet.lbrynet_daemon.Settings import Settings
from lbrynet.lbrynet_daemon.UIManager import UIManager
from lbrynet.lbrynet_daemon.Downloader import GetStream
from lbrynet.lbrynet_daemon.Publisher import Publisher
@@ -44,7 +43,6 @@ from lbrynet.core import system_info
from lbrynet.core.StreamDescriptor import StreamDescriptorIdentifier, download_sd_blob
from lbrynet.core.StreamDescriptor import BlobStreamDescriptorReader
from lbrynet.core.Session import Session
from lbrynet.core.PTCWallet import PTCWallet
from lbrynet.core.Wallet import LBRYumWallet
from lbrynet.core.looping_call_manager import LoopingCallManager
from lbrynet.core.server.BlobRequestHandler import BlobRequestHandlerFactory
@@ -98,8 +96,9 @@ BAD_REQUEST = 400
NOT_FOUND = 404
OK_CODE = 200
PENDING_LBRY_ID = "not set"
SHORT_LBRY_ID_LEN = 20
PENDING_ID = "not set"
SHORT_ID_LEN = 20
class Checker:
"""The looping calls the daemon runs"""
@@ -121,6 +120,7 @@ class FileID:
REMOTE_SERVER = "www.lbry.io"
class NoValidSearch(Exception):
pass
@@ -266,7 +266,7 @@ class Daemon(AuthJSONRPCServer):
self.log_uploader = log_support.LogUploader.load('lbrynet', self.log_file)
self.analytics_manager = analytics_manager
self.lbryid = PENDING_LBRY_ID
self.lbryid = PENDING_ID
self.daemon_conf = conf.settings.get_conf_filename()
self.wallet_user = None
@@ -286,14 +286,13 @@ class Daemon(AuthJSONRPCServer):
self.looping_call_manager = LoopingCallManager(calls)
self.sd_identifier = StreamDescriptorIdentifier()
self.stream_info_manager = TempEncryptedFileMetadataManager()
self.settings = Settings(self.db_dir)
self.lbry_ui_manager = UIManager(root)
self.blob_request_payment_rate_manager = None
self.lbry_file_metadata_manager = None
self.lbry_file_manager = None
def setup(self):
self._modify_loggly_formatter()
def _log_starting_vals():
log.info("Starting balance: " + str(self.session.wallet.wallet_balance))
return defer.succeed(None)
@@ -338,7 +337,6 @@ class Daemon(AuthJSONRPCServer):
d.addCallback(lambda _: self._initial_setup())
d.addCallback(lambda _: threads.deferToThread(self._setup_data_directory))
d.addCallback(lambda _: self._check_db_migration())
d.addCallback(lambda _: self._get_settings())
d.addCallback(lambda _: self._load_caches())
d.addCallback(lambda _: self._set_events())
d.addCallback(lambda _: self._get_session())
@@ -368,12 +366,22 @@ class Daemon(AuthJSONRPCServer):
return d
def _load_caches(self):
if os.path.isfile(os.path.join(self.db_dir, "stream_info_cache.json")):
filename = os.path.join(self.db_dir, "stream_info_cache.json")
with open(filename, "r") as stream_info_cache:
self.name_cache = json.loads(stream_info_cache.read())
name_cache_filename = os.path.join(self.db_dir, "stream_info_cache.json")
lbry_id_filename = os.path.join(self.db_dir, "lbry_id")
if os.path.isfile(name_cache_filename):
with open(name_cache_filename, "r") as name_cache:
self.name_cache = json.loads(name_cache.read())
log.info("Loaded claim info cache")
if os.path.isfile(lbry_id_filename):
with open(lbry_id_filename, "r") as lbry_id_file:
self.lbryid = base58.b58decode(lbry_id_file.read())
else:
with open(lbry_id_filename, "w") as lbry_id_file:
self.lbryid = utils.generate_id()
lbry_id_file.write(base58.b58encode(self.lbryid))
def _set_events(self):
context = analytics.make_context(self._get_platform(), self.wallet_type)
self._events = analytics.Events(context, base58.b58encode(self.lbryid), self._session_id)
@@ -516,17 +524,10 @@ class Daemon(AuthJSONRPCServer):
return defer.succeed(True)
def _setup_server(self):
def restore_running_status(running):
if running is True:
d = self._start_server()
d.addCallback(lambda _: self._start_reflector())
return defer.succeed(True)
self.startup_status = STARTUP_STAGES[4]
dl = self.settings.get_server_running_status()
dl.addCallback(restore_running_status)
return dl
d = self._start_server()
d.addCallback(lambda _: self._start_reflector())
return d
def _setup_query_handlers(self):
handlers = [
@@ -538,42 +539,21 @@ class Daemon(AuthJSONRPCServer):
),
self.session.wallet.get_wallet_info_query_handler_factory(),
]
def get_blob_request_handler_factory(rate):
self.blob_request_payment_rate_manager = self.session.payment_rate_manager
d1 = self.settings.get_server_data_payment_rate()
d1.addCallback(get_blob_request_handler_factory)
dl = defer.DeferredList([d1])
dl.addCallback(lambda _: self._add_query_handlers(handlers))
return dl
return self._add_query_handlers(handlers)
def _add_query_handlers(self, query_handlers):
def _set_query_handlers(statuses):
from future_builtins import zip
for handler, (success, status) in zip(query_handlers, statuses):
if success is True:
self.query_handlers[handler] = status
ds = []
for handler in query_handlers:
query_id = handler.get_primary_query_identifier()
ds.append(self.settings.get_query_handler_status(query_id))
dl = defer.DeferredList(ds)
dl.addCallback(_set_query_handlers)
return dl
self.query_handlers[query_id] = handler
return defer.succeed(None)
def _upload_log(self, log_type=None, exclude_previous=False, force=False):
if self.upload_log or force:
if self.lbryid is not PENDING_LBRY_ID:
id_hash = base58.b58encode(self.lbryid)[:SHORT_LBRY_ID_LEN]
else:
id_hash = self.lbryid
lbry_id = base58.b58encode(self.lbryid)[:SHORT_ID_LEN]
try:
self.log_uploader.upload(exclude_previous, self.lbryid, log_type)
self.log_uploader.upload(exclude_previous, lbry_id, log_type)
except requests.RequestException:
log.exception('Failed to upload log file')
log.warning('Failed to upload log file')
return defer.succeed(None)
def _clean_up_temp_files(self):
@@ -700,26 +680,6 @@ class Daemon(AuthJSONRPCServer):
return d
return defer.succeed(True)
def _get_settings(self):
d = self.settings.start()
d.addCallback(lambda _: self.settings.get_lbryid())
d.addCallback(self._set_lbryid)
d.addCallback(lambda _: self._modify_loggly_formatter())
return d
def _set_lbryid(self, lbryid):
if lbryid is PENDING_LBRY_ID or lbryid is None:
return self._make_set_and_save_lbryid()
else:
log.info("LBRY ID: " + base58.b58encode(lbryid))
self.lbryid = lbryid
def _make_set_and_save_lbryid(self):
self.lbryid = utils.generate_id()
log.info("Generated new LBRY ID: " + base58.b58encode(self.lbryid))
d = self.settings.save_lbryid(self.lbryid)
return d
def _modify_loggly_formatter(self):
log_support.configure_loggly_handler(
lbry_id=base58.b58encode(self.lbryid),
@@ -759,12 +719,6 @@ class Daemon(AuthJSONRPCServer):
)
def _get_session(self):
def get_default_data_rate():
d = self.settings.get_default_data_payment_rate()
d.addCallback(lambda rate: {"default_data_payment_rate": rate if rate is not None else
conf.settings.data_rate})
return d
def get_wallet():
if self.wallet_type == LBRYCRD_WALLET:
raise ValueError('LBRYcrd Wallet is no longer supported')
@@ -773,28 +727,19 @@ class Daemon(AuthJSONRPCServer):
config = {'auto_connect': True}
if conf.settings.lbryum_wallet_dir:
config['lbryum_path'] = conf.settings.lbryum_wallet_dir
d = defer.succeed(LBRYumWallet(self.db_dir, config))
return defer.succeed(LBRYumWallet(self.db_dir, config))
elif self.wallet_type == PTC_WALLET:
log.info("Using PTC wallet")
d = defer.succeed(PTCWallet(self.db_dir))
from lbrynet.core.PTCWallet import PTCWallet
return defer.succeed(PTCWallet(self.db_dir))
else:
raise ValueError('Wallet Type {} is not valid'.format(self.wallet_type))
d.addCallback(lambda w: {"wallet": w})
return d
d1 = get_default_data_rate()
d2 = get_wallet()
d = get_wallet()
def combine_results(results):
r = {}
for success, result in results:
if success is True:
r.update(result)
return r
def create_session(results):
def create_session(wallet):
self.session = Session(
results['default_data_payment_rate'],
conf.settings.data_rate,
db_dir=self.db_dir,
lbryid=self.lbryid,
blob_dir=self.blobfile_dir,
@@ -802,17 +747,15 @@ class Daemon(AuthJSONRPCServer):
known_dht_nodes=conf.settings.known_dht_nodes,
peer_port=self.peer_port,
use_upnp=self.use_upnp,
wallet=results['wallet'],
wallet=wallet,
is_generous=conf.settings.is_generous_host
)
self.startup_status = STARTUP_STAGES[2]
dl = defer.DeferredList([d1, d2], fireOnOneErrback=True)
dl.addCallback(combine_results)
dl.addCallback(create_session)
dl.addCallback(lambda _: self.session.setup())
d.addCallback(create_session)
d.addCallback(lambda _: self.session.setup())
return dl
return d
def _setup_stream_identifier(self):
file_saver_factory = EncryptedFileSaverFactory(
@@ -953,7 +896,7 @@ class Daemon(AuthJSONRPCServer):
reactor.callLater(self.search_timeout, _check_est, d)
d.addCallback(
lambda _: download_sd_blob(
self.session, sd_hash, self.blob_request_payment_rate_manager))
self.session, sd_hash, self.session.payment_rate_manager))
return d
def get_or_download_sd_blob(self, sd_hash):
@@ -1272,7 +1215,7 @@ class Daemon(AuthJSONRPCServer):
def _prepare_message(blobs):
msg = {
'lbry_id': base58.b58encode(self.lbryid)[:SHORT_LBRY_ID_LEN],
'lbry_id': base58.b58encode(self.lbryid)[:SHORT_ID_LEN],
'managed_blobs': len(blobs),
'managed_streams': len(self.lbry_file_manager.lbry_files),
}

View file

@@ -1,114 +0,0 @@
import binascii
import functools
import json
import logging
import os
from twisted.internet import threads, defer
import unqlite
log = logging.getLogger(__name__)
def run_in_thread(fn):
@functools.wraps(fn)
def wrapped(*args, **kwargs):
return threads.deferToThread(fn, *args, **kwargs)
return wrapped
class Settings(object):
NAME = "settings.db"
def __init__(self, db_dir):
self.db_dir = db_dir
self.db = None
def start(self):
return self._open_db()
def stop(self):
self.db.close()
self.db = None
return defer.succeed(True)
def _open_db(self):
filename = os.path.join(self.db_dir, self.NAME)
log.debug("Opening %s as the settings database", filename)
self.db = unqlite.UnQLite(filename)
return defer.succeed(True)
@run_in_thread
def save_lbryid(self, lbryid):
self.db['lbryid'] = binascii.hexlify(lbryid)
self.db.commit()
@run_in_thread
def get_lbryid(self):
if 'lbryid' in self.db:
return binascii.unhexlify(self.db['lbryid'])
else:
return None
@run_in_thread
def get_server_running_status(self):
if 'server_running' in self.db:
return json.loads(self.db['server_running'])
else:
return True
@run_in_thread
def save_server_running_status(self, running):
self.db['server_running'] = json.dumps(running)
self.db.commit()
def get_default_data_payment_rate(self):
return self._get_payment_rate("default_data_payment_rate")
def save_default_data_payment_rate(self, rate):
return self._save_payment_rate("default_data_payment_rate", rate)
def get_server_data_payment_rate(self):
return self._get_payment_rate("server_data_payment_rate")
def save_server_data_payment_rate(self, rate):
return self._save_payment_rate("server_data_payment_rate", rate)
def get_server_crypt_info_payment_rate(self):
return self._get_payment_rate("server_crypt_info_payment_rate")
def save_server_crypt_info_payment_rate(self, rate):
return self._save_payment_rate("server_crypt_info_payment_rate", rate)
@run_in_thread
def _get_payment_rate(self, rate_type):
if rate_type in self.db:
return json.loads(self.db[rate_type])
else:
return None
@run_in_thread
def _save_payment_rate(self, rate_type, rate):
if rate is not None:
self.db[rate_type] = json.dumps(rate)
elif rate_type in self.db:
del self.db[rate_type]
self.db.commit()
@run_in_thread
def get_query_handler_status(self, query_identifier):
if json.dumps(('q_h', query_identifier)) in self.db:
return json.loads(self.db[(json.dumps(('q_h', query_identifier)))])
else:
return True
def enable_query_handler(self, query_identifier):
return self._set_query_handler_status(query_identifier, True)
def disable_query_handler(self, query_identifier):
return self._set_query_handler_status(query_identifier, False)
@run_in_thread
def _set_query_handler_status(self, query_identifier, status):
self.db[json.dumps(('q_h', query_identifier))] = json.dumps(status)
self.db.commit()

View file

@@ -15,7 +15,7 @@ OPTIONS = {
'LSUIElement': True,
},
'packages': [
'lbrynet', 'lbryum', 'requests', 'unqlite', 'certifi',
'lbrynet', 'lbryum', 'requests', 'certifi',
'pkg_resources', 'json', 'jsonrpc', 'seccure',
],
}

View file

@@ -37,6 +37,8 @@ python get-pip.py
rm get-pip.py
pip install -r requirements.txt
pip install cython
pip install unqlite
pip install mock pylint coveralls
# have to do `which trial` instead of simply trial because coverage needs the full path

View file

@@ -7,6 +7,5 @@ wget https://www.python.org/ftp/python/2.7.11/python-2.7.11-macosx10.6.pkg
sudo installer -pkg python-2.7.11-macosx10.6.pkg -target /
pip install -U pip
brew update
brew install gmp
brew install openssl
brew link --force openssl

View file

@@ -24,7 +24,6 @@ simplejson==3.8.2
six==1.9.0
slowaes==0.1a1
txJSON-RPC==0.3.1
unqlite==0.2.0
wsgiref==0.1.2
zope.interface==4.1.3
base58==0.2.2

View file

@@ -57,7 +57,6 @@ requires = [
'six==1.9.0',
'slowaes==0.1a1',
'txJSON-RPC==0.3.1',
'unqlite==0.2.0',
'wsgiref==0.1.2',
'zope.interface==4.1.3',
'base58==0.2.2',
@@ -287,7 +286,6 @@ elif platform == WINDOWS:
'six',
'aes',
'txjsonrpc',
'unqlite',
'wsgiref',
'zope.interface',
'os',