Merge remote-tracking branch 'origin/master' into refactor-settings
Conflicts:
	lbrynet/lbrynet_daemon/Daemon.py
	lbrynet/lbrynet_daemon/DaemonControl.py
	lbrynet/lbrynet_daemon/DaemonServer.py
commit 2cd6c644a0
4 changed files with 60 additions and 28 deletions
@@ -33,7 +33,7 @@ On Ubuntu or Mint you can install the prerequisites by running
 ```
 sudo apt-get install libgmp3-dev build-essential python2.7 python2.7-dev \
-     python-pip git python-virtualenv
+     python-pip git python-virtualenv libssl-dev libffi-dev
 ```
 
 ##### OSX and Linux Installation
 
@@ -139,3 +139,17 @@ class JsonFormatter(logging.Formatter):
         if record.exc_info:
             data['exc_info'] = self.formatException(record.exc_info)
         return json.dumps(data)
+
+
+def failure(failure, log, msg, *args):
+    """Log a failure message from a deferred.
+
+    Args:
+        failure: twisted.python.failure.Failure
+        log: a python logger instance
+        msg: the message to log. Can use normal logging string interpolation.
+            the last argument will be set to the error message from the failure.
+        args: values to substitute into `msg`
+    """
+    args += (failure.getErrorMessage(),)
+    log.error(msg, *args, exc_info=failure.getTracebackObject())
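
The failure() helper added above is meant to be hung on a Deferred's errback chain. A minimal usage sketch, assuming the helper lives at lbrynet.core.log_support; the deferred and the _risky_setup function below are illustrative, not part of the diff:

```python
import logging

from twisted.internet import defer

from lbrynet.core import log_support  # assumed location of the helper added above

log = logging.getLogger(__name__)


def _risky_setup():
    # stand-in for any startup step that can raise
    raise IOError("could not reach peer")


d = defer.maybeDeferred(_risky_setup)
# the trailing %s is filled in with failure.getErrorMessage() by log_support.failure
d.addErrback(lambda f: log_support.failure(f, log, "Setup failed: %s"))
```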
@@ -119,6 +119,8 @@ BAD_REQUEST = 400
 NOT_FOUND = 404
 OK_CODE = 200
 
+PENDING_LBRY_ID = "not set"
+
 
 class Checker:
     """The looping calls the daemon runs"""
@@ -279,7 +281,10 @@ class Daemon(AuthJSONRPCServer):
         self.first_run_after_update = False
         self.uploaded_temp_files = []
         self._session_id = base58.b58encode(generate_id())
-        self.lbryid = None
+
+        self.analytics_manager = None
+        self.lbryid = PENDING_LBRY_ID
+
         self.daemon_conf = os.path.join(self.db_dir, 'daemon_settings.yml')
 
 
@@ -414,11 +419,8 @@ class Daemon(AuthJSONRPCServer):
         d.addCallback(lambda _: self._setup_server())
         d.addCallback(lambda _: _log_starting_vals())
         d.addCallback(lambda _: _announce_startup())
-        # TODO: handle errors here
         d.callback(None)
-
-        return d
-
+        return defer.succeed(None)
 
     def _get_platform(self):
         r = {
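
A note on the return-value change above: setup() still fires its internal chain with d.callback(None), but the caller now gets an already-fired deferred instead of the chain itself. A small Twisted sketch of the difference, with illustrative callbacks that are not taken from the diff:

```python
from twisted.internet import defer


def build_chain():
    # stand-in for the setup chain built inside Daemon.setup()
    d = defer.Deferred()
    d.addCallback(lambda _: "step one done")
    d.addCallback(lambda result: result + ", step two done")
    return d


# Returning the chain itself (the old "return d"): the caller's callback
# runs only after every step in the chain has completed.
chain = build_chain()
chain.addCallback(lambda result: "caller sees: %s" % result)
chain.callback(None)

# Returning defer.succeed(None) (the new behaviour): the caller's callback
# fires immediately, independent of how far the setup chain has progressed.
done = defer.succeed(None)
done.addCallback(lambda _: "caller continues right away")
```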
@@ -629,23 +631,24 @@ class Daemon(AuthJSONRPCServer):
         for lm, lp in [('lbrynet', lbrynet_log)]:
             if os.path.isfile(lp):
                 if exclude_previous:
-                    f = open(lp, "r")
-                    f.seek(PREVIOUS_NET_LOG)
-                    log_contents = f.read()
-                    f.close()
+                    with open(lp, "r") as f:
+                        f.seek(PREVIOUS_NET_LOG)
+                        log_contents = f.read()
                 else:
-                    f = open(lp, "r")
-                    log_contents = f.read()
-                    f.close()
+                    with open(lp, "r") as f:
+                        log_contents = f.read()
+                if self.lbryid is not PENDING_LBRY_ID:
+                    id_hash = base58.b58encode(self.lbryid)[:20]
+                else:
+                    id_hash = self.lbryid
                 params = {
                     'date': datetime.utcnow().strftime('%Y%m%d-%H%M%S'),
-                    'hash': base58.b58encode(self.lbryid)[:20],
+                    'hash': id_hash,
                     'sys': platform.system(),
                     'type': "%s-%s" % (lm, log_type) if log_type else lm,
                     'log': log_contents
                 }
                 requests.post(lbrynet_settings.LOG_POST_URL, params)
-
                 return defer.succeed(None)
         else:
             return defer.succeed(None)
@@ -661,13 +664,19 @@ class Daemon(AuthJSONRPCServer):
         log.info("Closing lbrynet session")
         log.info("Status at time of shutdown: " + self.startup_status[0])
         self.looping_call_manager.shutdown()
-        self.analytics_manager.shutdown()
+        if self.analytics_manager:
+            self.analytics_manager.shutdown()
         if self.lbry_ui_manager.update_checker.running:
             self.lbry_ui_manager.update_checker.stop()
 
         self._clean_up_temp_files()
 
-        d = self._upload_log(log_type="close", exclude_previous=False if self.first_run else True)
+        try:
+            d = self._upload_log(
+                log_type="close", exclude_previous=False if self.first_run else True)
+        except Exception:
+            log.warn('Failed to upload log', exc_info=True)
+            d = defer.succeed(None)
         d.addCallback(lambda _: self._stop_server())
         d.addCallback(lambda _: self._stop_reflector())
         d.addErrback(lambda err: True)
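
The shutdown path above now guards the log upload twice: a try/except for exceptions raised before a deferred exists, and the existing d.addErrback(lambda err: True) for failures that happen later in the chain. A self-contained sketch of those two layers, using a stub in place of Daemon._upload_log:

```python
from twisted.internet import defer


def _upload_log_stub(fail_synchronously=False):
    # stand-in for Daemon._upload_log: it can raise before returning a
    # deferred, or return a deferred that fails later
    if fail_synchronously:
        raise IOError("log file unreadable")
    return defer.fail(IOError("upload endpoint unreachable"))


# Layer 1: a synchronous exception is caught and replaced with an
# already-fired deferred, so the rest of shutdown still runs.
try:
    d = _upload_log_stub(fail_synchronously=True)
except Exception:
    d = defer.succeed(None)

# Layer 2: an asynchronous failure is swallowed by a terminal errback,
# mirroring d.addErrback(lambda err: True) in the diff.
d.addCallback(lambda _: _upload_log_stub())
d.addErrback(lambda err: True)
```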
@@ -723,10 +732,10 @@ class Daemon(AuthJSONRPCServer):
         self.startup_status = STARTUP_STAGES[1]
         log.info("Loading databases...")
         if self.created_data_dir:
-            db_revision = open(os.path.join(self.db_dir, "db_revision"), mode='w')
-            db_revision.write(str(self.current_db_revision))
-            db_revision.close()
-            log.debug("Created the db revision file: %s", str(os.path.join(self.db_dir, "db_revision")))
+            db_revision_path = os.path.join(self.db_dir, "db_revision")
+            with open(db_revision_path, mode='w') as db_revision:
+                db_revision.write(str(self.current_db_revision))
+            log.debug("Created the db revision file: %s", db_revision_path)
         if not os.path.exists(self.blobfile_dir):
             os.mkdir(self.blobfile_dir)
             log.debug("Created the blobfile directory: %s", str(self.blobfile_dir))
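
For context, the db_revision file written above is the same file the migration check in the next hunk reads back with int(open(db_revision_file).read().strip()). A minimal round-trip sketch; the temporary directory and revision value are illustrative:

```python
import os
import tempfile

db_dir = tempfile.mkdtemp()
db_revision_path = os.path.join(db_dir, "db_revision")
current_db_revision = 2  # illustrative value, not taken from the diff

# written as a string, exactly as in the hunk above
with open(db_revision_path, mode='w') as db_revision:
    db_revision.write(str(current_db_revision))

# read back as an int, exactly as in the migration check below
old_revision = int(open(db_revision_path).read().strip())
assert old_revision == current_db_revision
```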
@@ -736,6 +745,8 @@ class Daemon(AuthJSONRPCServer):
         db_revision_file = os.path.join(self.db_dir, "db_revision")
         if os.path.exists(db_revision_file):
             old_revision = int(open(db_revision_file).read().strip())
+        if old_revision > self.current_db_revision:
+            return defer.fail(Exception('This version of lbrynet is not compatible with the database'))
         if old_revision < self.current_db_revision:
             from lbrynet.db_migrator import dbmigrator
             log.info("Upgrading your databases...")
@@ -763,7 +774,7 @@ class Daemon(AuthJSONRPCServer):
         return d
 
     def _set_lbryid(self, lbryid):
-        if lbryid is None:
+        if lbryid is PENDING_LBRY_ID:
             return self._make_lbryid()
         else:
             log.info("LBRY ID: " + base58.b58encode(lbryid))
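
The PENDING_LBRY_ID sentinel replaces None as the "not yet set" marker for self.lbryid. Because the check is an identity comparison (is / is not), it only works while every comparison references the same module-level constant rather than a freshly built equal string. A small sketch of the pattern; the class and messages are illustrative only:

```python
PENDING_LBRY_ID = "not set"  # one shared sentinel object, compared by identity


class FakeDaemon(object):
    def __init__(self):
        # start out pointing at the shared sentinel itself
        self.lbryid = PENDING_LBRY_ID

    def set_lbryid(self, lbryid):
        if lbryid is PENDING_LBRY_ID:
            print "lbryid not set yet, generating one"
        else:
            print "lbryid already set, keeping it"


daemon = FakeDaemon()
daemon.set_lbryid(daemon.lbryid)   # -> lbryid not set yet, generating one
daemon.set_lbryid("some-real-id")  # -> lbryid already set, keeping it
```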
@@ -136,6 +136,7 @@ def start():
     d = lbry.start()
     if args.launchui:
         d.addCallback(lambda _: webbrowser.open(settings.UI_ADDRESS))
+    d.addErrback(log_and_kill)
 
     if settings.use_auth_http:
         log.info("Using authenticated API")
@@ -163,5 +164,11 @@ def start():
         print "Not connected to internet, unable to start"
         return
 
+
+def log_and_kill(failure):
+    log_support.failure(failure, log, 'Failed to startup: %s')
+    reactor.stop()
+
+
 if __name__ == "__main__":
     start()