add startup scripts

- populate blockchainname.db on first run from an older version
Jack 2016-05-25 22:28:45 -04:00
parent 6b39e549f7
commit 8922fd6dde
6 changed files with 179 additions and 58 deletions
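The mechanism these changes introduce: the daemon's settings file gains a startup_scripts list, each entry naming a script by script_name, with an optional run_once flag that causes the entry to be stripped from the file after its first execution. A last_version record is compared against the running versions to detect the first run after an update, which is what queues the 0.2.5 migration. A sketch of the relevant keys as they might appear in the conf file after updating (values are illustrative, not from a real install):

    # Hypothetical excerpt of the settings the daemon persists to self.daemon_conf
    settings_excerpt = {
        'startup_scripts': [
            {'script_name': 'migrateto025', 'run_once': True},  # removed after first run
        ],
        'last_version': {'lbrynet': '0.2.5', 'lbryum': '1.0'},  # illustrative lbryum version
    }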

lbrynet/__init__.py

@@ -4,5 +4,5 @@ import logging
 logging.getLogger(__name__).addHandler(logging.NullHandler())
-version = (0, 2, 4)
+version = (0, 2, 5)
 __version__ = ".".join([str(x) for x in version])

lbrynet/core/LBRYWallet.py

@@ -295,6 +295,11 @@ class LBRYWallet(object):
         d.addCallback(self._get_stream_info_from_value, name)
         return d

+    def get_txid_for_name(self, name):
+        d = self._get_value_for_name(name)
+        d.addCallback(lambda r: None if 'txid' not in r else r['txid'])
+        return d
+
     def get_stream_info_from_txid(self, name, txid):
         d = self.get_claims_from_tx(txid)

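get_txid_for_name builds on the existing _get_value_for_name, whose result dict may or may not carry a 'txid' key; the lambda normalizes a missing key to None rather than raising. A minimal usage sketch (the name and logging callback are illustrative):

    # Resolve the txid behind a claimed name; fires with None if the claim has no txid
    d = wallet.get_txid_for_name("somename")
    d.addCallback(lambda txid: log.info("txid: %s" % txid))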
lbrynet/lbrynet_daemon/LBRYDaemon.py

@@ -151,6 +151,7 @@ class LBRYDaemon(jsonrpc.JSONRPC):
         self.waiting_on = {}
         self.streams = {}
         self.known_dht_nodes = KNOWN_DHT_NODES
+        self.first_run_after_update = False
         self.platform_info = {
             "processor": platform.processor(),
             "python_version: ": platform.python_version(),
@@ -197,7 +198,9 @@ class LBRYDaemon(jsonrpc.JSONRPC):
             'use_upnp': True,
             'start_lbrycrdd': True,
             'requested_first_run_credits': False,
-            'cache_time': DEFAULT_CACHE_TIME
+            'cache_time': DEFAULT_CACHE_TIME,
+            'startup_scripts': [],
+            'last_version': {'lbrynet': lbrynet_version, 'lbryum': lbryum_version}
         }

         if os.path.isfile(self.daemon_conf):
@@ -234,6 +237,20 @@ class LBRYDaemon(jsonrpc.JSONRPC):
             self.session_settings = settings_dict

+        if 'last_version' in missing_settings.keys():
+            self.session_settings['last_version'] = None
+
+        if self.session_settings['last_version'] != self.default_settings['last_version']:
+            self.session_settings['last_version'] = self.default_settings['last_version']
+            f = open(self.daemon_conf, "w")
+            f.write(json.dumps(self.session_settings))
+            f.close()
+            self.first_run_after_update = True
+            log.info("First run after update")
+            if lbrynet_version == '0.2.5':
+                self.session_settings['startup_scripts'].append({'script_name': 'migrateto025', 'run_once': True})
+
         self.run_on_startup = self.session_settings['run_on_startup']
         self.data_rate = self.session_settings['data_rate']
         self.max_key_fee = self.session_settings['max_key_fee']
@@ -252,6 +269,7 @@ class LBRYDaemon(jsonrpc.JSONRPC):
         self.start_lbrycrdd = self.session_settings['start_lbrycrdd']
         self.requested_first_run_credits = self.session_settings['requested_first_run_credits']
         self.cache_time = self.session_settings['cache_time']
+        self.startup_scripts = self.session_settings['startup_scripts']

         if os.path.isfile(os.path.join(self.db_dir, "stream_info_cache.json")):
             f = open(os.path.join(self.db_dir, "stream_info_cache.json"), "r")
@@ -394,6 +412,10 @@ class LBRYDaemon(jsonrpc.JSONRPC):
             self.announced_startup = True
             self.startup_status = STARTUP_STAGES[5]
             log.info("[" + str(datetime.now()) + "] Started lbrynet-daemon")
+            if len(self.startup_scripts):
+                log.info("Scheduling scripts")
+                reactor.callLater(3, self._run_scripts)
+
             # self.lbrynet_connection_checker.start(3600)

         if self.first_run:
@@ -608,6 +630,9 @@ class LBRYDaemon(jsonrpc.JSONRPC):
     def _shutdown(self):
         log.info("Closing lbrynet session")
         log.info("Status at time of shutdown: " + self.startup_status[0])
+        self.internet_connection_checker.stop()
+        self.version_checker.stop()
+        self.connection_problem_checker.stop()

         d = self._upload_log(name_prefix="close", exclude_previous=False if self.first_run else True)
         d.addCallback(lambda _: self._stop_server())
@@ -1017,19 +1042,31 @@ class LBRYDaemon(jsonrpc.JSONRPC):
             f.close()
             return defer.succeed(True)

-    def _resolve_name(self, name):
+    def _resolve_name(self, name, force_refresh=False):
         def _cache_stream_info(stream_info):
+            def _add_txid(txid):
+                self.name_cache[name]['txid'] = txid
+                return defer.succeed(None)
+
             self.name_cache[name] = {'claim_metadata': stream_info, 'timestamp': self._get_long_count_timestamp()}
-            d = self._update_claim_cache()
+            d = self.session.wallet.get_txid_for_name(name)
+            d.addCallback(_add_txid)
+            d.addCallback(lambda _: self._update_claim_cache())
             d.addCallback(lambda _: self.name_cache[name]['claim_metadata'])
             return d

-        if name in self.name_cache.keys():
-            if (self._get_long_count_timestamp() - self.name_cache[name]['timestamp']) < self.cache_time:
-                log.info("[" + str(datetime.now()) + "] Returning cached stream info for lbry://" + name)
-                d = defer.succeed(self.name_cache[name]['claim_metadata'])
-            else:
-                log.info("[" + str(datetime.now()) + "] Refreshing stream info for lbry://" + name)
-                d = self.session.wallet.get_stream_info_for_name(name)
-                d.addCallbacks(_cache_stream_info, lambda _: defer.fail(UnknownNameError))
+        if not force_refresh:
+            if name in self.name_cache.keys():
+                if (self._get_long_count_timestamp() - self.name_cache[name]['timestamp']) < self.cache_time:
+                    log.info("[" + str(datetime.now()) + "] Returning cached stream info for lbry://" + name)
+                    d = defer.succeed(self.name_cache[name]['claim_metadata'])
+                else:
+                    log.info("[" + str(datetime.now()) + "] Refreshing stream info for lbry://" + name)
+                    d = self.session.wallet.get_stream_info_for_name(name)
+                    d.addCallbacks(_cache_stream_info, lambda _: defer.fail(UnknownNameError))
+            else:
+                log.info("[" + str(datetime.now()) + "] Resolving stream info for lbry://" + name)
+                d = self.session.wallet.get_stream_info_for_name(name)
+                d.addCallbacks(_cache_stream_info, lambda _: defer.fail(UnknownNameError))
         else:
@@ -1221,6 +1258,31 @@ class LBRYDaemon(jsonrpc.JSONRPC):
         requests.post(URL, json.dumps({"text": msg}))
         return defer.succeed(None)

+    def _run_scripts(self):
+        if len([k for k in self.startup_scripts if 'run_once' in k.keys()]):
+            log.info("Removing one time startup scripts")
+            f = open(self.daemon_conf, "r")
+            initialsettings = json.loads(f.read())
+            f.close()
+            t = [s for s in self.startup_scripts if 'run_once' not in s.keys()]
+            initialsettings['startup_scripts'] = t
+            f = open(self.daemon_conf, "w")
+            f.write(json.dumps(initialsettings))
+            f.close()
+
+        for script in self.startup_scripts:
+            if script['script_name'] == 'migrateto025':
+                log.info("Running migrator to 0.2.5")
+                from lbrynet.lbrynet_daemon.daemon_scripts.migrateto025 import run as run_migrate
+                run_migrate(self)
+
+            if script['script_name'] == 'Autofetcher':
+                log.info("Starting autofetcher script")
+                from lbrynet.lbrynet_daemon.daemon_scripts.Autofetcher import run as run_autofetcher
+                run_autofetcher(self)
+
+        return defer.succeed(None)
+
     def _render_response(self, result, code):
         return defer.succeed({'result': result, 'code': code})

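Worth noting in _run_scripts: the conf file is rewritten without the run_once entries before any script executes, so a crash mid-migration cannot re-queue a one-time script, though it also means a failed migration will not retry. A standalone sketch of the same prune-then-run pattern (the path and registry dict are illustrative, not part of the commit):

    import json

    def prune_and_run(conf_path, scripts, registry):
        # Persist only the scripts that should survive this run
        survivors = [s for s in scripts if 'run_once' not in s]
        with open(conf_path) as f:
            conf = json.load(f)
        conf['startup_scripts'] = survivors
        with open(conf_path, 'w') as f:
            json.dump(conf, f)
        # Then run the full in-memory list, one-time entries included
        for s in scripts:
            registry[s['script_name']]()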
lbrynet/lbrynet_daemon/LBRYDaemonServer.py

@@ -189,50 +189,6 @@ class HostedLBRYFile(resource.Resource):
             call.cancel()

-class MyLBRYFiles(resource.Resource):
-    isLeaf = False
-
-    def __init__(self):
-        resource.Resource.__init__(self)
-        self.files_table = None
-
-    def delayed_render(self, request, result):
-        request.write(result.encode('utf-8'))
-        request.finish()
-
-    def render_GET(self, request):
-        self.files_table = None
-        api = jsonrpc.Proxy(API_CONNECTION_STRING)
-        d = api.callRemote("get_lbry_files", {})
-        d.addCallback(self._get_table)
-        d.addCallback(lambda results: self.delayed_render(request, results))
-        return server.NOT_DONE_YET
-
-    def _get_table(self, files):
-        if not self.files_table:
-            self.files_table = r'<html><head><title>My LBRY files</title></head><body><table border="1">'
-            self.files_table += r'<tr>'
-            self.files_table += r'<td>Stream name</td>'
-            self.files_table += r'<td>Completed</td>'
-            self.files_table += r'<td>Toggle</td>'
-            self.files_table += r'<td>Remove</td>'
-            self.files_table += r'</tr>'
-            return self._get_table(files)
-        if not len(files):
-            self.files_table += r'</table></body></html>'
-            return self.files_table
-        else:
-            f = files.pop()
-            self.files_table += r'<tr>'
-            self.files_table += r'<td>%s</td>' % (f['stream_name'])
-            self.files_table += r'<td>%s</td>' % (f['completed'])
-            self.files_table += r'<td>Start</td>' if f['stopped'] else r'<td>Stop</td>'
-            self.files_table += r'<td>Delete</td>'
-            self.files_table += r'</tr>'
-            return self._get_table(files)
-
 class LBRYDaemonServer(object):
     def __init__(self):
         self.data_dir = user_data_dir("LBRY")

@@ -336,12 +292,9 @@ class LBRYDaemonServer(object):
     def _setup_server(self, ui_ver, wallet):
         self._api = LBRYDaemon(ui_ver, wallet_type=wallet)
         self.root = LBRYindex(self.ui_dir)
-        self.root.putChild("css", static.File(os.path.join(self.ui_dir, "css")))
-        self.root.putChild("font", static.File(os.path.join(self.ui_dir, "font")))
-        self.root.putChild("img", static.File(os.path.join(self.ui_dir, "img")))
-        self.root.putChild("js", static.File(os.path.join(self.ui_dir, "js")))
+        for d in [i[0] for i in os.walk(self.ui_dir) if os.path.dirname(i[0]) == self.ui_dir]:
+            self.root.putChild(os.path.basename(d), static.File(d))
         self.root.putChild("view", HostedLBRYFile(self._api))
-        self.root.putChild("files", MyLBRYFiles())
         self.root.putChild(API_ADDRESS, self._api)
         return defer.succeed(True)

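The four hard-coded putChild calls are replaced by a walk that mounts every immediate subdirectory of ui_dir, so new asset folders are served without code changes. Since os.walk yields (dirpath, dirnames, filenames) tuples, i[0] is a directory path, and the os.path.dirname(i[0]) == self.ui_dir test keeps only top-level children. A sketch of an equivalent, arguably more direct selection (not the committed code):

    import os

    def immediate_subdirs(root):
        # The same directories the comprehension selects: direct children of root
        return [os.path.join(root, name) for name in os.listdir(root)
                if os.path.isdir(os.path.join(root, name))]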
lbrynet/lbrynet_daemon/daemon_scripts/Autofetcher.py

@@ -0,0 +1,68 @@
+import json
+import logging.handlers
+import sys
+import os
+
+from appdirs import user_data_dir
+from twisted.internet.task import LoopingCall
+from twisted.internet import reactor
+
+if sys.platform != "darwin":
+    log_dir = os.path.join(os.path.expanduser("~"), ".lbrynet")
+else:
+    log_dir = user_data_dir("LBRY")
+
+if not os.path.isdir(log_dir):
+    os.mkdir(log_dir)
+
+LOG_FILENAME = os.path.join(log_dir, 'lbrynet-daemon.log')
+
+if os.path.isfile(LOG_FILENAME):
+    f = open(LOG_FILENAME, 'r')
+    PREVIOUS_LOG = len(f.read())
+    f.close()
+else:
+    PREVIOUS_LOG = 0
+
+log = logging.getLogger(__name__)
+handler = logging.handlers.RotatingFileHandler(LOG_FILENAME, maxBytes=2097152, backupCount=5)
+log.addHandler(handler)
+log.setLevel(logging.INFO)
+
+
+class Autofetcher(object):
+    """
+    Download name claims as they occur
+    """
+
+    def __init__(self, api):
+        self._api = api
+        self._checker = LoopingCall(self._check_for_new_claims)
+        self.best_block = None
+
+    def start(self):
+        reactor.addSystemEventTrigger('before', 'shutdown', self.stop)
+        self._checker.start(5)
+
+    def stop(self):
+        log.info("Stopping autofetcher")
+        self._checker.stop()
+
+    def _check_for_new_claims(self):
+        block = self._api.get_best_blockhash()
+        if block != self.best_block:
+            log.info("Checking new block for name claims, block hash: %s" % block)
+            self.best_block = block
+            transactions = self._api.get_block({'blockhash': block})['tx']
+            for t in transactions:
+                c = self._api.get_claims_for_tx({'txid': t})
+                if len(c):
+                    for i in c:
+                        log.info("Downloading stream for claim txid: %s" % t)
+                        self._api.get({'name': t, 'stream_info': json.loads(i['value'])})
+
+
+def run(api):
+    fetcher = Autofetcher(api)
+    fetcher.start()

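Because _run_scripts also matches a script named 'Autofetcher', the fetcher can be enabled by adding an entry without a run_once flag, so it starts on every daemon launch. A hypothetical way to switch it on (the conf path is whatever self.daemon_conf points to on the system; this snippet is illustrative):

    import json

    conf_path = "/path/to/daemon_conf"  # illustrative; see self.daemon_conf
    with open(conf_path) as f:
        conf = json.load(f)
    conf.setdefault('startup_scripts', []).append({'script_name': 'Autofetcher'})
    with open(conf_path, 'w') as f:
        json.dump(conf, f)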
lbrynet/lbrynet_daemon/daemon_scripts/migrateto025.py

@@ -0,0 +1,33 @@
+from twisted.internet import defer
+
+
+class migrator(object):
+    """
+    Re-resolve lbry names to write missing data to blockchain.db and to cache the nametrie
+    """
+
+    def __init__(self, api):
+        self._api = api
+
+    def start(self):
+        def _resolve_claims(claimtrie):
+            claims = [i for i in claimtrie if 'txid' in i.keys()]
+            r = defer.DeferredList([self._api._resolve_name(claim['name'], force_refresh=True) for claim in claims], consumeErrors=True)
+            return r
+
+        def _restart_lbry_files():
+            def _restart_lbry_file(lbry_file):
+                return lbry_file.restore()
+
+            r = defer.DeferredList([_restart_lbry_file(lbry_file) for lbry_file in self._api.lbry_file_manager.lbry_files if not lbry_file.txid], consumeErrors=True)
+            r.callback(None)
+            return r
+
+        d = self._api.session.wallet.get_nametrie()
+        d.addCallback(_resolve_claims)
+        d.addCallback(lambda _: _restart_lbry_files())
+
+
+def run(api):
+    refresher = migrator(api)
+    refresher.start()
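Taken together with the daemon changes, the migration re-resolves every claim in the nametrie with force_refresh=True, and the reworked _cache_stream_info stores the claim's txid next to its metadata, which (per the commit message and the docstring above) is what populates blockchainname.db on the first run after updating. Each name_cache entry then ends up shaped roughly like this sketch (keys per the diff, values illustrative):

    # Illustrative shape of one name_cache entry after the migration runs
    name_cache_entry = {
        'claim_metadata': {'stream_hash': '...'},  # resolved claim value; keys illustrative
        'timestamp': 1464229200,                   # from _get_long_count_timestamp()
        'txid': 'f0c0...',                         # added by _add_txid
    }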