Merge branch 'mk_download_dir'

* mk_download_dir:
  make directories in one place when initializing
  create download directory if it doesn't exist
  Fix restart procedure in DaemonControl
  Revert "Bump version: 0.9.2rc1 → 0.9.3rc1"
  Bump version: 0.9.2rc1 → 0.9.3rc1
  Bump version: 0.9.1 → 0.9.2rc1
  update changelog
  handle not being able to decode claim cache file
This commit is contained in:
Alex Grintsvayg 2017-03-23 11:07:15 -04:00
commit 41fbb1399c
6 changed files with 44 additions and 22 deletions

View file

@@ -1,5 +1,5 @@
[bumpversion]
current_version = 0.9.1
current_version = 0.9.2rc1
commit = True
tag = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)((?P<release>[a-z]+)(?P<candidate>\d+))?

View file

@@ -9,23 +9,33 @@ at anytime.
## [Unreleased]
### Added
* Add `wallet_list` command
*
*
*
### Changed
* Don't add expected payment to wallet when payment rate is 0
*
*
*
### Fixed
* Fix restart procedure in DaemonControl
*
* Create download directory if it doesn't exist
*
## [0.9.2rc1] - 2017-03-21
### Added
* Add `wallet_list` command
### Changed
* Don't add expected payment to wallet when payment rate is 0
### Fixed
* Fixed descriptor_get
* Fixed jsonrpc_reflect()
* Fixed api help return
* Fixed API command descriptor_get
* Fixed API command transaction_show
*
*
* Handle failure to decode claim cache file
## [0.9.1] - 2017-03-17
### Fixed

View file

@@ -1,6 +1,6 @@
import logging
__version__ = "0.9.1"
__version__ = "0.9.2rc1"
version = tuple(__version__.split('.'))
logging.getLogger(__name__).addHandler(logging.NullHandler())

View file

@@ -232,16 +232,11 @@ class Daemon(AuthJSONRPCServer):
conf.settings.update(last_version)
self.db_dir = conf.settings['data_dir']
self.download_directory = conf.settings['download_directory']
self.created_data_dir = False
if not os.path.exists(self.db_dir):
os.mkdir(self.db_dir)
self.created_data_dir = True
if conf.settings['BLOBFILES_DIR'] == "blobfiles":
self.blobfile_dir = os.path.join(self.db_dir, "blobfiles")
else:
log.info("Using non-default blobfiles directory: %s", conf.settings['BLOBFILES_DIR'])
self.blobfile_dir = conf.settings['BLOBFILES_DIR']
self.run_on_startup = conf.settings['run_on_startup']
self.data_rate = conf.settings['data_rate']
self.max_key_fee = conf.settings['max_key_fee']
@@ -294,7 +289,7 @@ class Daemon(AuthJSONRPCServer):
}
self.looping_call_manager = LoopingCallManager(calls)
self.sd_identifier = StreamDescriptorIdentifier()
self.stream_info_manager = DBEncryptedFileMetadataManager(self.db_dir)
self.stream_info_manager = None
self.lbry_file_manager = None
@defer.inlineCallbacks
@@ -350,9 +345,13 @@ class Daemon(AuthJSONRPCServer):
name_cache_filename = os.path.join(self.db_dir, "stream_info_cache.json")
if os.path.isfile(name_cache_filename):
with open(name_cache_filename, "r") as name_cache:
self.name_cache = json.loads(name_cache.read())
with open(name_cache_filename, "r") as name_cache_file:
name_cache = name_cache_file.read()
try:
self.name_cache = json.loads(name_cache)
log.info("Loaded claim info cache")
except ValueError:
log.warning("Unable to load claim info cache")
def _check_network_connection(self):
self.connected_to_internet = utils.check_connection()
@@ -595,7 +594,10 @@ class Daemon(AuthJSONRPCServer):
old_revision = 1
self.startup_status = STARTUP_STAGES[1]
log.info("Loading databases")
if self.created_data_dir:
if not os.path.exists(self.download_directory):
os.mkdir(self.download_directory)
if not os.path.exists(self.db_dir):
os.mkdir(self.db_dir)
self._write_db_revision_file(self.current_db_revision)
log.debug("Created the db revision file: %s", self.db_revision_file)
if not os.path.exists(self.blobfile_dir):
@@ -636,6 +638,7 @@ class Daemon(AuthJSONRPCServer):
def _setup_lbry_file_manager(self):
log.info('Starting to setup up file manager')
self.startup_status = STARTUP_STAGES[3]
self.stream_info_manager = DBEncryptedFileMetadataManager(self.db_dir)
yield self.stream_info_manager.setup()
self.lbry_file_manager = EncryptedFileManager(
self.session,

View file

@@ -131,9 +131,11 @@ def start_server_and_listen(launchui, use_auth, analytics_manager, max_tries=5):
break
except Exception as e:
log.exception('Failed to startup')
yield daemon_server.stop()
analytics_manager.send_server_startup_error(str(e))
tries += 1
else:
log.warn("Exceeded max tries to start up, stopping")
reactor.callFromThread(reactor.stop)

View file

@@ -1,6 +1,5 @@
import logging
import os
import sys
from twisted.web import server, guard
from twisted.internet import defer, reactor, error
@@ -19,7 +18,9 @@ log = logging.getLogger(__name__)
class DaemonServer(object):
def __init__(self, analytics_manager=None):
self._api = None
self.root = None
self.server_port = None
self.analytics_manager = analytics_manager
def _setup_server(self, use_auth):
@@ -34,11 +35,11 @@ class DaemonServer(object):
lbrynet_server.requestFactory = DaemonRequest
try:
reactor.listenTCP(
self.server_port = reactor.listenTCP(
conf.settings['api_port'], lbrynet_server, interface=conf.settings['api_host'])
except error.CannotListenError:
log.info('Daemon already running, exiting app')
sys.exit(1)
raise
return defer.succeed(True)
@@ -47,6 +48,12 @@ class DaemonServer(object):
yield self._setup_server(use_auth)
yield self._api.setup(launch_ui)
@defer.inlineCallbacks
def stop(self):
if self._api is not None:
yield self._api._shutdown()
if self.server_port is not None:
yield self.server_port.stopListening()
def get_site_base(use_auth, root):
if use_auth: