forked from LBRYCommunity/lbry-sdk
Merge branch 'daemon_fixes'
* daemon_fixes:
  fix tests
  no datas
  fix daemon shutdown
  dont ignore SIGTERM/SIGINT when already shutting down
  removed old /view and /upload endpoints, moved api to root path
  fixed shutdown messages
  build from setuptools entry_point
commit 350386ba21
19 changed files with 119 additions and 260 deletions
@@ -20,9 +20,12 @@ at anytime.
 ### Fixed
   *
   *
   * Fixed incorrect formatting of "amount" fields
+  * Fixed handling of SIGINT, SIGTERM.
+  * Fixed shutdown sequence

 ### Deprecated
   *
+  * The API will no longer be served at the /lbryapi path. It will now be at the root.
   *

 ### Changed

@@ -45,6 +48,8 @@ at anytime.
 ### Removed
   * Removed TempBlobManager
+  * Removed old /view and /upload API paths
   *


 ## [0.14.2] - 2017-07-24

@@ -2,13 +2,14 @@
 import platform
 import os

 dir = 'build';
 cwd = os.getcwd()
 if os.path.basename(cwd) != dir:
     raise Exception('pyinstaller build needs to be run from the ' + dir + ' directory')
 repo_base = os.path.abspath(os.path.join(cwd, '..'))

+execfile(os.path.join(cwd, "entrypoint.py"))  # ghetto import

 system = platform.system()
 if system == 'Darwin':

@@ -21,28 +22,10 @@ else:
     print 'Warning: System {} has no icons'.format(system)
     icns = None

 block_cipher = None

+a = Entrypoint('lbrynet', 'console_scripts', 'lbrynet-cli', pathex=[cwd])
-a = Analysis(
-    ['cli.py'],
-    pathex=[cwd],
-    binaries=None,
-    datas=[],
-    hiddenimports=[],
-    hookspath=[],
-    runtime_hooks=[],
-    excludes=[],
-    win_no_prefer_redirects=False,
-    win_private_assemblies=False,
-    cipher=block_cipher
-)
-
-pyz = PYZ(
-    a.pure,
-    a.zipped_data,
-    cipher=block_cipher
-)
+pyz = PYZ(a.pure, a.zipped_data)

 exe = EXE(
     pyz,

@@ -1,7 +0,0 @@
-from lbrynet.daemon import DaemonCLI
-import logging
-
-logging.basicConfig()
-
-if __name__ == '__main__':
-    DaemonCLI.main()

@@ -4,13 +4,14 @@ import os

 import lbryum

 dir = 'build';
 cwd = os.getcwd()
 if os.path.basename(cwd) != dir:
     raise Exception('pyinstaller build needs to be run from the ' + dir + ' directory')
 repo_base = os.path.abspath(os.path.join(cwd, '..'))

+execfile(os.path.join(cwd, "entrypoint.py"))  # ghetto import

 system = platform.system()
 if system == 'Darwin':

@@ -24,44 +25,15 @@ else:
     icns = None

 block_cipher = None

-languages = (
-    'chinese_simplified.txt', 'japanese.txt', 'spanish.txt',
-    'english.txt', 'portuguese.txt'
-)
-
 datas = [
-    (
-        os.path.join(os.path.dirname(lbryum.__file__), 'wordlist', language),
-        'lbryum/wordlist'
-    )
-    for language in languages
+    (os.path.join(os.path.dirname(lbryum.__file__), 'wordlist', language + '.txt'), 'lbryum/wordlist')
+    for language in ('chinese_simplified', 'japanese', 'spanish','english', 'portuguese')
 ]

-a = Analysis(
-    ['daemon.py'],
-    pathex=[cwd],
-    binaries=None,
-    datas=datas,
-    hiddenimports=[],
-    hookspath=[],
-    runtime_hooks=[],
-    excludes=[],
-    win_no_prefer_redirects=False,
-    win_private_assemblies=False,
-    cipher=block_cipher
-)
-
-pyz = PYZ(
-    a.pure, a.zipped_data,
-    cipher=block_cipher
-)
+a = Entrypoint('lbrynet', 'console_scripts', 'lbrynet-daemon', pathex=[cwd], datas=datas)
+
+pyz = PYZ(a.pure, a.zipped_data)

 exe = EXE(
     pyz,

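For context on the datas change: each element PyInstaller expects in `datas` is a (source path on disk, destination directory inside the bundle) pair, so the comprehension above expands to something like the sketch below. The source paths shown are illustrative; the real ones depend on where lbryum is installed.

```python
# Illustrative expansion of the datas comprehension above. Each tuple maps a
# wordlist file from the installed lbryum package into lbryum/wordlist inside
# the frozen bundle. Paths are made up for the example.
datas = [
    ('/site-packages/lbryum/wordlist/chinese_simplified.txt', 'lbryum/wordlist'),
    ('/site-packages/lbryum/wordlist/japanese.txt', 'lbryum/wordlist'),
    ('/site-packages/lbryum/wordlist/spanish.txt', 'lbryum/wordlist'),
    ('/site-packages/lbryum/wordlist/english.txt', 'lbryum/wordlist'),
    ('/site-packages/lbryum/wordlist/portuguese.txt', 'lbryum/wordlist'),
]
```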
@@ -1,4 +0,0 @@
-from lbrynet.daemon import DaemonControl
-
-if __name__ == '__main__':
-    DaemonControl.start()

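These thin launcher scripts are no longer needed because the spec files now build straight from the setuptools entry points ("build from setuptools entry_point" in the merge message). In setup.py terms the implied declarations would look roughly like the sketch below; the module paths are inferred from the deleted scripts rather than quoted from the repository's setup.py.

```python
# Hypothetical excerpt of the console_scripts entry points implied by the
# Entrypoint() calls in the spec files. The target modules are assumptions
# based on the two deleted launcher scripts (DaemonCLI.main, DaemonControl.start).
from setuptools import setup

setup(
    name='lbrynet',
    entry_points={
        'console_scripts': [
            'lbrynet-cli = lbrynet.daemon.DaemonCLI:main',
            'lbrynet-daemon = lbrynet.daemon.DaemonControl:start',
        ],
    },
)
```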
build/entrypoint.py (new file, 47 lines)

@@ -0,0 +1,47 @@
+# https://github.com/pyinstaller/pyinstaller/wiki/Recipe-Setuptools-Entry-Point
+def Entrypoint(dist, group, name,
+               scripts=None, pathex=None, binaries=None, datas=None,
+               hiddenimports=None, hookspath=None, excludes=None, runtime_hooks=None,
+               cipher=None, win_no_prefer_redirects=False, win_private_assemblies=False):
+    import pkg_resources
+
+    # get toplevel packages of distribution from metadata
+    def get_toplevel(dist):
+        distribution = pkg_resources.get_distribution(dist)
+        if distribution.has_metadata('top_level.txt'):
+            return list(distribution.get_metadata('top_level.txt').split())
+        else:
+            return []
+
+    hiddenimports = hiddenimports or []
+    packages = []
+    for distribution in hiddenimports:
+        packages += get_toplevel(distribution)
+
+    scripts = scripts or []
+    pathex = pathex or []
+    # get the entry point
+    ep = pkg_resources.get_entry_info(dist, group, name)
+    # insert path of the egg at the very front of the search path
+    pathex = [ep.dist.location] + pathex
+    # script name must not be a valid module name to avoid name clashes on import
+    script_path = os.path.join(workpath, name + '-script.py')
+    print "creating script for entry point", dist, group, name
+    with open(script_path, 'w') as fh:
+        fh.write("import {0}\n".format(ep.module_name))
+        fh.write("{0}.{1}()\n".format(ep.module_name, '.'.join(ep.attrs)))
+        for package in packages:
+            fh.write("import {0}\n".format(package))
+
+    return Analysis([script_path] + scripts,
+                    pathex=pathex,
+                    binaries=binaries,
+                    datas=datas,
+                    hiddenimports=hiddenimports,
+                    hookspath=hookspath,
+                    excludes=excludes,
+                    runtime_hooks=runtime_hooks,
+                    cipher=cipher,
+                    win_no_prefer_redirects=win_no_prefer_redirects,
+                    win_private_assemblies=win_private_assemblies
+                    )

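Entrypoint() asks pkg_resources for the installed console script, writes a throwaway name-script.py wrapper that imports and calls it, and hands that wrapper to PyInstaller's Analysis. A minimal standalone sketch of the lookup itself is shown below, using pip's console script as a stand-in for lbrynet-daemon since it is commonly installed.

```python
# Minimal sketch of the pkg_resources lookup that Entrypoint() relies on.
# 'pip' is only a stand-in distribution/entry point for illustration.
import pkg_resources

ep = pkg_resources.get_entry_info('pip', 'console_scripts', 'pip')
print(ep.module_name)       # module the generated wrapper script will import
print('.'.join(ep.attrs))   # attribute path the wrapper will call, e.g. 'main'
print(ep.dist.location)     # path that Entrypoint() prepends to pathex
```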
@@ -233,7 +233,7 @@ class Api(object):
     def track(self, event):
         """Send a single tracking event"""
         if not self._enabled:
-            return defer.succeed('analytics disabled')
+            return defer.succeed('Analytics disabled')

         def _log_error(failure, event):
             log.warning('Failed to send track event. %s (%s)', failure.getTraceback(), str(event))

@@ -27,12 +27,12 @@ class BlobAvailabilityTracker(object):
         self._check_mine = LoopingCall(self._update_mine)

     def start(self):
-        log.info("Starting %s", self)
+        log.info("Starting blob availability tracker.")
         self._check_popular.start(600)
         self._check_mine.start(600)

     def stop(self):
-        log.info("Stopping %s", self)
+        log.info("Stopping blob availability tracker.")
         if self._check_popular.running:
             self._check_popular.stop()
         if self._check_mine.running:

@@ -28,12 +28,12 @@ class DiskBlobManager(DHTHashSupplier):

     @defer.inlineCallbacks
     def setup(self):
-        log.info("Setting up the DiskBlobManager. blob_dir: %s, db_file: %s", str(self.blob_dir),
+        log.info("Starting disk blob manager. blob_dir: %s, db_file: %s", str(self.blob_dir),
                  str(self.db_file))
         yield self._open_db()

     def stop(self):
-        log.info("Stopping the DiskBlobManager")
+        log.info("Stopping disk blob manager.")
         self.db_conn.close()
         return defer.succeed(True)

@@ -69,7 +69,7 @@ class RateLimiter(object):
         self.protocols = []

     def start(self):
-        log.info("Starting %s", self)
+        log.info("Starting rate limiter.")
         self.tick_call = task.LoopingCall(self.tick)
         self.tick_call.start(self.tick_interval)

@@ -80,7 +80,7 @@ class RateLimiter(object):
         self.unthrottle_ul()

     def stop(self):
-        log.info("Stopping %s", self)
+        log.info("Stopping rate limiter.")
         if self.tick_call is not None:
             self.tick_call.stop()
             self.tick_call = None

@@ -140,7 +140,7 @@ class Session(object):
     def setup(self):
         """Create the blob directory and database if necessary, start all desired services"""

-        log.debug("Setting up the lbry session")
+        log.debug("Starting session.")

         if self.lbryid is None:
             self.lbryid = generate_id()

@@ -169,7 +169,7 @@ class Session(object):

     def shut_down(self):
         """Stop all services"""
-        log.info('Shutting down %s', self)
+        log.info('Stopping session.')
         ds = []
         if self.blob_tracker is not None:
             ds.append(defer.maybeDeferred(self.blob_tracker.stop))

@@ -320,7 +320,7 @@ class Session(object):
         return dl

     def _unset_upnp(self):
-        log.info("Unsetting upnp for %s", self)
+        log.info("Unsetting upnp for session")

         def threaded_unset_upnp():
             u = miniupnpc.UPnP()

@@ -448,6 +448,7 @@ class Wallet(object):
         self._batch_count = 20

     def start(self):
+        log.info("Starting wallet.")
         def start_manage():
             self.stopped = False
             self.manage()

@@ -472,7 +473,7 @@ class Wallet(object):
             log.error("An error occurred stopping the wallet: %s", err.getTraceback())

     def stop(self):
-        log.info("Stopping %s", self)
+        log.info("Stopping wallet.")
         self.stopped = True
         # If self.next_manage_call is None, then manage is currently running or else
         # start has not been called, so set stopped and do nothing else.

@@ -25,7 +25,7 @@ class DHTPeerFinder(object):
         self.next_manage_call = reactor.callLater(60, self.run_manage_loop)

     def stop(self):
-        log.info("Stopping %s", self)
+        log.info("Stopping DHT peer finder.")
         if self.next_manage_call is not None and self.next_manage_call.active():
             self.next_manage_call.cancel()
             self.next_manage_call = None

@@ -28,7 +28,7 @@ class DHTHashAnnouncer(object):
         self.next_manage_call = utils.call_later(self.ANNOUNCE_CHECK_INTERVAL, self.run_manage_loop)

     def stop(self):
-        log.info("Stopping %s", self)
+        log.info("Stopping DHT hash announcer.")
         if self.next_manage_call is not None:
             self.next_manage_call.cancel()
             self.next_manage_call = None

@@ -8,6 +8,7 @@ import urllib
 import json
 import textwrap
 import random
+import signal

 from twisted.web import server
 from twisted.internet import defer, threads, error, reactor

@@ -169,7 +170,7 @@ class Daemon(AuthJSONRPCServer):
         'daemon_stop', 'status', 'version',
     ]

-    def __init__(self, root, analytics_manager):
+    def __init__(self, analytics_manager):
         AuthJSONRPCServer.__init__(self, conf.settings['use_auth_http'])
         self.db_dir = conf.settings['data_dir']
         self.download_directory = conf.settings['download_directory']

@@ -346,7 +347,7 @@ class Daemon(AuthJSONRPCServer):
         try:
             if self.lbry_server_port is not None:
                 self.lbry_server_port, old_port = None, self.lbry_server_port
-                log.info('Stop listening to %s', old_port)
+                log.info('Stop listening on port %s', old_port.port)
                 return defer.maybeDeferred(old_port.stopListening)
             else:
                 return defer.succeed(True)

@@ -385,7 +386,15 @@ class Daemon(AuthJSONRPCServer):
         except OSError:
             pass

+    @staticmethod
+    def _already_shutting_down(sig_num, frame):
+        log.info("Already shutting down")
+
     def _shutdown(self):
+        # ignore INT/TERM signals once shutdown has started
+        signal.signal(signal.SIGINT, self._already_shutting_down)
+        signal.signal(signal.SIGTERM, self._already_shutting_down)
+
         log.info("Closing lbrynet session")
         log.info("Status at time of shutdown: " + self.startup_status[0])
         self.looping_call_manager.shutdown()

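The last hunk above is the SIGINT/SIGTERM fix from the merge message: once _shutdown() begins, both signals are pointed at a logging no-op so a second Ctrl-C cannot interrupt or re-enter the teardown. A standalone sketch of the same pattern follows; the function names are ours, not the daemon's.

```python
# Standalone sketch of the "don't re-enter shutdown" pattern used above.
# Only the signal.signal() calls mirror the diff; everything else is illustrative.
import logging
import signal

log = logging.getLogger(__name__)


def _already_shutting_down(sig_num, frame):
    log.info("Already shutting down")


def shutdown():
    # Redirect further INT/TERM to a no-op once shutdown has started,
    # instead of letting them interrupt the teardown sequence.
    signal.signal(signal.SIGINT, _already_shutting_down)
    signal.signal(signal.SIGTERM, _already_shutting_down)
    log.info("Closing session")
    # ... stop loops, close the wallet, release listening ports, etc.
```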
@@ -9,7 +9,6 @@ from jsonrpc.proxy import JSONRPCProxy
 from lbrynet import analytics
 from lbrynet import conf
 from lbrynet.core import utils, system_info
-from lbrynet.daemon.auth.client import LBRYAPIClient
 from lbrynet.daemon.DaemonServer import DaemonServer

 log = logging.getLogger(__name__)

@@ -19,18 +18,8 @@ def test_internet_connection():
     return utils.check_connection()


-def stop():
-    conf.initialize_settings()
-    log_support.configure_console()
-    try:
-        LBRYAPIClient.get_client().call('stop')
-    except Exception:
-        log.exception('Failed to stop deamon')
-    else:
-        log.info("Shutting down lbrynet-daemon from command line")
-
-
 def start():
+    """The primary entry point for launching the daemon."""
     conf.initialize_settings()

     parser = argparse.ArgumentParser(description="Launch lbrynet-daemon")

@@ -89,16 +78,15 @@ def start():


 def update_settings_from_args(args):
-    cli_settings = {}
-    cli_settings['use_auth_http'] = args.useauth
-    cli_settings['wallet'] = args.wallet
-    conf.settings.update(cli_settings, data_types=(conf.TYPE_CLI,))
+    conf.settings.update({
+        'use_auth_http': args.useauth,
+        'wallet': args.wallet,
+    }, data_types=(conf.TYPE_CLI,))


 @defer.inlineCallbacks
-def start_server_and_listen(use_auth, analytics_manager, max_tries=5):
-    """The primary entry point for launching the daemon.
-
+def start_server_and_listen(use_auth, analytics_manager):
+    """
     Args:
         use_auth: set to true to enable http authentication
         analytics_manager: to send analytics

@@ -109,10 +97,9 @@ def start_server_and_listen(use_auth, analytics_manager, max_tries=5):
         yield daemon_server.start(use_auth)
         analytics_manager.send_server_startup_success()
     except Exception as e:
-        log.exception('Failed to startup')
-        yield daemon_server.stop()
+        log.exception('Failed to start')
         analytics_manager.send_server_startup_error(str(e))
-        reactor.fireSystemEvent("shutdown")
+        daemon_server.stop()


 if __name__ == "__main__":

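A note on the settings refactor above: the CLI values are now pushed in one call tagged with data_types=(conf.TYPE_CLI,). The sketch below is a toy, self-contained illustration of why a settings store might tag updates by source; it is our own example, not lbrynet.conf's actual implementation.

```python
# Toy layered settings store (not lbrynet.conf): values keep their provenance
# so CLI values can take precedence over file and default values.
TYPE_DEFAULT, TYPE_FILE, TYPE_CLI = 'default', 'file', 'cli'
PRECEDENCE = (TYPE_CLI, TYPE_FILE, TYPE_DEFAULT)


class LayeredSettings(object):
    def __init__(self):
        self._values = {}  # name -> {source: value}

    def update(self, values, data_types=(TYPE_DEFAULT,)):
        for source in data_types:
            for name, value in values.items():
                self._values.setdefault(name, {})[source] = value

    def __getitem__(self, name):
        for source in PRECEDENCE:
            if source in self._values.get(name, {}):
                return self._values[name][source]
        raise KeyError(name)


settings = LayeredSettings()
settings.update({'use_auth_http': False, 'wallet': 'lbryum'})
settings.update({'use_auth_http': True}, data_types=(TYPE_CLI,))
assert settings['use_auth_http'] is True
assert settings['wallet'] == 'lbryum'
```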
@@ -1,37 +1,41 @@
 import logging
 import os

-from twisted.web import server, guard
+from twisted.web import server, guard, resource
 from twisted.internet import defer, reactor, error
 from twisted.cred import portal

 from lbrynet import conf
 from lbrynet.daemon.Daemon import Daemon
-from lbrynet.daemon.Resources import LBRYindex, HostedEncryptedFile, EncryptedFileUpload
 from lbrynet.daemon.auth.auth import PasswordChecker, HttpPasswordRealm
 from lbrynet.daemon.auth.util import initialize_api_key_file
 from lbrynet.daemon.DaemonRequest import DaemonRequest


 log = logging.getLogger(__name__)


+class IndexResource(resource.Resource):
+    def getChild(self, name, request):
+        request.setHeader('cache-control', 'no-cache, no-store, must-revalidate')
+        request.setHeader('expires', '0')
+        return self if name == '' else resource.Resource.getChild(self, name, request)
+
+
 class DaemonServer(object):
     def __init__(self, analytics_manager=None):
-        self._api = None
+        self._daemon = None
         self.root = None
         self.server_port = None
         self.analytics_manager = analytics_manager

     def _setup_server(self, use_auth):
-        ui_path = os.path.join(conf.settings.ensure_data_dir(), "lbry-ui", "active")
-        self.root = LBRYindex(ui_path)
-        self._api = Daemon(self.root, self.analytics_manager)
-        self.root.putChild("view", HostedEncryptedFile(self._api))
-        self.root.putChild("upload", EncryptedFileUpload(self._api))
-        self.root.putChild(conf.settings['API_ADDRESS'], self._api)
+        self.root = IndexResource()
+        self._daemon = Daemon(self.analytics_manager)
+        self.root.putChild("", self._daemon)
+        # TODO: DEPRECATED, remove this and just serve the API at the root
+        self.root.putChild(conf.settings['API_ADDRESS'], self._daemon)

-        lbrynet_server = server.Site(get_site_base(use_auth, self.root))
+        lbrynet_server = get_site_base(use_auth, self.root)
         lbrynet_server.requestFactory = DaemonRequest

         try:

@@ -46,23 +50,22 @@ class DaemonServer(object):
     @defer.inlineCallbacks
     def start(self, use_auth):
         yield self._setup_server(use_auth)
-        yield self._api.setup()
+        yield self._daemon.setup()

     @defer.inlineCallbacks
     def stop(self):
-        if self._api is not None:
-            yield self._api._shutdown()
-        if self.server_port is not None:
-            yield self.server_port.stopListening()
+        if reactor.running:
+            log.info("Stopping the reactor")
+            reactor.fireSystemEvent("shutdown")


 def get_site_base(use_auth, root):
     if use_auth:
         log.info("Using authenticated API")
-        return create_auth_session(root)
+        root = create_auth_session(root)
     else:
         log.info("Using non-authenticated API")
-        return server.Site(root)
+    return server.Site(root)


 def create_auth_session(root):

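The net effect of the DaemonServer changes: the JSON-RPC resource is now mounted at the root ("") as well as at the old conf.settings['API_ADDRESS'] path, which is kept only as a deprecated alias. Below is a minimal standalone Twisted sketch of that layout, with made-up names, payload, and port, assuming Twisted is installed.

```python
# Minimal standalone sketch (not LBRY code) of serving an API resource at the
# root while keeping the old path as a deprecated alias.
from twisted.web import resource, server
from twisted.internet import reactor


class Api(resource.Resource):
    isLeaf = True

    def render_GET(self, request):
        return b'{"status": "ok"}'


class Index(resource.Resource):
    def getChild(self, name, request):
        # mirror the no-cache headers set by IndexResource above
        request.setHeader('cache-control', 'no-cache, no-store, must-revalidate')
        request.setHeader('expires', '0')
        return self if name == b'' else resource.Resource.getChild(self, name, request)


root = Index()
api = Api()
root.putChild(b'', api)         # API served at the root path
root.putChild(b'lbryapi', api)  # deprecated alias, kept for compatibility

if __name__ == '__main__':
    reactor.listenTCP(5279, server.Site(root))  # port chosen for illustration
    reactor.run()
```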
@@ -1,137 +0,0 @@
-import logging
-import os
-import shutil
-import json
-import tempfile
-
-
-from twisted.web import server, static, resource
-from twisted.internet import defer, error
-
-from lbrynet import conf
-from lbrynet.daemon.FileStreamer import EncryptedFileStreamer
-
-log = logging.getLogger(__name__)
-
-
-class NoCacheStaticFile(static.File):
-    def _set_no_cache(self, request):
-        request.setHeader('cache-control', 'no-cache, no-store, must-revalidate')
-        request.setHeader('expires', '0')
-
-    def render_GET(self, request):
-        self._set_no_cache(request)
-        return static.File.render_GET(self, request)
-
-
-class LBRYindex(resource.Resource):
-    def __init__(self, ui_dir):
-        resource.Resource.__init__(self)
-        self.ui_dir = ui_dir
-
-    isLeaf = False
-
-    def _delayed_render(self, request, results):
-        request.write(str(results))
-        request.finish()
-
-    def getChild(self, name, request):
-        request.setHeader('cache-control', 'no-cache, no-store, must-revalidate')
-        request.setHeader('expires', '0')
-
-        if name == '':
-            return self
-        return resource.Resource.getChild(self, name, request)
-
-    def render_GET(self, request):
-        return NoCacheStaticFile(os.path.join(self.ui_dir, "index.html")).render_GET(request)
-
-
-class HostedEncryptedFile(resource.Resource):
-    def __init__(self, api):
-        self._api = api
-        resource.Resource.__init__(self)
-
-    def _make_stream_producer(self, request, stream):
-        path = os.path.join(self._api.download_directory, stream.file_name)
-
-        producer = EncryptedFileStreamer(request, path, stream, self._api.lbry_file_manager)
-        request.registerProducer(producer, streaming=True)
-
-        d = request.notifyFinish()
-        d.addErrback(self._responseFailed, d)
-        return d
-
-    def is_valid_request_name(self, request):
-        return (
-            request.args['name'][0] != 'lbry' and
-            request.args['name'][0] not in self._api.waiting_on.keys())
-
-    def render_GET(self, request):
-        request.setHeader("Content-Security-Policy", "sandbox")
-        if 'name' in request.args.keys():
-            if self.is_valid_request_name(request):
-                name = request.args['name'][0]
-                d = self._api.jsonrpc_get(name=name)
-                d.addCallback(lambda response: response['stream_hash'])
-                d.addCallback(lambda sd_hash: self._api._get_lbry_file_by_sd_hash(sd_hash))
-                d.addCallback(lambda lbry_file: self._make_stream_producer(request, lbry_file))
-            elif request.args['name'][0] in self._api.waiting_on.keys():
-                request.redirect(
-                    conf.settings.get_ui_address() + "/?watch=" + request.args['name'][0]
-                )
-                request.finish()
-            else:
-                request.redirect(conf.settings.get_ui_address())
-                request.finish()
-        return server.NOT_DONE_YET
-
-    def _responseFailed(self, err, call):
-        call.addErrback(lambda err: err.trap(error.ConnectionDone))
-        call.addErrback(lambda err: err.trap(defer.CancelledError))
-        call.addErrback(lambda err: log.info("Error: " + str(err)))
-        call.cancel()
-
-
-class EncryptedFileUpload(resource.Resource):
-    """
-    Accepts a file sent via the file upload widget in the web UI, saves
-    it into a temporary dir, and responds with a JSON string containing
-    the path of the newly created file.
-    """
-    def __init__(self, api):
-        self._api = api
-
-    def render_POST(self, request):
-        origfilename = request.args['file_filename'][0]
-        # Temp file created by request
-        uploaded_file = request.args['file'][0]
-        newpath = move_to_temp_dir_and_restore_filename(uploaded_file, origfilename)
-        self._api.uploaded_temp_files.append(newpath)
-        return json.dumps(newpath)
-
-
-def move_to_temp_dir_and_restore_filename(uploaded_file, origfilename):
-    newdirpath = tempfile.mkdtemp()
-    newpath = os.path.join(newdirpath, origfilename)
-    if os.name == "nt":
-        # TODO: comment on why shutil.move doesn't work?
-        move_win(uploaded_file.name, newpath)
-    else:
-        shutil.move(uploaded_file.name, newpath)
-    return newpath
-
-
-def move_win(from_path, to_path):
-    shutil.copy(from_path, to_path)
-    # TODO Still need to remove the file
-    # TODO deal with pylint error in cleaner fashion than this
-    try:
-        from exceptions import WindowsError as win_except
-    except ImportError as e:
-        log.error("This shouldn't happen")
-        win_except = Exception
-    try:
-        os.remove(from_path)
-    except win_except as e:
-        pass

@@ -24,7 +24,7 @@ def get_test_daemon(data_rate=None, generous=True, with_fee=False):
         'BTCLBC': {'spot': 3.0, 'ts': util.DEFAULT_ISO_TIME + 1},
         'USDBTC': {'spot': 2.0, 'ts': util.DEFAULT_ISO_TIME + 2}
     }
-    daemon = LBRYDaemon(None, None)
+    daemon = LBRYDaemon(None)
     daemon.session = mock.Mock(spec=Session.Session)
     daemon.session.wallet = mock.Mock(spec=Wallet.LBRYumWallet)
    market_feeds = [BTCLBCFeed(), USDBTCFeed()]