Merge branch 'delete-unused-code'

This commit is contained in:
Jack Robison 2017-12-06 20:47:01 -05:00
commit 3793d7f5b8
No known key found for this signature in database
GPG key ID: 284699E7404E3CFF
2 changed files with 0 additions and 124 deletions

View file

@@ -198,7 +198,6 @@ class Daemon(AuthJSONRPCServer):
self.current_db_revision = 5
self.db_revision_file = conf.settings.get_db_revision_filename()
self.session = None
self.uploaded_temp_files = []
self._session_id = conf.settings.get_session_id()
# TODO: this should probably be passed into the daemon, or
# possibly have the entire log upload functionality taken out
@@ -275,18 +274,6 @@ class Daemon(AuthJSONRPCServer):
def _check_network_connection(self):
    """Probe internet connectivity and record the result on the daemon."""
    is_online = utils.check_connection()
    self.connected_to_internet = is_online
def _check_lbrynet_connection(self):
    """Run a connectivity self-test by downloading a known SD blob.

    Attempts to fetch the sd blob of a well-known stream; success or
    failure is reported via the log only (the deferred's outcome is not
    otherwise consumed).
    """
    def _log_success():
        log.info("lbrynet connectivity test passed")

    def _log_failure():
        log.info("lbrynet connectivity test failed")

    wonderfullife_sh = ("6f3af0fa3924be98a54766aa2715d22c6c1509c3f7fa32566df4899"
                        "a41f3530a9f97b2ecb817fa1dcbf1b30553aefaa7")
    d = download_sd_blob(self.session, wonderfullife_sh, self.session.base_payment_rate_manager)
    # BUG FIX: the callbacks previously returned the helper functions
    # instead of invoking them, so neither log line ever fired.
    d.addCallbacks(lambda _: _log_success(), lambda _: _log_failure())
def _update_connection_status(self):
    """Refresh the daemon's connection status code.

    NOTE(review): the status is set to CONNECTED unconditionally here —
    presumably any degraded states are handled elsewhere; confirm.
    """
    self.connection_status_code = CONNECTION_STATUS_CONNECTED
@@ -400,14 +387,6 @@ class Daemon(AuthJSONRPCServer):
self.query_handlers[query_id] = handler
return defer.succeed(None)
def _clean_up_temp_files(self):
for path in self.uploaded_temp_files:
try:
log.debug('Removing tmp file: %s', path)
os.remove(path)
except OSError:
pass
@staticmethod
def _already_shutting_down(sig_num, frame):
    # Signal handler installed once shutdown has begun: a repeated signal
    # is acknowledged with a log line rather than restarting shutdown.
    # NOTE(review): only this line is visible at the hunk boundary —
    # confirm no further body follows in the full file.
    log.info("Already shutting down")
@@ -431,8 +410,6 @@ class Daemon(AuthJSONRPCServer):
if self.analytics_manager:
self.analytics_manager.shutdown()
self._clean_up_temp_files()
d = self._stop_server()
d.addErrback(log.fail(), 'Failure while shutting down')
d.addCallback(lambda _: self._stop_reflector())
@@ -740,10 +717,6 @@ class Daemon(AuthJSONRPCServer):
claim_out['nout'])
defer.returnValue(claim_out)
def _get_long_count_timestamp(self):
    """Return whole seconds elapsed since 2012-12-21 as an int."""
    epoch = utils.datetime_obj(year=2012, month=12, day=21)
    return int((utils.utcnow() - epoch).total_seconds())
@defer.inlineCallbacks
def _resolve_name(self, name, force_refresh=False):
"""Resolves a name. Checks the cache first before going out to the blockchain.

View file

@@ -1,97 +0,0 @@
import logging
import mimetypes
from zope.interface import implements
from twisted.internet import defer, error, interfaces, abstract, task, reactor
log = logging.getLogger(__name__)
# Status value compared against get_lbry_file_status() results to detect
# when the underlying download has completed.
STATUS_FINISHED = 'finished'


class EncryptedFileStreamer(object):
    """
    Writes LBRY stream to request; will pause to wait for new data if the file
    is downloading.

    No support for range requests (some browser players can't handle it when
    the full video data isn't available on request).
    """

    implements(interfaces.IPushProducer)

    # Reuse Twisted's standard file-descriptor buffer size for reads.
    bufferSize = abstract.FileDescriptor.bufferSize

    # How long to wait between sending blocks (needed because some
    # video players freeze up if you try to send data too fast)
    stream_interval = 0.005

    # How long to wait before checking if new data has been appended to the file
    new_data_check_interval = 0.25

    def __init__(self, request, path, stream, file_manager):
        """Start streaming the file at `path` into the HTTP `request`.

        :param request: twisted.web request the response bytes are written to
        :param path: filesystem path of the (possibly still-downloading) file
        :param stream: object providing get_total_bytes() for the stream
        :param file_manager: used to poll the stream's download status
        """
        def _set_content_length_header(length):
            self._request.setHeader('content-length', length)
            return defer.succeed(None)

        self._request = request
        self._file = open(path, 'rb')
        self._stream = stream
        self._file_manager = file_manager
        self._headers_sent = False

        self._running = True

        self._request.setResponseCode(200)
        self._request.setHeader('accept-ranges', 'none')
        # NOTE(review): guess_type() may return None for unknown extensions —
        # confirm the request object tolerates a None content-type.
        self._request.setHeader('content-type', mimetypes.guess_type(path)[0])
        self._request.setHeader("Content-Security-Policy", "sandbox")

        # Send content-length first, then begin producing; all subsequent
        # work is chained onto self._deferred so writes stay sequential.
        self._deferred = stream.get_total_bytes()
        self._deferred.addCallback(_set_content_length_header)
        self._deferred.addCallback(lambda _: self.resumeProducing())

    def _check_for_new_data(self):
        # One polling step: write any bytes appended since the last read,
        # then schedule the next step (or stop once the download finished).
        def _recurse_or_stop(stream_status):
            if not self._running:
                return

            if stream_status != STATUS_FINISHED:
                # Download still in progress: poll again after the longer delay.
                self._deferred.addCallback(
                    lambda _: task.deferLater(
                        reactor, self.new_data_check_interval, self._check_for_new_data))
            else:
                self.stopProducing()

        if not self._running:
            return

        # Clear the file's EOF indicator by seeking to current position
        self._file.seek(self._file.tell())

        data = self._file.read(self.bufferSize)
        if data:
            self._request.write(data)
            if self._running:  # .write() can trigger a pause
                self._deferred.addCallback(
                    lambda _: task.deferLater(
                        reactor, self.stream_interval, self._check_for_new_data))
        else:
            # No new bytes available: ask the file manager whether the
            # download finished before deciding to wait or stop.
            self._deferred.addCallback(
                lambda _: self._file_manager.get_lbry_file_status(self._stream))
            self._deferred.addCallback(_recurse_or_stop)

    def pauseProducing(self):
        # IPushProducer: consumer asked us to stop writing for now.
        self._running = False

    def resumeProducing(self):
        # IPushProducer: consumer is ready again; restart the polling loop.
        self._running = True
        self._check_for_new_data()

    def stopProducing(self):
        # Tear down: close the file, swallow the expected cancellation and
        # connection-done errors, cancel pending work, and finish the response.
        self._running = False
        self._file.close()
        self._deferred.addErrback(lambda err: err.trap(defer.CancelledError))
        self._deferred.addErrback(lambda err: err.trap(error.ConnectionDone))
        self._deferred.cancel()
        self._request.unregisterProducer()
        self._request.finish()