cache name claim info

- cache name claim info for an hour rather than looking it up each time it's required
- add default thumbnail to search results
parent dfaf51a432
commit 591634f175
3 changed files with 102 additions and 76 deletions
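The core of the change is a one-hour cache for name claim lookups: resolved claim metadata is kept in a timestamped dict, persisted to stream_info_cache.json in the daemon's db directory, and reused until an entry is older than DEFAULT_CACHE_TIME (3600 seconds). Below is a minimal synchronous sketch of that scheme; ClaimCache and lookup_fn are illustrative stand-ins, since the daemon itself does this inside LBRYDaemon with Twisted Deferreds and self.session.wallet.get_stream_info_for_name, and measures age with a counter based on 2012-12-21 rather than time.time().

# Sketch only: a synchronous stand-in for the Deferred-based cache added in this commit.
import json
import os
import time

DEFAULT_CACHE_TIME = 3600  # seconds; mirrors the new constant in conf.py


class ClaimCache(object):  # illustrative name, not part of the daemon
    def __init__(self, db_dir, lookup_fn, cache_time=DEFAULT_CACHE_TIME):
        self.path = os.path.join(db_dir, "stream_info_cache.json")
        self.lookup_fn = lookup_fn      # real lookup (e.g. wallet); only called on miss or stale entry
        self.cache_time = cache_time
        self.name_cache = {}
        if os.path.isfile(self.path):   # reload the cache across restarts
            with open(self.path, "r") as f:
                self.name_cache = json.loads(f.read())

    def resolve(self, name):
        entry = self.name_cache.get(name)
        if entry is not None and (time.time() - entry['timestamp']) < self.cache_time:
            return entry['claim_metadata']          # fresh enough: skip the lookup
        stream_info = self.lookup_fn(name)          # miss or stale: do the real lookup
        self.name_cache[name] = {'claim_metadata': stream_info,
                                 'timestamp': time.time()}
        with open(self.path, "w") as f:             # persist, as _update_claim_cache does
            f.write(json.dumps(self.name_cache))
        return stream_info

The second half of the commit is simpler: search results that carry no 'thumbnail' key are given the default "img/Free-speech-flag.svg" image before being returned.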
@@ -40,4 +40,4 @@ DEFAULT_TIMEOUT = 30
 DEFAULT_MAX_SEARCH_RESULTS = 25
 DEFAULT_MAX_KEY_FEE = 100.0
 DEFAULT_SEARCH_TIMEOUT = 3.0
+DEFAULT_CACHE_TIME = 3600
@@ -36,7 +36,7 @@ from lbrynet.lbrynet_daemon.LBRYPublisher import Publisher
 from lbrynet.core.utils import generate_id
 from lbrynet.lbrynet_console.LBRYSettings import LBRYSettings
 from lbrynet.conf import MIN_BLOB_DATA_PAYMENT_RATE, DEFAULT_MAX_SEARCH_RESULTS, KNOWN_DHT_NODES, DEFAULT_MAX_KEY_FEE, \
-    DEFAULT_WALLET, DEFAULT_SEARCH_TIMEOUT
+    DEFAULT_WALLET, DEFAULT_SEARCH_TIMEOUT, DEFAULT_CACHE_TIME
 from lbrynet.conf import API_CONNECTION_STRING, API_PORT, API_ADDRESS, DEFAULT_TIMEOUT, UI_ADDRESS
 from lbrynet.core.StreamDescriptor import StreamDescriptorIdentifier, download_sd_blob
 from lbrynet.core.Session import LBRYSession
@@ -83,12 +83,26 @@ STARTUP_STAGES = [
     (STARTED_CODE, 'Started lbrynet')
 ]
 
+DOWNLOAD_METADATA_CODE = 'downloading_metadata'
+DOWNLOAD_TIMEOUT_CODE = 'timeout'
+DOWNLOAD_RUNNING_CODE = 'running'
+DOWNLOAD_STOPPED_CODE = 'stopped'
+STREAM_STAGES = [
+    (INITIALIZING_CODE, 'Initializing...'),
+    (DOWNLOAD_METADATA_CODE, 'Downloading metadata'),
+    (DOWNLOAD_RUNNING_CODE, 'Started stream'),
+    (DOWNLOAD_STOPPED_CODE, 'Paused stream'),
+    (DOWNLOAD_TIMEOUT_CODE, 'Stream timed out')
+]
+
 CONNECT_CODE_VERSION_CHECK = 'version_check'
 CONNECT_CODE_NETWORK = 'network_connection'
 CONNECT_CODE_WALLET = 'wallet_catchup_lag'
-CONNECTION_PROBLEM_CODES = [(CONNECT_CODE_VERSION_CHECK, "There was a problem checking for updates on github"),
-                            (CONNECT_CODE_NETWORK, "Your internet connection appears to have been interrupted"),
-                            (CONNECT_CODE_WALLET, "Synchronization with the blockchain is lagging... if this continues try restarting LBRY")]
+CONNECTION_PROBLEM_CODES = [
+    (CONNECT_CODE_VERSION_CHECK, "There was a problem checking for updates on github"),
+    (CONNECT_CODE_NETWORK, "Your internet connection appears to have been interrupted"),
+    (CONNECT_CODE_WALLET, "Synchronization with the blockchain is lagging... if this continues try restarting LBRY")
+]
 
 ALLOWED_DURING_STARTUP = ['is_running', 'is_first_run',
                           'get_time_behind_blockchain', 'stop',
@@ -218,7 +232,8 @@ class LBRYDaemon(jsonrpc.JSONRPC):
             'dht_node_port': 4444,
             'use_upnp': True,
             'start_lbrycrdd': True,
-            'requested_first_run_credits': False
+            'requested_first_run_credits': False,
+            'cache_time': DEFAULT_CACHE_TIME
         }
 
         if os.path.isfile(self.daemon_conf):
@@ -272,6 +287,15 @@ class LBRYDaemon(jsonrpc.JSONRPC):
         self.use_upnp = self.session_settings['use_upnp']
         self.start_lbrycrdd = self.session_settings['start_lbrycrdd']
         self.requested_first_run_credits = self.session_settings['requested_first_run_credits']
+        self.cache_time = self.session_settings['cache_time']
+
+        if os.path.isfile(os.path.join(self.db_dir, "stream_info_cache.json")):
+            f = open(os.path.join(self.db_dir, "stream_info_cache.json"), "r")
+            self.name_cache = json.loads(f.read())
+            f.close()
+            log.info("Loaded claim info cache")
+        else:
+            self.name_cache = {}
 
     def render(self, request):
         request.content.seek(0, 0)
@@ -341,12 +365,11 @@ class LBRYDaemon(jsonrpc.JSONRPC):
 
     def setup(self):
        def _log_starting_vals():
-            r = json.dumps(self._get_lbry_files())
-            log.info("LBRY Files: " + r)
-            log.info("Starting balance: " + str(self.session.wallet.wallet_balance))
-            return defer.succeed(None)
+            d = self._get_lbry_files()
+            d.addCallback(lambda r: json.dumps([d[1] for d in r]))
+            d.addCallback(lambda r: log.info("LBRY Files: " + r))
+            d.addCallback(lambda _: log.info("Starting balance: " + str(self.session.wallet.wallet_balance)))
+            return d
 
         def _announce_startup():
             def _announce():
@@ -630,7 +653,16 @@ class LBRYDaemon(jsonrpc.JSONRPC):
                     self.session_settings['download_timeout'] = settings['download_timeout']
                 else:
                     return defer.fail()
+            elif k == 'search_timeout':
+                if type(settings['search_timeout']) is float:
+                    self.session_settings['search_timeout'] = settings['search_timeout']
+                else:
+                    return defer.fail()
+            elif k == 'cache_time':
+                if type(settings['cache_time']) is int:
+                    self.session_settings['cache_time'] = settings['cache_time']
+                else:
+                    return defer.fail()
 
         self.run_on_startup = self.session_settings['run_on_startup']
         self.data_rate = self.session_settings['data_rate']
         self.max_key_fee = self.session_settings['max_key_fee']
@@ -639,6 +671,8 @@ class LBRYDaemon(jsonrpc.JSONRPC):
         self.max_download = self.session_settings['max_download']
         self.upload_log = self.session_settings['upload_log']
         self.download_timeout = self.session_settings['download_timeout']
+        self.search_timeout = self.session_settings['search_timeout']
+        self.cache_time = self.session_settings['cache_time']
 
         f = open(self.daemon_conf, "w")
         f.write(json.dumps(self.session_settings))
@@ -879,7 +913,6 @@ class LBRYDaemon(jsonrpc.JSONRPC):
             stream_hash = stream_info['stream_hash']
             if isinstance(stream_hash, dict):
                 stream_hash = stream_hash['sd_hash']
-            log.info("[" + str(datetime.now()) + "] Resolved lbry://" + name + " to sd hash: " + stream_hash)
             d = self._get_lbry_file_by_sd_hash(stream_hash)
             def _add_results(l):
                 return defer.succeed((stream_info, l))
@@ -902,25 +935,41 @@ class LBRYDaemon(jsonrpc.JSONRPC):
             return d
 
         self.waiting_on[name] = True
-        d = self.session.wallet.get_stream_info_for_name(name)
+        d = self._resolve_name(name)
         d.addCallback(_setup_stream)
         d.addCallback(lambda (stream_info, lbry_file): _get_stream(stream_info) if not lbry_file else _disp_file(lbry_file))
         d.addCallback(_remove_from_wait)
 
         return d
 
+    def _get_long_count_timestamp(self):
+        return int((datetime.utcnow() - (datetime(year=2012, month=12, day=21))).total_seconds())
+
+    def _update_claim_cache(self):
+        f = open(os.path.join(self.db_dir, "stream_info_cache.json"), "w")
+        f.write(json.dumps(self.name_cache))
+        f.close()
+        return defer.succeed(True)
+
     def _resolve_name(self, name):
-        d = defer.Deferred()
-        d.addCallback(lambda _: self.session.wallet.get_stream_info_for_name(name))
-        d.addErrback(lambda _: defer.fail(UnknownNameError))
-
-        return d
-
-    def _resolve_name_wc(self, name):
-        d = defer.Deferred()
-        d.addCallback(lambda _: self.session.wallet.get_stream_info_for_name(name))
-        d.addErrback(lambda _: defer.fail(UnknownNameError))
-        d.callback(None)
+        def _cache_stream_info(stream_info):
+            self.name_cache[name] = {'claim_metadata': stream_info, 'timestamp': self._get_long_count_timestamp()}
+            d = self._update_claim_cache()
+            d.addCallback(lambda _: self.name_cache[name]['claim_metadata'])
+            return d
+
+        if name in self.name_cache.keys():
+            if (self._get_long_count_timestamp() - self.name_cache[name]['timestamp']) < self.cache_time:
+                log.info("[" + str(datetime.now()) + "] Returning cached stream info for lbry://" + name)
+                d = defer.succeed(self.name_cache[name]['claim_metadata'])
+            else:
+                log.info("[" + str(datetime.now()) + "] Refreshing stream info for lbry://" + name)
+                d = self.session.wallet.get_stream_info_for_name(name)
+                d.addCallbacks(_cache_stream_info, lambda _: defer.fail(UnknownNameError))
+        else:
+            log.info("[" + str(datetime.now()) + "] Resolving stream info for lbry://" + name)
+            d = self.session.wallet.get_stream_info_for_name(name)
+            d.addCallbacks(_cache_stream_info, lambda _: defer.fail(UnknownNameError))
 
         return d
@@ -954,11 +1003,11 @@ class LBRYDaemon(jsonrpc.JSONRPC):
             return defer.succeed(None)
 
         def _add_key_fee(data_cost):
-            d = self.session.wallet.get_stream_info_for_name(name)
+            d = self._resolve_name(name)
             d.addCallback(lambda info: data_cost + info['key_fee'] if 'key_fee' in info.keys() else data_cost)
             return d
 
-        d = self.session.wallet.get_stream_info_for_name(name)
+        d = self._resolve_name(name)
         d.addCallback(lambda info: info['stream_hash'] if isinstance(info['stream_hash'], str)
                       else info['stream_hash']['sd_hash'])
         d.addCallback(lambda sd_hash: download_sd_blob(self.session, sd_hash,
@@ -972,25 +1021,6 @@ class LBRYDaemon(jsonrpc.JSONRPC):
 
         return d
 
-    def _get_lbry_files(self):
-        r = []
-        for f in self.lbry_file_manager.lbry_files:
-            if f.key:
-                t = {'completed': f.completed, 'file_name': f.file_name, 'key': binascii.b2a_hex(f.key),
-                     'points_paid': f.points_paid, 'stopped': f.stopped, 'stream_hash': f.stream_hash,
-                     'stream_name': f.stream_name, 'suggested_file_name': f.suggested_file_name,
-                     'upload_allowed': f.upload_allowed, 'sd_hash': f.sd_hash}
-
-            else:
-                t = {'completed': f.completed, 'file_name': f.file_name, 'key': None,
-                     'points_paid': f.points_paid,
-                     'stopped': f.stopped, 'stream_hash': f.stream_hash, 'stream_name': f.stream_name,
-                     'suggested_file_name': f.suggested_file_name, 'upload_allowed': f.upload_allowed,
-                     'sd_hash': f.sd_hash}
-
-            r.append(t)
-        return r
-
     def _get_lbry_file_by_uri(self, name):
         def _get_file(stream_info):
             if isinstance(stream_info['stream_hash'], str) or isinstance(stream_info['stream_hash'], unicode):
@@ -1003,7 +1033,7 @@ class LBRYDaemon(jsonrpc.JSONRPC):
                 return defer.succeed(l)
             return defer.succeed(None)
 
-        d = self.session.wallet.get_stream_info_for_name(name)
+        d = self._resolve_name(name)
         d.addCallback(_get_file)
 
         return d
@@ -1022,26 +1052,22 @@ class LBRYDaemon(jsonrpc.JSONRPC):
 
     def _get_lbry_file(self, search_by, val, return_json=True):
         def _log_get_lbry_file(f):
-            if f:
+            if f and val:
                 log.info("Found LBRY file for " + search_by + ": " + val)
-            else:
+            elif val:
                 log.info("Did not find LBRY file for " + search_by + ": " + val)
             return f
 
         def _get_json_for_return(f):
             if f:
                 if f.key:
-                    t = {'completed': f.completed, 'file_name': f.file_name, 'key': binascii.b2a_hex(f.key),
-                         'points_paid': f.points_paid, 'stopped': f.stopped, 'stream_hash': f.stream_hash,
-                         'stream_name': f.stream_name, 'suggested_file_name': f.suggested_file_name,
-                         'upload_allowed': f.upload_allowed, 'sd_hash': f.sd_hash}
-
+                    key = binascii.b2a_hex(f.key)
                 else:
-                    t = {'completed': f.completed, 'file_name': f.file_name, 'key': None,
-                         'points_paid': f.points_paid,
-                         'stopped': f.stopped, 'stream_hash': f.stream_hash, 'stream_name': f.stream_name,
-                         'suggested_file_name': f.suggested_file_name, 'upload_allowed': f.upload_allowed,
-                         'sd_hash': f.sd_hash}
+                    key = None
+                t = {'completed': f.completed, 'file_name': f.file_name, 'key': key,
+                     'points_paid': f.points_paid, 'stopped': f.stopped, 'stream_hash': f.stream_hash,
+                     'stream_name': f.stream_name, 'suggested_file_name': f.suggested_file_name,
+                     'upload_allowed': f.upload_allowed, 'sd_hash': f.sd_hash}
                 return t
             else:
                 return False
@@ -1057,6 +1083,10 @@ class LBRYDaemon(jsonrpc.JSONRPC):
         d.addCallback(_get_json_for_return)
         return d
 
+    def _get_lbry_files(self):
+        d = defer.DeferredList([self._get_lbry_file('sd_hash', l.sd_hash) for l in self.lbry_file_manager.lbry_files])
+        return d
+
     def _log_to_slack(self, msg):
         URL = "https://hooks.slack.com/services/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"
         msg = platform.platform() + ": " + base58.b58encode(self.lbryid)[:20] + ", " + msg
@@ -1351,9 +1381,11 @@ class LBRYDaemon(jsonrpc.JSONRPC):
             'sd_hash': string
         """
 
-        r = self._get_lbry_files()
-        log.info("[" + str(datetime.now()) + "] Get LBRY files")
-        return self._render_response(r, OK_CODE)
+        d = self._get_lbry_files()
+        d.addCallback(lambda r: [d[1] for d in r])
+        d.addCallback(lambda r: self._render_response(r, OK_CODE) if len(r) else self._render_response(False, OK_CODE))
+
+        return d
 
     def jsonrpc_get_lbry_file(self, p):
         """
@@ -1399,18 +1431,8 @@ class LBRYDaemon(jsonrpc.JSONRPC):
         else:
             return self._render_response(None, BAD_REQUEST)
 
-        def _disp(info):
-            stream_hash = info['stream_hash']
-            if isinstance(stream_hash, dict):
-                stream_hash = stream_hash['sd_hash']
-
-            log.info("[" + str(datetime.now()) + "] Resolved info: " + stream_hash)
-
-            return self._render_response(info, OK_CODE)
-
         d = self._resolve_name(name)
-        d.addCallbacks(_disp, lambda _: server.failure)
-        d.callback(None)
+        d.addCallbacks(lambda info: self._render_response(info, OK_CODE), lambda _: server.failure)
 
         return d
 
     def jsonrpc_get(self, p):
@@ -1525,7 +1547,7 @@ class LBRYDaemon(jsonrpc.JSONRPC):
         ds = []
         for claim in claims:
             d1 = defer.succeed(claim)
-            d2 = self._resolve_name_wc(claim['name'])
+            d2 = self._resolve_name(claim['name'])
             d3 = self._get_est_cost(claim['name'])
             dl = defer.DeferredList([d1, d2, d3], consumeErrors=True)
             ds.append(dl)
@@ -1542,6 +1564,8 @@ class LBRYDaemon(jsonrpc.JSONRPC):
                 del r[1]['name']
                 t.update(r[1])
                 t['cost_est'] = r[2]
+                if not 'thumbnail' in t.keys():
+                    t['thumbnail'] = "img/Free-speech-flag.svg"
                 consolidated_results.append(t)
                 # log.info(str(t))
             return consolidated_results
@@ -68,12 +68,12 @@ class LBRYFileProducer(StaticProducer):
     def start(self):
         d = self._set_size()
         self.fileObject.seek(0)
-        self.updater.start(5)
+        self.updater.start(1)
 
     def _set_size(self):
         def _set(size):
             self.request.setHeader('content-length', str(size))
-            self.request.setHeader('content-type', ' application/octet-stream')
+            self.request.setHeader('content-type', 'application/octet-stream')
             return defer.succeed(None)
 
         d = self.stream.get_total_bytes()
@@ -85,6 +85,7 @@ class LBRYFileProducer(StaticProducer):
         self.fileObject.seek(self.fileObject.tell())
         data = self.fileObject.read()
         self.total_bytes += len(data)
+        log.info(str(self.total_bytes))
 
         if data:
             self.request.write(data)
@@ -93,6 +94,7 @@ class LBRYFileProducer(StaticProducer):
         def _check_status(stream_status):
             if stream_status.running_status == "completed":
                 self.stopProducing()
+
             return defer.succeed(None)
 
         d = _write_new_data_to_request()