forked from LBRYCommunity/lbry-sdk
fix pylint failures unveiled after astroid pin
This commit is contained in:
parent 3f03a845ec
commit 9f0b1f3e25

6 changed files with 7 additions and 15 deletions
@@ -44,7 +44,7 @@ class DictDataStore(UserDict):
             del self[key]

     def hasPeersForBlob(self, key):
-        return True if key in self and len(tuple(self.filter_bad_and_expired_peers(key))) else False
+        return bool(key in self and len(tuple(self.filter_bad_and_expired_peers(key))))

     def addPeerToBlob(self, contact, key, compact_address, lastPublished, originallyPublished, originalPublisherID):
         if key in self:
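Note: this is pylint's simplifiable-if-expression fix. An if-expression that only maps truthy to True and falsy to False can be written as bool(...), and the wrapper still matters because "a and b" evaluates to one of its operands rather than to a bool. A minimal standalone sketch of the equivalence (the store/key names are illustrative, not from the codebase):

    # Before: an if-expression that only normalizes truthiness
    def has_peers(store, key):
        return True if key in store and len(store[key]) else False

    # After: bool() performs the same normalization directly. It is still
    # needed because "a and b" evaluates to an operand (here the int
    # returned by len()), not to True/False.
    def has_peers_simplified(store, key):
        return bool(key in store and len(store[key]))

    assert has_peers({"k": [1]}, "k") is has_peers_simplified({"k": [1]}, "k")
    assert has_peers({"k": []}, "k") is has_peers_simplified({"k": []}, "k")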
@@ -13,14 +13,12 @@ class DecodeError(Exception):
     Should be raised by an C{Encoding} implementation if decode operation
     fails
     """
-    pass


 class BucketFull(Exception):
     """
     Raised when the bucket is full
     """
-    pass


 class UnknownRemoteException(Exception):
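Note: the two deletions above are pylint's unnecessary-pass fix. A docstring is itself a statement, so a class or function body that contains one does not also need pass. A short illustration with hypothetical names:

    # The docstring already makes the body non-empty, so pass is redundant:
    class DocumentedError(Exception):
        """Raised when a documented failure occurs."""

    # Without a docstring, pass is still required to form a valid body:
    class BareError(Exception):
        pass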
@@ -10,7 +10,6 @@ class IDataStore(Interface):

     def keys(self):
         """ Return a list of the keys in this data store """
-        pass

     def removeExpiredPeers(self):
         pass
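Note: only the pass after the docstring in keys() is dropped here. removeExpiredPeers() keeps its pass because it has no docstring, so removing it would leave an empty, syntactically invalid body.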
@@ -418,7 +418,7 @@ class SQLiteStorage:

     def check_if_stream_exists(self, stream_hash):
         d = self.db.runQuery("select stream_hash from stream where stream_hash=?", (stream_hash, ))
-        d.addCallback(lambda r: True if len(r) else False)
+        d.addCallback(lambda r: bool(len(r)))
         return d

     def get_blob_num_by_hash(self, stream_hash, blob_hash):
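Note: the same simplification, applied inside a Twisted Deferred callback: the lambda turns the list of matching rows into a plain bool before downstream callbacks see it. A self-contained sketch of the pattern, with defer.succeed standing in for the runQuery call and made-up sample rows:

    from twisted.internet import defer

    def check_if_stream_exists(rows):
        d = defer.succeed(rows)                # stand-in for self.db.runQuery(...)
        d.addCallback(lambda r: bool(len(r)))  # [] -> False, [row] -> True
        return d

    check_if_stream_exists([("abc123",)]).addCallback(print)  # True
    check_if_stream_exists([]).addCallback(print)             # False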
@@ -29,7 +29,6 @@ class StreamDescriptorReader:
     def _get_raw_data(self):
         """This method must be overridden by subclasses. It should return a deferred
         which fires with the raw data in the stream descriptor"""
-        pass

     def get_info(self):
         """Return the fields contained in the file"""
@@ -89,7 +88,6 @@ class StreamDescriptorWriter:
         """This method must be overridden by subclasses to write raw data to
         the stream descriptor
         """
-        pass


 class PlainStreamDescriptorWriter(StreamDescriptorWriter):
@@ -389,7 +387,7 @@ def validate_descriptor(stream_info):
         raise InvalidStreamDescriptorError("Missing '%s'" % (e.args[0]))
     if stream_info['blobs'][-1]['length'] != 0:
         raise InvalidStreamDescriptorError("Does not end with a zero-length blob.")
-    if any([False if blob_info['length'] > 0 else True for blob_info in stream_info['blobs'][:-1]]):
+    if any([blob_info['length'] == 0 for blob_info in stream_info['blobs'][:-1]]):
         raise InvalidStreamDescriptorError("Contains zero-length data blob")
     if 'blob_hash' in stream_info['blobs'][-1]:
         raise InvalidStreamDescriptorError("Stream terminator blob should not have a hash")
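Note: the old comprehension inverted its predicate through an if-expression ("False if length > 0 else True"); stating the condition directly reads forward and is equivalent for the non-negative lengths used here. A quick check with made-up blob data:

    blobs = [{'length': 2}, {'length': 0}]   # illustrative data

    old_style = any([False if b['length'] > 0 else True for b in blobs])
    new_style = any([b['length'] == 0 for b in blobs])
    assert old_style == new_style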
@@ -201,15 +201,12 @@ class DeferredProfiler:
         from twisted.internet import reactor

         def _cb(result, fn, start, caller_info):
-            if isinstance(result, (Failure, Exception)):
-                error = result
-                result = None
+            got_error = isinstance(result, (Failure, Exception))
+            self.add_result(fn, start, reactor.seconds(), caller_info, not got_error)
+            if got_error:
+                raise result
             else:
-                error = None
-            self.add_result(fn, start, reactor.seconds(), caller_info, error is None)
-            if error is None:
                 return result
-            raise error

         def _profiled_deferred(fn):
             reactor.addSystemEventTrigger("after", "shutdown", self.show_profile_results, fn)
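Note: the final hunk replaces the error/result juggling with a single got_error flag: record the outcome first, then either re-raise the failure or pass the value through unchanged. The diff does not name the exact pylint message, so the motivation is inferred from the commit title. A minimal standalone sketch of the resulting callback shape; record_result is a hypothetical stand-in for self.add_result:

    from twisted.python.failure import Failure

    def make_passthrough_cb(record_result):
        def _cb(result):
            got_error = isinstance(result, (Failure, Exception))
            record_result(success=not got_error)
            if got_error:
                # Failure subclasses BaseException, so re-raising keeps
                # the errback chain intact.
                raise result
            else:
                return result
        return _cb

    # Usage: d.addBoth(make_passthrough_cb(record)) observes the outcome
    # without altering the value or error flowing through the Deferred.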