diff --git a/lbrynet/dht/datastore.py b/lbrynet/dht/datastore.py
index 12be982f8..19fa0f84a 100644
--- a/lbrynet/dht/datastore.py
+++ b/lbrynet/dht/datastore.py
@@ -44,7 +44,7 @@ class DictDataStore(UserDict):
             del self[key]

     def hasPeersForBlob(self, key):
-        return True if key in self and len(tuple(self.filter_bad_and_expired_peers(key))) else False
+        return bool(key in self and len(tuple(self.filter_bad_and_expired_peers(key))))

     def addPeerToBlob(self, contact, key, compact_address, lastPublished, originallyPublished, originalPublisherID):
         if key in self:
diff --git a/lbrynet/dht/error.py b/lbrynet/dht/error.py
index f61b7944f..44b5a194d 100644
--- a/lbrynet/dht/error.py
+++ b/lbrynet/dht/error.py
@@ -13,14 +13,12 @@ class DecodeError(Exception):
     Should be raised by an C{Encoding} implementation if decode operation
     fails
     """
-    pass


 class BucketFull(Exception):
     """
     Raised when the bucket is full
     """
-    pass


 class UnknownRemoteException(Exception):
diff --git a/lbrynet/dht/interface.py b/lbrynet/dht/interface.py
index e4071dffc..6353dbbfd 100644
--- a/lbrynet/dht/interface.py
+++ b/lbrynet/dht/interface.py
@@ -10,7 +10,6 @@ class IDataStore(Interface):

     def keys(self):
         """ Return a list of the keys in this data store """
-        pass

     def removeExpiredPeers(self):
         pass
diff --git a/lbrynet/extras/daemon/storage.py b/lbrynet/extras/daemon/storage.py
index 30c2d9330..25065e84c 100644
--- a/lbrynet/extras/daemon/storage.py
+++ b/lbrynet/extras/daemon/storage.py
@@ -418,7 +418,7 @@ class SQLiteStorage:

     def check_if_stream_exists(self, stream_hash):
         d = self.db.runQuery("select stream_hash from stream where stream_hash=?", (stream_hash, ))
-        d.addCallback(lambda r: True if len(r) else False)
+        d.addCallback(lambda r: bool(len(r)))
         return d

     def get_blob_num_by_hash(self, stream_hash, blob_hash):
diff --git a/lbrynet/p2p/StreamDescriptor.py b/lbrynet/p2p/StreamDescriptor.py
index 9ef1048d3..3b5fddcf1 100644
--- a/lbrynet/p2p/StreamDescriptor.py
+++ b/lbrynet/p2p/StreamDescriptor.py
@@ -29,7 +29,6 @@ class StreamDescriptorReader:
     def _get_raw_data(self):
         """This method must be overridden by subclasses. It should return a deferred which fires with the raw data
         in the stream descriptor"""
-        pass

     def get_info(self):
         """Return the fields contained in the file"""
@@ -89,7 +88,6 @@ class StreamDescriptorWriter:
         """This method must be overridden by subclasses to write raw data to
         the stream descriptor
         """
-        pass


 class PlainStreamDescriptorWriter(StreamDescriptorWriter):
@@ -389,7 +387,7 @@ def validate_descriptor(stream_info):
         raise InvalidStreamDescriptorError("Missing '%s'" % (e.args[0]))
     if stream_info['blobs'][-1]['length'] != 0:
         raise InvalidStreamDescriptorError("Does not end with a zero-length blob.")
-    if any([False if blob_info['length'] > 0 else True for blob_info in stream_info['blobs'][:-1]]):
+    if any([blob_info['length'] == 0 for blob_info in stream_info['blobs'][:-1]]):
         raise InvalidStreamDescriptorError("Contains zero-length data blob")
     if 'blob_hash' in stream_info['blobs'][-1]:
         raise InvalidStreamDescriptorError("Stream terminator blob should not have a hash")
diff --git a/lbrynet/utils.py b/lbrynet/utils.py
index 114291102..7955a4219 100644
--- a/lbrynet/utils.py
+++ b/lbrynet/utils.py
@@ -201,15 +201,12 @@ class DeferredProfiler:
             from twisted.internet import reactor

         def _cb(result, fn, start, caller_info):
-            if isinstance(result, (Failure, Exception)):
-                error = result
-                result = None
+            got_error = isinstance(result, (Failure, Exception))
+            self.add_result(fn, start, reactor.seconds(), caller_info, not got_error)
+            if got_error:
+                raise result
             else:
-                error = None
-            self.add_result(fn, start, reactor.seconds(), caller_info, error is None)
-            if error is None:
                 return result
-            raise error

         def _profiled_deferred(fn):
             reactor.addSystemEventTrigger("after", "shutdown", self.show_profile_results, fn)
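
Reviewer note: a minimal standalone sketch (plain Python, not lbrynet code; the blob dicts and row tuples below are made-up fixtures) checking that the rewritten boolean expressions behave the same as the ternaries they replace. The only edge the comprehension rewrite would change is a negative 'length', which blob sizes presumably cannot have.

    # Hypothetical fixtures, only to exercise the two rewritten expressions.
    blobs = [{'length': 10}, {'length': 0}, {'length': 5}, {'length': 0}]  # last entry is the terminator

    # `True if cond else False` is just `bool(cond)`; both forms agree for truthy and falsy inputs,
    # e.g. the row lists returned by runQuery in check_if_stream_exists.
    for rows in ([], [('deadbeef',)]):
        assert (True if len(rows) else False) == bool(len(rows))

    # Inverting the condition removes the ternary from the comprehension:
    # "some data blob has length 0" is the same predicate either way (for non-negative lengths).
    old = any([False if blob_info['length'] > 0 else True for blob_info in blobs[:-1]])
    new = any([blob_info['length'] == 0 for blob_info in blobs[:-1]])
    assert old == new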
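
Similarly, a hedged sketch of the `_cb` refactor in lbrynet/utils.py: the success/error bookkeeping collapses into one `got_error` flag, and `add_result` records the outcome before the result is returned or the failure re-raised. Plain exceptions stand in for `twisted.python.failure.Failure` and a stub replaces `DeferredProfiler.add_result`, so this only illustrates the control flow, not the daemon's profiler.

    recorded = []

    def add_result_stub(success):
        # Stands in for DeferredProfiler.add_result(fn, start, finished, caller_info, success).
        recorded.append(success)

    def _cb(result):
        # Same shape as the refactored callback: record first, then re-raise or pass through.
        got_error = isinstance(result, Exception)
        add_result_stub(not got_error)
        if got_error:
            raise result
        else:
            return result

    assert _cb("ok") == "ok"           # successful results pass through untouched
    try:
        _cb(ValueError("boom"))        # failures are recorded, then re-raised to the errback chain
    except ValueError:
        pass
    assert recorded == [True, False]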