forked from LBRYCommunity/lbry-sdk
lint: lbry/stream/*

parent 2a04943a67
commit 494feb9f6d

4 changed files with 34 additions and 34 deletions
lbry/stream/descriptor.py

@@ -16,14 +16,14 @@ from lbry.error import InvalidStreamDescriptorError
 log = logging.getLogger(__name__)

 RE_ILLEGAL_FILENAME_CHARS = re.compile(
-    '('
-    '[<>:"/\\\|\?\*]+|'             # Illegal characters
-    '[\\x00-\\x1F]+|'               # All characters in range 0-31
-    '[ \t]*(\.)+[ \t]*$|'           # Dots at the end
-    '(^[ \t]+|[ \t]+$)|'            # Leading and trailing whitespace
-    '^CON$|^PRN$|^AUX$|'            # Illegal names
-    '^NUL$|^COM[1-9]$|^LPT[1-9]$'   # ...
-    ')'
+    r'('
+    r'[<>:"/\\\|\?\*]+|'            # Illegal characters
+    r'[\\x00-\\x1F]+|'              # All characters in range 0-31
+    r'[ \t]*(\.)+[ \t]*$|'          # Dots at the end
+    r'(^[ \t]+|[ \t]+$)|'           # Leading and trailing whitespace
+    r'^CON$|^PRN$|^AUX$|'           # Illegal names
+    r'^NUL$|^COM[1-9]$|^LPT[1-9]$'  # ...
+    r')'
 )

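The change above only adds r-prefixes: in an ordinary string literal, sequences such as \? and \* are invalid string escapes (Python 3 emits a DeprecationWarning for them), while a raw string hands every backslash to the regex engine untouched. A minimal sketch of the effect, with a made-up filename:

    import re

    # Abbreviated form of RE_ILLEGAL_FILENAME_CHARS: just the
    # illegal-character alternative, written as a raw string so the
    # '\\' escape reaches re.compile() intact.
    ILLEGAL = re.compile(r'[<>:"/\\|?*]+')

    def sanitize(filename: str) -> str:
        # Collapse each run of illegal characters into one underscore.
        return ILLEGAL.sub('_', filename)

    print(sanitize('part: 1/2?.mp4'))  # part_ 1_2_.mp4
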
@@ -122,12 +122,12 @@ class StreamDescriptor:

     def old_sort_json(self) -> bytes:
         blobs = []
-        for b in self.blobs:
+        for blob in self.blobs:
             blobs.append(OrderedDict(
-                [('length', b.length), ('blob_num', b.blob_num), ('iv', b.iv)] if not b.blob_hash else
-                [('length', b.length), ('blob_num', b.blob_num), ('blob_hash', b.blob_hash), ('iv', b.iv)]
+                [('length', blob.length), ('blob_num', blob.blob_num), ('iv', blob.iv)] if not blob.blob_hash else
+                [('length', blob.length), ('blob_num', blob.blob_num), ('blob_hash', blob.blob_hash), ('iv', blob.iv)]
             ))
-            if not b.blob_hash:
+            if not blob.blob_hash:
                 break
         return json.dumps(
             OrderedDict([
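The loop body is unchanged apart from the b to blob rename (pylint's default naming rules reject single-letter variables). For context, the logic it implements is: emit one ordered record per blob and stop at the first blob without a hash, which is the zero-length stream terminator. A standalone sketch with plain dicts standing in for BlobInfo objects (field values made up):

    import json
    from collections import OrderedDict

    # Stand-ins for BlobInfo; the real attributes are
    # .length, .blob_num, .blob_hash and .iv.
    stream_blobs = [
        {'length': 2097152, 'blob_num': 0, 'blob_hash': 'ab' * 48, 'iv': '00' * 16},
        {'length': 0, 'blob_num': 1, 'blob_hash': None, 'iv': '01' * 16},  # terminator
    ]

    records = []
    for blob in stream_blobs:
        keys = ['length', 'blob_num', 'iv'] if not blob['blob_hash'] else \
               ['length', 'blob_num', 'blob_hash', 'iv']
        records.append(OrderedDict((k, blob[k]) for k in keys))
        if not blob['blob_hash']:
            break  # the terminator is always the last entry
    print(json.dumps(records))
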
@@ -204,14 +204,14 @@ class StreamDescriptor:
         return await loop.run_in_executor(None, cls._from_stream_descriptor_blob, loop, blob_dir, blob)

     @staticmethod
-    def get_blob_hashsum(b: typing.Dict):
-        length = b['length']
+    def get_blob_hashsum(blob_dict: typing.Dict):
+        length = blob_dict['length']
         if length != 0:
-            blob_hash = b['blob_hash']
+            blob_hash = blob_dict['blob_hash']
         else:
             blob_hash = None
-        blob_num = b['blob_num']
-        iv = b['iv']
+        blob_num = blob_dict['blob_num']
+        iv = blob_dict['iv']
         blob_hashsum = get_lbry_hash_obj()
         if length != 0:
             blob_hashsum.update(blob_hash.encode())
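For reference, a self-contained version of what this helper computes, under two assumptions: that get_lbry_hash_obj() is hashlib.sha384, as elsewhere in lbry-sdk, and that the function continues past the end of this hunk by feeding blob_num, iv, and length into the hash. Only the lines shown above are confirmed by the diff:

    import hashlib

    def get_blob_hashsum(blob_dict: dict) -> bytes:
        # A zero-length blob is the stream terminator and carries no blob_hash.
        hashsum = hashlib.sha384()  # assumption: get_lbry_hash_obj() is sha384
        if blob_dict['length'] != 0:
            hashsum.update(blob_dict['blob_hash'].encode())
        # Assumed continuation beyond the visible hunk:
        hashsum.update(str(blob_dict['blob_num']).encode())
        hashsum.update(blob_dict['iv'].encode())
        hashsum.update(str(blob_dict['length']).encode())
        return hashsum.digest()
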
@@ -248,8 +248,8 @@ class StreamDescriptor:
         for blob_bytes in file_reader(file_path):
             blob_num += 1
             blob_info = await BlobFile.create_from_unencrypted(
                 loop, blob_dir, key, next(iv_generator), blob_bytes, blob_num, blob_completed_callback
             )
             blobs.append(blob_info)
         blobs.append(
             BlobInfo(len(blobs), 0, binascii.hexlify(next(iv_generator)).decode()))  # add the stream terminator
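The visible lines of this hunk read the same before and after; the iv_generator they consume is worth a note, though: it is expected to be an endless iterator of AES initialization vectors, one drawn per blob plus one more for the stream terminator. A sketch along the lines of the module's random_iv_generator helper (details assumed):

    import os
    import typing

    def random_iv_generator() -> typing.Generator[bytes, None, None]:
        # One fresh 16-byte IV (the AES block size) per next() call, forever.
        while True:
            yield os.urandom(16)
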
lbry/stream/downloader.py

@@ -62,7 +62,7 @@ class StreamDownloader:
             for url, port in self.config.reflector_servers
         ]
         if 'dht' in self.config.components_to_skip or not self.node or not \
-                len(self.node.protocol.routing_table.get_peers()):
+                len(self.node.protocol.routing_table.get_peers()) > 0:
             self.fixed_peers_delay = 0.0
         else:
             self.fixed_peers_delay = self.config.fixed_peer_delay
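This answers pylint's len-as-condition check, which flags a bare len(...) used as a truth value. For a list the three spellings below are equivalent, since `not len(peers) > 0` parses as `not (len(peers) > 0)`:

    peers: list = []

    # Equivalent for any sized container; pylint flags the middle form.
    assert (not peers) == (not len(peers)) == (not len(peers) > 0)
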
lbry/stream/reflector/server.py

@@ -58,7 +58,7 @@ class ReflectorServerProtocol(asyncio.Protocol):
         response_bytes = json.dumps(response).encode()
         chunk_response(response_bytes)

-    async def handle_request(self, request: typing.Dict):
+    async def handle_request(self, request: typing.Dict):  # pylint: disable=too-many-return-statements
         if self.client_version is None:
             if 'version' not in request:
                 self.transport.close()
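Pylint allows six return statements per function by default; handle_request exceeds that, and the trailing comment confines the suppression to this one function rather than the whole module. A generic illustration (the function here is made up):

    def status_class(code: int) -> str:  # pylint: disable=too-many-return-statements
        # A trailing comment scopes the suppression to this def;
        # a comment on its own line would apply to the rest of the scope.
        if code < 200:
            return 'informational'
        if code < 300:
            return 'success'
        if code < 400:
            return 'redirect'
        if code < 500:
            return 'client error'
        return 'server error'
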
lbry/stream/stream_manager.py

@@ -14,7 +14,7 @@ from lbry.stream.managed_stream import ManagedStream
 from lbry.schema.claim import Claim
 from lbry.schema.url import URL
 from lbry.wallet.dewies import dewies_to_lbc
-from lbry.wallet import WalletManager, Wallet, Transaction, Output
+from lbry.wallet import Output

 if typing.TYPE_CHECKING:
     from lbry.conf import Config
@@ -26,7 +26,7 @@ if typing.TYPE_CHECKING:

 log = logging.getLogger(__name__)

-filter_fields = [
+FILTER_FIELDS = [
     'rowid',
     'status',
     'file_name',
@@ -46,7 +46,7 @@ filter_fields = [
     'blobs_in_stream'
 ]

-comparison_operators = {
+COMPARISON_OPERATORS = {
     'eq': lambda a, b: a == b,
     'ne': lambda a, b: a != b,
     'g': lambda a, b: a > b,
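Both renames answer pylint's invalid-name check: a module-level name that is assigned once and never rebound is treated as a constant and expected in UPPER_CASE. Call sites change only in spelling:

    COMPARISON_OPERATORS = {
        'eq': lambda a, b: a == b,
        'ne': lambda a, b: a != b,
        'g': lambda a, b: a > b,
    }

    # Usage is identical before and after the rename:
    assert COMPARISON_OPERATORS['g'](3, 2)
    assert not COMPARISON_OPERATORS['eq']('running', 'stopped')
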
@@ -56,10 +56,10 @@ comparison_operators = {
 }


-def path_or_none(p) -> Optional[str]:
-    if not p:
+def path_or_none(path) -> Optional[str]:
+    if not path:
         return
-    return binascii.unhexlify(p).decode()
+    return binascii.unhexlify(path).decode()


 class StreamManager:
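This helper decodes the hex-encoded file names the database stores, passing empty or missing values through as None. A quick round-trip check of the renamed function (the sample value is made up, and None is returned explicitly here where the original uses a bare return):

    import binascii
    from typing import Optional

    def path_or_none(path) -> Optional[str]:
        if not path:
            return None
        return binascii.unhexlify(path).decode()

    assert path_or_none(None) is None
    assert path_or_none(binascii.hexlify('video.mp4'.encode())) == 'video.mp4'
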
@@ -256,21 +256,21 @@ class StreamManager:
         :param comparison: comparison operator used for filtering
         :param search_by: fields and values to filter by
         """
-        if sort_by and sort_by not in filter_fields:
+        if sort_by and sort_by not in FILTER_FIELDS:
             raise ValueError(f"'{sort_by}' is not a valid field to sort by")
-        if comparison and comparison not in comparison_operators:
+        if comparison and comparison not in COMPARISON_OPERATORS:
             raise ValueError(f"'{comparison}' is not a valid comparison")
         if 'full_status' in search_by:
             del search_by['full_status']
-        for search in search_by.keys():
-            if search not in filter_fields:
+        for search in search_by:
+            if search not in FILTER_FIELDS:
                 raise ValueError(f"'{search}' is not a valid search operation")
         if search_by:
             comparison = comparison or 'eq'
             streams = []
             for stream in self.streams.values():
                 for search, val in search_by.items():
-                    if comparison_operators[comparison](getattr(stream, search), val):
+                    if COMPARISON_OPERATORS[comparison](getattr(stream, search), val):
                         streams.append(stream)
                         break
         else:
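A compact model of the matching loop above, with SimpleNamespace objects standing in for ManagedStream (field values made up): a stream is kept as soon as any requested field matches under the chosen operator.

    from types import SimpleNamespace

    COMPARISON_OPERATORS = {'eq': lambda a, b: a == b, 'g': lambda a, b: a > b}

    streams = [
        SimpleNamespace(status='running', blobs_in_stream=12),
        SimpleNamespace(status='stopped', blobs_in_stream=3),
    ]
    search_by, comparison = {'status': 'running'}, 'eq'

    matched = []
    for stream in streams:
        for search, val in search_by.items():
            if COMPARISON_OPERATORS[comparison](getattr(stream, search), val):
                matched.append(stream)
                break  # one matching field is enough for this stream

    assert [s.status for s in matched] == ['running']
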
@@ -281,8 +281,8 @@ class StreamManager:
             streams.reverse()
         return streams

-    async def _check_update_or_replace(self, outpoint: str, claim_id: str, claim: Claim) -> typing.Tuple[
-            Optional[ManagedStream], Optional[ManagedStream]]:
+    async def _check_update_or_replace(self, outpoint: str, claim_id: str, claim: Claim
+                                       ) -> typing.Tuple[Optional[ManagedStream], Optional[ManagedStream]]:
         existing = self.get_filtered_streams(outpoint=outpoint)
         if existing:
             return existing[0], None