diff --git a/Makefile b/Makefile
index b3e7df236..c2b8eed0b 100644
--- a/Makefile
+++ b/Makefile
@@ -4,8 +4,8 @@ install:
 	pip install -e .
 
 tools:
-	pip install mypy==0.701 pylint==2.4.4
-	pip install coverage astroid pylint
+	pip install pylint==2.10.0
+	pip install coverage
 
 lint:
 	pylint --rcfile=setup.cfg lbry
diff --git a/lbry/conf.py b/lbry/conf.py
index d29b76237..604cba086 100644
--- a/lbry/conf.py
+++ b/lbry/conf.py
@@ -203,7 +203,7 @@ class MaxKeyFee(Setting[dict]):
         )
         parser.add_argument(
             self.no_cli_name,
-            help=f"Disable maximum key fee check.",
+            help="Disable maximum key fee check.",
             dest=self.name,
             const=None,
             action="store_const",
diff --git a/lbry/dht/protocol/protocol.py b/lbry/dht/protocol/protocol.py
index 8c24aeb76..d6c0d3347 100644
--- a/lbry/dht/protocol/protocol.py
+++ b/lbry/dht/protocol/protocol.py
@@ -556,7 +556,7 @@ class KademliaProtocol(DatagramProtocol):
             address[0], address[1], OLD_PROTOCOL_ERRORS[error_datagram.response]
         )
 
-    def datagram_received(self, datagram: bytes, address: typing.Tuple[str, int]) -> None:  # pylint: disable=arguments-differ
+    def datagram_received(self, datagram: bytes, address: typing.Tuple[str, int]) -> None:  # pylint: disable=arguments-renamed
         try:
             message = decode_datagram(datagram)
         except (ValueError, TypeError, DecodeError):
diff --git a/lbry/dht/protocol/routing_table.py b/lbry/dht/protocol/routing_table.py
index 1cf18d778..4fea1266f 100644
--- a/lbry/dht/protocol/routing_table.py
+++ b/lbry/dht/protocol/routing_table.py
@@ -50,7 +50,7 @@ class KBucket:
             self.peers.append(peer)
             return True
         else:
-            for i in range(len(self.peers)):
+            for i, _ in enumerate(self.peers):
                 local_peer = self.peers[i]
                 if local_peer.node_id == peer.node_id:
                     self.peers.remove(local_peer)
diff --git a/lbry/dht/serialization/datagram.py b/lbry/dht/serialization/datagram.py
index 5008907f0..a13f1792d 100644
--- a/lbry/dht/serialization/datagram.py
+++ b/lbry/dht/serialization/datagram.py
@@ -181,7 +181,7 @@ def decode_datagram(datagram: bytes) -> typing.Union[RequestDatagram, ResponseDa
 def make_compact_ip(address: str) -> bytearray:
     compact_ip = reduce(lambda buff, x: buff + bytearray([int(x)]), address.split('.'), bytearray())
     if len(compact_ip) != 4:
-        raise ValueError(f"invalid IPv4 length")
+        raise ValueError("invalid IPv4 length")
     return compact_ip
 
 
@@ -190,7 +190,7 @@ def make_compact_address(node_id: bytes, address: str, port: int) -> bytearray:
     if not 0 < port < 65536:
         raise ValueError(f'Invalid port: {port}')
     if len(node_id) != constants.HASH_BITS // 8:
-        raise ValueError(f"invalid node node_id length")
+        raise ValueError("invalid node node_id length")
     return compact_ip + port.to_bytes(2, 'big') + node_id
 
 
@@ -201,5 +201,5 @@ def decode_compact_address(compact_address: bytes) -> typing.Tuple[bytes, str, i
     if not 0 < port < 65536:
         raise ValueError(f'Invalid port: {port}')
     if len(node_id) != constants.HASH_BITS // 8:
-        raise ValueError(f"invalid node node_id length")
+        raise ValueError("invalid node node_id length")
     return node_id, address, port
diff --git a/lbry/error/generate.py b/lbry/error/generate.py
index 1752e8452..94e9f6eb1 100644
--- a/lbry/error/generate.py
+++ b/lbry/error/generate.py
@@ -63,7 +63,7 @@ class ErrorClass:
     @staticmethod
     def get_fields(args):
         if len(args) > 1:
-            return f''.join(f'\n{INDENT*2}self.{field} = {field}' for field in args[1:])
+            return ''.join(f'\n{INDENT*2}self.{field} = {field}' for field in args[1:])
         return ''
 
     @staticmethod
diff --git a/lbry/extras/cli.py b/lbry/extras/cli.py
index 5df93f9e1..c263a84d9 100644
--- a/lbry/extras/cli.py
+++ b/lbry/extras/cli.py
@@ -101,7 +101,7 @@ class ArgumentParser(argparse.ArgumentParser):
         self._optionals.title = 'Options'
         if group_name is None:
             self.epilog = (
-                f"Run 'lbrynet COMMAND --help' for more information on a command or group."
+                "Run 'lbrynet COMMAND --help' for more information on a command or group."
             )
         else:
             self.epilog = (
diff --git a/lbry/extras/daemon/daemon.py b/lbry/extras/daemon/daemon.py
index ed0183f0a..b469767bc 100644
--- a/lbry/extras/daemon/daemon.py
+++ b/lbry/extras/daemon/daemon.py
@@ -1701,9 +1701,9 @@ class Daemon(metaclass=JSONRPCServerType):
             'change': {'gap': change_gap, 'maximum_uses_per_address': change_max_uses},
             'receiving': {'gap': receiving_gap, 'maximum_uses_per_address': receiving_max_uses},
         }
-        for chain_name in address_changes:
+        for chain_name, changes in address_changes.items():
             chain = getattr(account, chain_name)
-            for attr, value in address_changes[chain_name].items():
+            for attr, value in changes.items():
                 if value is not None:
                     setattr(chain, attr, value)
                     change_made = True
@@ -2240,15 +2240,15 @@ class Daemon(metaclass=JSONRPCServerType):
             txo = await self.ledger.get_claim_by_claim_id(accounts, claim_id, include_purchase_receipt=True)
             if not isinstance(txo, Output) or not txo.is_claim:
                 # TODO: use error from lbry.error
-                raise Exception(f"Could not find claim with claim_id '{claim_id}'. ")
+                raise Exception(f"Could not find claim with claim_id '{claim_id}'.")
         elif url:
             txo = (await self.ledger.resolve(accounts, [url], include_purchase_receipt=True))[url]
             if not isinstance(txo, Output) or not txo.is_claim:
                 # TODO: use error from lbry.error
-                raise Exception(f"Could not find claim with url '{url}'. ")
+                raise Exception(f"Could not find claim with url '{url}'.")
         else:
             # TODO: use error from lbry.error
-            raise Exception(f"Missing argument claim_id or url. ")
+            raise Exception("Missing argument claim_id or url.")
         if not allow_duplicate_purchase and txo.purchase_receipt:
             # TODO: use error from lbry.error
             raise Exception(
@@ -4091,15 +4091,15 @@ class Daemon(metaclass=JSONRPCServerType):
             txo = await self.ledger.get_claim_by_claim_id(wallet.accounts, claim_id)
             if not isinstance(txo, Output) or not txo.is_claim:
                 # TODO: use error from lbry.error
-                raise Exception(f"Could not find collection with claim_id '{claim_id}'. ")
+                raise Exception(f"Could not find collection with claim_id '{claim_id}'.")
         elif url:
             txo = (await self.ledger.resolve(wallet.accounts, [url]))[url]
             if not isinstance(txo, Output) or not txo.is_claim:
                 # TODO: use error from lbry.error
-                raise Exception(f"Could not find collection with url '{url}'. ")
+                raise Exception(f"Could not find collection with url '{url}'.")
         else:
             # TODO: use error from lbry.error
-            raise Exception(f"Missing argument claim_id or url. ")
+            raise Exception("Missing argument claim_id or url.")
 
         page_num, page_size = abs(page), min(abs(page_size), 50)
         items = await self.ledger.resolve_collection(txo, page_size * (page_num - 1), page_size)
@@ -5072,7 +5072,7 @@ class Daemon(metaclass=JSONRPCServerType):
             'buckets': {}
         }
 
-        for i in range(len(self.dht_node.protocol.routing_table.buckets)):
+        for i, _ in enumerate(self.dht_node.protocol.routing_table.buckets):
             result['buckets'][i] = []
             for peer in self.dht_node.protocol.routing_table.buckets[i].peers:
                 host = {
diff --git a/lbry/extras/daemon/json_response_encoder.py b/lbry/extras/daemon/json_response_encoder.py
index 5d1924224..7e56770be 100644
--- a/lbry/extras/daemon/json_response_encoder.py
+++ b/lbry/extras/daemon/json_response_encoder.py
@@ -123,7 +123,7 @@ class JSONResponseEncoder(JSONEncoder):
         self.ledger = ledger
         self.include_protobuf = include_protobuf
 
-    def default(self, obj):  # pylint: disable=method-hidden,arguments-differ,too-many-return-statements
+    def default(self, obj):  # pylint: disable=method-hidden,arguments-renamed,too-many-return-statements
         if isinstance(obj, Account):
             return self.encode_account(obj)
         if isinstance(obj, Wallet):
diff --git a/lbry/extras/daemon/storage.py b/lbry/extras/daemon/storage.py
index 1387f94a7..38e6caf68 100644
--- a/lbry/extras/daemon/storage.py
+++ b/lbry/extras/daemon/storage.py
@@ -617,7 +617,7 @@ class SQLiteStorage(SQLiteMixin):
             ).fetchall()
             download_dir = binascii.hexlify(self.conf.download_dir.encode()).decode()
             transaction.executemany(
-                f"update file set download_directory=? where stream_hash=?",
+                "update file set download_directory=? where stream_hash=?",
                 ((download_dir, stream_hash) for stream_hash in stream_hashes)
             ).fetchall()
         await self.db.run_with_foreign_keys_disabled(_recover)
@@ -861,6 +861,6 @@ class SQLiteStorage(SQLiteMixin):
             transaction.execute('delete from peer').fetchall()
             transaction.executemany(
                 'insert into peer(node_id, address, udp_port, tcp_port) values (?, ?, ?, ?)',
-                tuple([(binascii.hexlify(p.node_id), p.address, p.udp_port, p.tcp_port) for p in peers])
+                ((binascii.hexlify(p.node_id), p.address, p.udp_port, p.tcp_port) for p in peers)
             ).fetchall()
         return await self.db.run(_save_kademlia_peers)
diff --git a/lbry/file_analysis.py b/lbry/file_analysis.py
index 179ffbfd6..1d601b097 100644
--- a/lbry/file_analysis.py
+++ b/lbry/file_analysis.py
@@ -69,8 +69,8 @@ class VideoFileAnalyzer:
             version = str(e)
         if code != 0 or not version.startswith("ffmpeg"):
             log.warning("Unable to run ffmpeg, but it was requested. Code: %d; Message: %s", code, version)
-            raise FileNotFoundError(f"Unable to locate or run ffmpeg or ffprobe. Please install FFmpeg "
-                                    f"and ensure that it is callable via PATH or conf.ffmpeg_path")
+            raise FileNotFoundError("Unable to locate or run ffmpeg or ffprobe. Please install FFmpeg "
+                                    "and ensure that it is callable via PATH or conf.ffmpeg_path")
         log.debug("Using %s at %s", version.splitlines()[0].split(" Copyright")[0], self._which_ffmpeg)
         return version
 
diff --git a/lbry/stream/managed_stream.py b/lbry/stream/managed_stream.py
index 888dca2e2..2a85da66e 100644
--- a/lbry/stream/managed_stream.py
+++ b/lbry/stream/managed_stream.py
@@ -254,7 +254,7 @@ class ManagedStream(ManagedDownloadSource):
         self.finished_writing.clear()
         self.started_writing.clear()
         try:
-            open(output_path, 'wb').close()
+            open(output_path, 'wb').close()  # pylint: disable=consider-using-with
             async for blob_info, decrypted in self._aiter_read_stream(connection_id=self.SAVING_ID):
                 log.info("write blob %i/%i", blob_info.blob_num + 1, len(self.descriptor.blobs) - 1)
                 await self.loop.run_in_executor(None, self._write_decrypted_blob, output_path, decrypted)
diff --git a/lbry/utils.py b/lbry/utils.py
index ee998da41..a5dc1a26e 100644
--- a/lbry/utils.py
+++ b/lbry/utils.py
@@ -155,7 +155,7 @@ def async_timed_cache(duration: int):
         async def _inner(*args, **kwargs) -> typing.Any:
             loop = asyncio.get_running_loop()
             time_now = loop.time()
-            key = tuple([args, tuple([tuple([k, kwargs[k]]) for k in kwargs])])
+            key = (args, tuple(kwargs.items()))
             if key in cache and (time_now - cache[key][1] < duration):
                 return cache[key][0]
             to_cache = await func(*args, **kwargs)
@@ -173,7 +173,7 @@ def cache_concurrent(async_fn):
 
     @functools.wraps(async_fn)
     async def wrapper(*args, **kwargs):
-        key = tuple([args, tuple([tuple([k, kwargs[k]]) for k in kwargs])])
+        key = (args, tuple(kwargs.items()))
         cache[key] = cache.get(key) or asyncio.create_task(async_fn(*args, **kwargs))
         try:
             return await cache[key]
@@ -342,7 +342,7 @@ def lru_cache_concurrent(cache_size: typing.Optional[int] = None,
 
         @functools.wraps(async_fn)
        async def _inner(*args, **kwargs):
-            key = tuple([args, tuple([tuple([k, kwargs[k]]) for k in kwargs])])
+            key = (args, tuple(kwargs.items()))
            if key in lru_cache:
                return lru_cache.get(key)
 
diff --git a/lbry/wallet/account.py b/lbry/wallet/account.py
index a61878403..05989c324 100644
--- a/lbry/wallet/account.py
+++ b/lbry/wallet/account.py
@@ -250,7 +250,7 @@ class Account:
         generator_name = address_generator.get('name', HierarchicalDeterministic.name)
         self.address_generator = self.address_generators[generator_name]
         self.receiving, self.change = self.address_generator.from_dict(self, address_generator)
-        self.address_managers = {am.chain_number: am for am in {self.receiving, self.change}}
+        self.address_managers = {am.chain_number: am for am in (self.receiving, self.change)}
         self.channel_keys = channel_keys
         ledger.add_account(self)
         wallet.add_account(self)
diff --git a/lbry/wallet/bip32.py b/lbry/wallet/bip32.py
index 3e6bc3a7f..2bfcb1ad3 100644
--- a/lbry/wallet/bip32.py
+++ b/lbry/wallet/bip32.py
@@ -46,9 +46,11 @@ class _KeyBase:
         if len(raw_serkey) != 33:
             raise ValueError('raw_serkey must have length 33')
 
-        return (ver_bytes + bytes((self.depth,))
-                + self.parent_fingerprint() + self.n.to_bytes(4, 'big')
-                + self.chain_code + raw_serkey)
+        return (
+            ver_bytes + bytes((self.depth,))
+            + self.parent_fingerprint() + self.n.to_bytes(4, 'big')
+            + self.chain_code + raw_serkey
+        )
 
     def identifier(self):
         raise NotImplementedError
diff --git a/lbry/wallet/database.py b/lbry/wallet/database.py
index c12ea0b8e..4507bd7dd 100644
--- a/lbry/wallet/database.py
+++ b/lbry/wallet/database.py
@@ -82,10 +82,10 @@ class AIOSQLite:
         "read_count", "Number of database reads", namespace="daemon_database"
     )
     acquire_write_lock_metric = Histogram(
-        f'write_lock_acquired', 'Time to acquire the write lock', namespace="daemon_database", buckets=HISTOGRAM_BUCKETS
+        'write_lock_acquired', 'Time to acquire the write lock', namespace="daemon_database", buckets=HISTOGRAM_BUCKETS
     )
     held_write_lock_metric = Histogram(
-        f'write_lock_held', 'Length of time the write lock is held for', namespace="daemon_database",
+        'write_lock_held', 'Length of time the write lock is held for', namespace="daemon_database",
         buckets=HISTOGRAM_BUCKETS
     )
 
@@ -506,7 +506,7 @@ def _get_spendable_utxos(transaction: sqlite3.Connection, accounts: List, decode
                          amount_to_reserve: int, reserved_amount: int, floor: int, ceiling: int,
                          fee_per_byte: int) -> int:
     accounts_fmt = ",".join(["?"] * len(accounts))
-    txo_query = f"""
+    txo_query = """
         SELECT tx.txid, txo.txoid, tx.raw, tx.height, txo.position as nout, tx.is_verified, txo.amount FROM txo
         INNER JOIN account_address USING (address)
         LEFT JOIN txi USING (txoid)
diff --git a/setup.cfg b/setup.cfg
index 7a5b684e5..3b0698f56 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -21,6 +21,7 @@ disable=
     c-extension-no-member,
     fixme,
     broad-except,
+    raise-missing-from,
     no-else-return,
     cyclic-import,
     missing-docstring,
@@ -37,5 +38,6 @@ disable=
     too-many-public-methods,
     too-many-return-statements,
     too-many-instance-attributes,
+    unspecified-encoding,
    protected-access,
    unused-argument