forked from LBRYCommunity/lbry-sdk

Upgrade Python syntax with pyupgrade --py37-plus

parent b70a23a10a
commit c5cfd8f40e

38 changed files with 77 additions and 85 deletions
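pyupgrade mechanically rewrites Python 2-era idioms into their modern Python 3
equivalents. As a rough sketch of the rewrite patterns this commit applies
(examples condensed from the hunks below, not taken verbatim from any single
file):

    # IOError and EnvironmentError have been aliases of OSError since
    # Python 3.3, so the alias names are replaced with OSError itself.
    raise IOError("unknown blob length")       # before
    raise OSError("unknown blob length")       # after

    # dict()/set() calls over generator expressions become comprehensions.
    return dict((k, d[k]) for k in keys)       # before
    return {k: d[k] for k in keys}             # after

    # Redundant u'' prefixes and (object) base classes are dropped, and
    # super(Cls, self) becomes the zero-argument super().
    class FramerBase(object): ...              # before
    class FramerBase: ...                      # after

    # Simple str.format() and %-formatting calls become f-strings, and
    # regex patterns containing escapes gain an r prefix.
    '{:,d}'.format(reqs)                       # before
    f'{reqs:,d}'                               # after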
@@ -33,13 +33,13 @@ class HashBlobWriter:
     def write(self, data: bytes):
         expected_length = self.get_length()
         if not expected_length:
-            raise IOError("unknown blob length")
+            raise OSError("unknown blob length")
         if self.buffer is None:
             log.warning("writer has already been closed")
             if not self.finished.done():
                 self.finished.cancel()
                 return
-            raise IOError('I/O operation on closed file')
+            raise OSError('I/O operation on closed file')
 
         self._hashsum.update(data)
         self.len_so_far += len(data)
@@ -86,7 +86,7 @@ class BlobExchangeClientProtocol(asyncio.Protocol):
         self._blob_bytes_received += len(data)
         try:
             self.writer.write(data)
-        except IOError as err:
+        except OSError as err:
             log.error("error downloading blob from %s:%i: %s", self.peer_address, self.peer_port, err)
             if self._response_fut and not self._response_fut.done():
                 self._response_fut.set_exception(err)
@@ -75,10 +75,10 @@ class BlobDownloader:
 
     def clearbanned(self):
         now = self.loop.time()
-        self.ignored = dict((
-            (peer, when) for (peer, when) in self.ignored.items()
+        self.ignored = {
+            peer: when for (peer, when) in self.ignored.items()
             if (now - when) < min(30.0, (self.failures.get(peer, 0) ** self.BAN_FACTOR))
-        ))
+        }
 
     @cache_concurrent
     async def download_blob(self, blob_hash: str, length: typing.Optional[int] = None,
@@ -81,10 +81,10 @@ class BlobServerProtocol(asyncio.Protocol):
             responses.append(BlobPaymentAddressResponse(lbrycrd_address=self.lbrycrd_address))
         availability_request = request.get_availability_request()
         if availability_request:
-            responses.append(BlobAvailabilityResponse(available_blobs=list(set((
+            responses.append(BlobAvailabilityResponse(available_blobs=list(set(
                 filter(lambda blob_hash: blob_hash in self.blob_manager.completed_blob_hashes,
                        availability_request.requested_blobs)
-            )))))
+            ))))
         price_request = request.get_price_request()
         if price_request:
             responses.append(BlobPriceResponse(blob_data_payment_rate='RATE_ACCEPTED'))
@@ -14,7 +14,7 @@ from torba.client.coinselection import STRATEGIES
 log = logging.getLogger(__name__)
 
 
-NOT_SET = type(str('NOT_SET'), (object,), {})
+NOT_SET = type('NOT_SET', (object,), {})
 T = typing.TypeVar('T')
 
 CURRENCIES = {
@@ -659,9 +659,9 @@ def get_linux_directories() -> typing.Tuple[str, str, str]:
     try:
         with open(os.path.join(user_config_dir(), 'user-dirs.dirs'), 'r') as xdg:
             down_dir = re.search(r'XDG_DOWNLOAD_DIR=(.+)', xdg.read()).group(1)
-            down_dir = re.sub('\$HOME', os.getenv('HOME') or os.path.expanduser("~/"), down_dir)
+            down_dir = re.sub(r'\$HOME', os.getenv('HOME') or os.path.expanduser("~/"), down_dir)
             download_dir = re.sub('\"', '', down_dir)
-    except EnvironmentError:
+    except OSError:
         download_dir = os.getenv('XDG_DOWNLOAD_DIR')
         if not download_dir:
             download_dir = os.path.expanduser('~/Downloads')
@@ -286,7 +286,7 @@ def do_migration(conf):
                 _add_recovered_blobs(blobs, damaged_sd, sd_length)  # pylint: disable=no-value-for-parameter
                 _import_file(*file_args[damaged_sd])
                 damaged_stream_sds.remove(damaged_sd)
-            except (OSError, ValueError, TypeError, IOError, AssertionError, sqlite3.IntegrityError):
+            except (OSError, ValueError, TypeError, AssertionError, sqlite3.IntegrityError):
                 continue
 
     log.info("migrated %i files", new_db.execute("select count(*) from file").fetchone()[0])
@@ -94,8 +94,7 @@ def _batched_select(transaction, query, parameters, batch_size=900):
     for start_index in range(0, len(parameters), batch_size):
         current_batch = parameters[start_index:start_index+batch_size]
         bind = "({})".format(','.join(['?'] * len(current_batch)))
-        for result in transaction.execute(query.format(bind), current_batch):
-            yield result
+        yield from transaction.execute(query.format(bind), current_batch)
 
 
 def get_all_lbry_files(transaction: sqlite3.Connection) -> typing.List[typing.Dict]:
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright 2016-2017 Ionuț Arțăriși <ionut@artarisi.eu>
 
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -10,4 +10,4 @@ def normalize_tag(tag: str):
 
 
 def clean_tags(tags: List[str]):
-    return [tag for tag in set(normalize_tag(tag) for tag in tags) if tag]
+    return [tag for tag in {normalize_tag(tag) for tag in tags} if tag]
@@ -263,7 +263,7 @@ class StreamDescriptor:
         return descriptor
 
     def lower_bound_decrypted_length(self) -> int:
-        length = sum((blob.length - 1 for blob in self.blobs[:-2]))
+        length = sum(blob.length - 1 for blob in self.blobs[:-2])
         return length + self.blobs[-2].length - (AES.block_size // 8)
 
     def upper_bound_decrypted_length(self) -> int:
@@ -37,7 +37,7 @@ class ReflectorServerProtocol(asyncio.Protocol):
         if self.incoming.is_set():
             try:
                 self.writer.write(data)
-            except IOError as err:
+            except OSError as err:
                 log.error("error receiving blob: %s", err)
                 self.transport.close()
                 return
@@ -99,8 +99,8 @@ class MainNetLedger(BaseLedger):
                      "%d channels, %d certificates and %d claims",
                      account.id, balance, channel_count, len(account.channel_keys), claim_count)
         else:
-            total_receiving = len((await account.receiving.get_addresses()))
-            total_change = len((await account.change.get_addresses()))
+            total_receiving = len(await account.receiving.get_addresses())
+            total_change = len(await account.change.get_addresses())
             log.info("Loaded account %s with %s LBC, %d receiving addresses (gap: %d), "
                      "%d change addresses (gap: %d), %d channels, %d certificates and %d claims. ",
                      account.id, balance, total_receiving, account.receiving.gap, total_change,
@@ -137,9 +137,9 @@ class LbryWalletManager(BaseWalletManager):
 
     async def _migrate_addresses(self, receiving_addresses: set, change_addresses: set):
         async with self.default_account.receiving.address_generator_lock:
-            migrated_receiving = set((await self.default_account.receiving._generate_keys(0, len(receiving_addresses))))
+            migrated_receiving = set(await self.default_account.receiving._generate_keys(0, len(receiving_addresses)))
         async with self.default_account.change.address_generator_lock:
-            migrated_change = set((await self.default_account.change._generate_keys(0, len(change_addresses))))
+            migrated_change = set(await self.default_account.change._generate_keys(0, len(change_addresses)))
         receiving_addresses = set(map(self.default_account.ledger.public_key_to_address, receiving_addresses))
         change_addresses = set(map(self.default_account.ledger.public_key_to_address, change_addresses))
         if not any(change_addresses.difference(migrated_change)):
@@ -319,7 +319,7 @@ class SQLDB:
             for table in ('claim', 'support', 'claimtrie'):
                 self.execute(*self._delete_sql(table, {'claim_hash__in': binary_claim_hashes}))
             self._clear_claim_metadata(binary_claim_hashes)
-            return set(r['channel_hash'] for r in affected_channels)
+            return {r['channel_hash'] for r in affected_channels}
         return set()
 
     def _clear_claim_metadata(self, binary_claim_hashes: List[sqlite3.Binary]):
@@ -408,9 +408,9 @@ def get_examples():
 
 
 SECTIONS = re.compile("(.*?)Usage:(.*?)Options:(.*?)Returns:(.*)", re.DOTALL)
-REQUIRED_OPTIONS = re.compile("\(<(.*?)>.*?\)")
+REQUIRED_OPTIONS = re.compile(r"\(<(.*?)>.*?\)")
 ARGUMENT_NAME = re.compile("--([^=]+)")
-ARGUMENT_TYPE = re.compile("\s*\((.*?)\)(.*)")
+ARGUMENT_TYPE = re.compile(r"\s*\((.*?)\)(.*)")
 
 
 def get_return_def(returns):
@@ -14,9 +14,9 @@ def get_build_type(travis_tag=None):
     if not travis_tag:
         return "qa"
     log.debug("getting build type for tag: \"%s\"", travis_tag)
-    if re.match('v\d+\.\d+\.\d+rc\d+$', travis_tag):
+    if re.match(r'v\d+\.\d+\.\d+rc\d+$', travis_tag):
        return 'rc'
-    elif re.match('v\d+\.\d+\.\d+$', travis_tag):
+    elif re.match(r'v\d+\.\d+\.\d+$', travis_tag):
         return 'release'
     return 'qa'
 
@@ -24,14 +24,14 @@ async def report_to_slack(output, webhook):
 
 def confidence(times, z, plus_err=True):
     mean = sum(times) / len(times)
-    standard_dev = (sum(((t - sum(times) / len(times)) ** 2.0 for t in times)) / len(times)) ** 0.5
+    standard_dev = (sum((t - sum(times) / len(times)) ** 2.0 for t in times) / len(times)) ** 0.5
     err = (z * standard_dev) / (len(times) ** 0.5)
     return f"{round((mean + err) if plus_err else (mean - err), 3)}"
 
 
 def variance(times):
     mean = sum(times) / len(times)
-    return round(sum(((i - mean) ** 2.0 for i in times)) / (len(times) - 1), 3)
+    return round(sum((i - mean) ** 2.0 for i in times) / (len(times) - 1), 3)
 
 
 async def wait_for_done(conf, claim_name, timeout):
@@ -7,7 +7,7 @@ from lbry.wallet.account import Account
 
 
 def extract(d, keys):
-    return dict((k, d[k]) for k in keys)
+    return {k: d[k] for k in keys}
 
 
 class AccountManagement(CommandTestCase):
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 import unittest
 import asyncio
 from lbry import utils
@@ -119,7 +118,7 @@ class CacheConcurrentDecoratorTests(AsyncioTestCase):
         # test that the task is run fresh, it should not error
         self.counter = 0
         t3 = self.loop.create_task(self.foo(1))
-        self.assertTrue((await t3))
+        self.assertTrue(await t3)
         self.assertEqual(1, self.counter)
 
         # the previously failed call should still raise if awaited
@@ -24,7 +24,7 @@ class TestManagedStream(BlobExchangeTestBase):
     async def create_stream(self, blob_count: int = 10, file_name='test_file'):
         self.stream_bytes = b''
         for _ in range(blob_count):
-            self.stream_bytes += os.urandom((MAX_BLOB_SIZE - 1))
+            self.stream_bytes += os.urandom(MAX_BLOB_SIZE - 1)
         # create the stream
         file_path = os.path.join(self.server_dir, file_name)
         with open(file_path, 'wb') as f:
@@ -7,7 +7,7 @@ from torba.client.mnemonic import Mnemonic
 class TestMnemonic(unittest.TestCase):
 
     def test_mnemonic_to_seed(self):
-        seed = Mnemonic.mnemonic_to_seed(mnemonic=u'foobar', passphrase=u'torba')
+        seed = Mnemonic.mnemonic_to_seed(mnemonic='foobar', passphrase='torba')
         self.assertEqual(
             hexlify(seed),
             b'475a419db4e991cab14f08bde2d357e52b3e7241f72c6d8a2f92782367feeee9f403dc6a37c26a3f02ab9'
@@ -446,7 +446,7 @@ class BaseDatabase(SQLiteMixin):
             SELECT txi.txid FROM txi JOIN account_address USING (address) {where}
         """
         return await self.db.execute_fetchall(
-            *query("SELECT {} FROM tx".format(cols), **constraints)
+            *query(f"SELECT {cols} FROM tx", **constraints)
         )
 
     async def get_transactions(self, wallet=None, **constraints):
@@ -523,7 +523,7 @@ class BaseDatabase(SQLiteMixin):
         return await self.db.execute_fetchall(*query(sql.format(cols), **constraints))
 
     async def get_txos(self, wallet=None, no_tx=False, **constraints):
-        my_accounts = set(a.public_key.address for a in wallet.accounts) if wallet else set()
+        my_accounts = {a.public_key.address for a in wallet.accounts} if wallet else set()
         if 'order_by' not in constraints:
             constraints['order_by'] = [
                 "tx.height=0 DESC", "tx.height DESC", "tx.position DESC", "txo.position"
@@ -595,7 +595,7 @@ class BaseDatabase(SQLiteMixin):
 
     async def select_addresses(self, cols, **constraints):
         return await self.db.execute_fetchall(*query(
-            "SELECT {} FROM pubkey_address JOIN account_address USING (address)".format(cols),
+            f"SELECT {cols} FROM pubkey_address JOIN account_address USING (address)",
             **constraints
         ))
 
@@ -54,8 +54,7 @@ class BaseWalletManager:
     @property
     def accounts(self):
         for wallet in self.wallets:
-            for account in wallet.accounts:
-                yield account
+            yield from wallet.accounts
 
     async def start(self):
         self.running = True
@@ -177,7 +177,7 @@ class Base58Error(Exception):
 class Base58:
     """ Class providing base 58 functionality. """
 
-    chars = u'123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
+    chars = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
     assert len(chars) == 58
     char_map = {c: n for n, c in enumerate(chars)}
 
@@ -212,7 +212,7 @@ class Base58:
         # Prepend leading zero bytes if necessary
         count = 0
         for c in txt:
-            if c != u'1':
+            if c != '1':
                 break
             count += 1
         if count:
@@ -225,7 +225,7 @@ class Base58:
         """Converts a big-endian bytearray into a base58 string."""
         value = bytes_to_int(be_bytes)
 
-        txt = u''
+        txt = ''
         while value:
             value, mod = divmod(value, 58)
             txt += cls.chars[mod]
@@ -233,7 +233,7 @@ class Base58:
         for byte in be_bytes:
             if byte != 0:
                 break
-            txt += u'1'
+            txt += '1'
 
         return txt[::-1]
 
@@ -66,11 +66,11 @@ def normalize_text(seed):
     seed = unicodedata.normalize('NFKD', seed)
     seed = seed.lower()
     # remove accents
-    seed = u''.join([c for c in seed if not unicodedata.combining(c)])
+    seed = ''.join([c for c in seed if not unicodedata.combining(c)])
     # normalize whitespaces
-    seed = u' '.join(seed.split())
+    seed = ' '.join(seed.split())
     # remove whitespaces between CJK
-    seed = u''.join([
+    seed = ''.join([
         seed[i] for i in range(len(seed))
         if not (seed[i] in string.whitespace and is_cjk(seed[i-1]) and is_cjk(seed[i+1]))
     ])
@@ -105,7 +105,7 @@ class Mnemonic:
         self.words = load_words(language_name)
 
     @staticmethod
-    def mnemonic_to_seed(mnemonic, passphrase=u''):
+    def mnemonic_to_seed(mnemonic, passphrase=''):
         pbkdf2_rounds = 2048
         mnemonic = normalize_text(mnemonic)
         passphrase = normalize_text(passphrase)
@@ -277,7 +277,7 @@ class WalletStorage:
         if self.path is None:
             return json_data
 
-        temp_path = "%s.tmp.%s" % (self.path, os.getpid())
+        temp_path = "{}.tmp.{}".format(self.path, os.getpid())
         with open(temp_path, "w") as f:
             f.write(json_data)
             f.flush()
@@ -33,7 +33,7 @@ from struct import Struct
 from asyncio import Queue
 
 
-class FramerBase(object):
+class FramerBase:
     """Abstract base class for a framer.
 
     A framer breaks an incoming byte stream into protocol messages,
|
|||
return b''.join(parts)
|
||||
|
||||
|
||||
class ByteQueue(object):
|
||||
class ByteQueue:
|
||||
"""A producer-comsumer queue. Incoming network data is put as it
|
||||
arrives, and the consumer calls an async method waiting for data of
|
||||
a specific length."""
|
||||
|
@@ -126,7 +126,7 @@ class ByteQueue(object):
         return whole[:size]
 
 
-class BinaryFramer(object):
+class BinaryFramer:
     """A framer for binary messaging protocols."""
 
     def __init__(self):
@@ -42,7 +42,7 @@ from asyncio import Queue, Event, CancelledError
 from .util import signature_info
 
 
-class SingleRequest(object):
+class SingleRequest:
     __slots__ = ('method', 'args')
 
     def __init__(self, method, args):
@@ -98,7 +98,7 @@ class Batch:
         return f'Batch({len(self.items)} items)'
 
 
-class Response(object):
+class Response:
     __slots__ = ('result', )
 
     def __init__(self, result):
@@ -157,7 +157,7 @@ class ProtocolError(CodeMessageError):
         self.response_msg_id = id
 
 
-class JSONRPC(object):
+class JSONRPC:
     """Abstract base class that interprets and constructs JSON RPC messages."""
 
     # Error codes. See http://www.jsonrpc.org/specification
@@ -569,7 +569,7 @@ class JSONRPCAutoDetect(JSONRPCv2):
                 return JSONRPCLoose
 
         if isinstance(main, list):
-            parts = set(protocol_for_payload(payload) for payload in main)
+            parts = {protocol_for_payload(payload) for payload in main}
             # If all same protocol, return it
             if len(parts) == 1:
                 return parts.pop()
@@ -584,7 +584,7 @@ class JSONRPCAutoDetect(JSONRPCv2):
         return protocol_for_payload(main)
 
 
-class JSONRPCConnection(object):
+class JSONRPCConnection:
     """Maintains state of a JSON RPC connection, in particular
     encapsulating the handling of request IDs.
 
@@ -343,7 +343,7 @@ class BatchError(Exception):
         self.request = request  # BatchRequest object
 
 
-class BatchRequest(object):
+class BatchRequest:
     """Used to build a batch request to send to the server. Stores
     the
@@ -512,7 +512,7 @@ class RPCSession(SessionBase):
         return BatchRequest(self, raise_errors)
 
 
-class Server(object):
+class Server:
     """A simple wrapper around an asyncio.Server object."""
 
     def __init__(self, session_factory, host=None, port=None, *,
@@ -58,7 +58,7 @@ class NeedData(Exception):
     pass
 
 
-class SOCKSBase(object):
+class SOCKSBase:
 
     @classmethod
     def name(cls):
@@ -266,7 +266,7 @@ class SOCKS5(SOCKSBase):
         return None
 
 
-class SOCKSProxy(object):
+class SOCKSProxy:
 
     def __init__(self, address, protocol, auth):
         """A SOCKS proxy at an address following a SOCKS protocol. auth is an
@@ -342,7 +342,7 @@ class SOCKSProxy(object):
                 return sock, address
             exceptions.append(sock)
 
-        strings = set(f'{exc!r}' for exc in exceptions)
+        strings = {f'{exc!r}' for exc in exceptions}
         raise (exceptions[0] if len(strings) == 1 else
                OSError(f'multiple exceptions: {", ".join(strings)}'))
 
@@ -68,7 +68,7 @@ def signature_info(func):
     return SignatureInfo(min_args, max_args, required_names, other_names)
 
 
-class Concurrency(object):
+class Concurrency:
 
     def __init__(self, max_concurrent):
         self._require_non_negative(max_concurrent)
@@ -206,7 +206,7 @@ class MemPool:
             hex_hashes = await self.api.mempool_hashes()
             if height != await self.api.height():
                 continue
-            hashes = set(hex_str_to_hash(hh) for hh in hex_hashes)
+            hashes = {hex_str_to_hash(hh) for hh in hex_hashes}
             async with self.lock:
                 touched = await self._process_mempool(hashes)
             synchronized_event.set()
@@ -223,7 +223,7 @@ class MemPool:
         # First handle txs that have disappeared
         for tx_hash in set(txs).difference(all_hashes):
             tx = txs.pop(tx_hash)
-            tx_hashXs = set(hashX for hashX, value in tx.in_pairs)
+            tx_hashXs = {hashX for hashX, value in tx.in_pairs}
             tx_hashXs.update(hashX for hashX, value in tx.out_pairs)
             for hashX in tx_hashXs:
                 hashXs[hashX].remove(tx_hash)
@@ -463,8 +463,8 @@ class PeerManager:
         onion_peers = []
 
         # Always report ourselves if valid (even if not public)
-        peers = set(myself for myself in self.myselves
-                    if myself.last_good > cutoff)
+        peers = {myself for myself in self.myselves
+                 if myself.last_good > cutoff}
 
         # Bucket the clearnet peers and select up to two from each
         buckets = defaultdict(list)
@@ -15,12 +15,12 @@ def sessions_lines(data):
     for (id_, flags, peer, client, proto, reqs, txs_sent, subs,
          recv_count, recv_size, send_count, send_size, time) in data:
         yield fmt.format(id_, flags, client, proto,
-                         '{:,d}'.format(reqs),
-                         '{:,d}'.format(txs_sent),
-                         '{:,d}'.format(subs),
-                         '{:,d}'.format(recv_count),
+                         f'{reqs:,d}',
+                         f'{txs_sent:,d}',
+                         f'{subs:,d}',
+                         f'{recv_count:,d}',
                          '{:,d}'.format(recv_size // 1024),
-                         '{:,d}'.format(send_count),
+                         f'{send_count:,d}',
                          '{:,d}'.format(send_size // 1024),
                          util.formatted_time(time, sep=''), peer)
 
@@ -37,14 +37,14 @@ def groups_lines(data):
     for (id_, session_count, bandwidth, reqs, txs_sent, subs,
          recv_count, recv_size, send_count, send_size) in data:
         yield fmt.format(id_,
-                         '{:,d}'.format(session_count),
+                         f'{session_count:,d}',
                          '{:,d}'.format(bandwidth // 1024),
-                         '{:,d}'.format(reqs),
-                         '{:,d}'.format(txs_sent),
-                         '{:,d}'.format(subs),
-                         '{:,d}'.format(recv_count),
+                         f'{reqs:,d}',
+                         f'{txs_sent:,d}',
+                         f'{subs:,d}',
+                         f'{recv_count:,d}',
                          '{:,d}'.format(recv_size // 1024),
-                         '{:,d}'.format(send_count),
+                         f'{send_count:,d}',
                          '{:,d}'.format(send_size // 1024))
 
 
@@ -462,7 +462,7 @@ class TxInputTokenPay(TxInput):
         # a coin burn.
         if self._is_anon_input():
             return True
-        return super(TxInputTokenPay, self).is_generation()
+        return super().is_generation()
 
 
 class TxInputTokenPayStealth(
@@ -43,7 +43,7 @@ class ColorHandler(logging.StreamHandler):
                 color_name = self.level_color.get(record.levelno, "black")
                 color_code = self.color_code[color_name]
                 stream = self.stream
-                stream.write('\x1b[%sm%s\x1b[0m' % (color_code, msg))
+                stream.write(f'\x1b[{color_code}m{msg}\x1b[0m')
                 stream.write(self.terminator)
                 self.flush()
             except Exception:
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 # Form implementation generated from reading ui file 'blockchain_dock.ui',
 # licensing of 'blockchain_dock.ui' applies.
 #
@@ -10,7 +8,7 @@
 
 from PySide2 import QtCore, QtGui, QtWidgets
 
-class Ui_BlockchainDock(object):
+class Ui_BlockchainDock:
     def setupUi(self, BlockchainDock):
         BlockchainDock.setObjectName("BlockchainDock")
         BlockchainDock.resize(416, 167)
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 # Form implementation generated from reading ui file 'output_dock.ui',
 # licensing of 'output_dock.ui' applies.
 #
@@ -10,7 +8,7 @@
 
 from PySide2 import QtCore, QtGui, QtWidgets
 
-class Ui_OutputDock(object):
+class Ui_OutputDock:
     def setupUi(self, OutputDock):
         OutputDock.setObjectName("OutputDock")
         OutputDock.resize(700, 397)