Convert some format() strings in lbry and torba to f-strings

This commit is contained in:
Julin S 2019-10-08 21:49:01 +05:30 committed by Lex Berezhny
parent e3cb5bfa07
commit c61fae05d6
31 changed files with 100 additions and 130 deletions.

View file

@ -4117,7 +4117,7 @@ def get_loggly_query_string(installation_id):
now = utils.now() now = utils.now()
yesterday = now - utils.timedelta(days=1) yesterday = now - utils.timedelta(days=1)
params = { params = {
'terms': 'json.installation_id:{}*'.format(installation_id[:SHORT_ID_LEN]), 'terms': f'json.installation_id:{installation_id[:SHORT_ID_LEN]}*',
'from': loggly_time_string(yesterday), 'from': loggly_time_string(yesterday),
'to': loggly_time_string(now) 'to': loggly_time_string(now)
} }

View file

@ -26,8 +26,7 @@ class ExchangeRate:
self.ts = ts self.ts = ts
def __repr__(self): def __repr__(self):
out = "Currency pair:{}, spot:{}, ts:{}".format( out = f"Currency pair:{self.currency_pair}, spot:{self.spot}, ts:{self.ts}"
self.currency_pair, self.spot, self.ts)
return out return out
def as_dict(self): def as_dict(self):

View file

@ -30,7 +30,7 @@ def migrate_db(conf, start, end):
elif current == 10: elif current == 10:
from .migrate10to11 import do_migration from .migrate10to11 import do_migration
else: else:
raise Exception("DB migration of version {} to {} is not available".format(current, current+1)) raise Exception(f"DB migration of version {current} to {current+1} is not available")
try: try:
do_migration(conf) do_migration(conf)
except Exception as err: except Exception as err:

View file

@ -71,7 +71,7 @@ class StreamReflectorClient(asyncio.Protocol):
raise ValueError("Need protocol version number!") raise ValueError("Need protocol version number!")
server_version = int(response_dict['version']) server_version = int(response_dict['version'])
if server_version != REFLECTOR_V2: if server_version != REFLECTOR_V2:
raise ValueError("I can't handle protocol version {}!".format(server_version)) raise ValueError(f"I can't handle protocol version {server_version}!")
return return
async def send_descriptor(self) -> typing.Tuple[bool, typing.List[str]]: # returns a list of needed blob hashes async def send_descriptor(self) -> typing.Tuple[bool, typing.List[str]]: # returns a list of needed blob hashes

View file

@ -50,12 +50,12 @@ def verify_proof(proof, root_hash, name):
raise InvalidProofError("did not find the alleged child") raise InvalidProofError("did not find the alleged child")
if i == 0 and 'txhash' in proof and 'nOut' in proof and 'last takeover height' in proof: if i == 0 and 'txhash' in proof and 'nOut' in proof and 'last takeover height' in proof:
if len(proof['txhash']) != 64: if len(proof['txhash']) != 64:
raise InvalidProofError("txhash was invalid: {}".format(proof['txhash'])) raise InvalidProofError(f"txhash was invalid: {proof['txhash']}")
if not isinstance(proof['nOut'], int): if not isinstance(proof['nOut'], int):
raise InvalidProofError("nOut was invalid: {}".format(proof['nOut'])) raise InvalidProofError(f"nOut was invalid: {proof['nOut']}")
if not isinstance(proof['last takeover height'], int): if not isinstance(proof['last takeover height'], int):
raise InvalidProofError( raise InvalidProofError(
'last takeover height was invalid: {}'.format(proof['last takeover height'])) f"last takeover height was invalid: {proof['last takeover height']}")
to_hash += get_hash_for_outpoint( to_hash += get_hash_for_outpoint(
binascii.unhexlify(proof['txhash'])[::-1], binascii.unhexlify(proof['txhash'])[::-1],
proof['nOut'], proof['nOut'],

View file

@ -7,9 +7,9 @@ def lbc_to_dewies(lbc: str) -> int:
return coins_to_satoshis(lbc) return coins_to_satoshis(lbc)
except ValueError: except ValueError:
raise ValueError(textwrap.dedent( raise ValueError(textwrap.dedent(
""" f"""
Decimal inputs require a value in the ones place and in the tenths place Decimal inputs require a value in the ones place and in the tenths place
separated by a period. The value provided, '{}', is not of the correct separated by a period. The value provided, '{lbc}', is not of the correct
format. format.
The following are examples of valid decimal inputs: The following are examples of valid decimal inputs:
@ -25,7 +25,7 @@ def lbc_to_dewies(lbc: str) -> int:
83 83
.456 .456
123. 123.
""".format(lbc) """
)) ))

View file

@ -77,7 +77,7 @@ class LbryWalletManager(BaseWalletManager):
while os.path.isfile(backup_path_template % i): while os.path.isfile(backup_path_template % i):
i += 1 i += 1
os.rename(path, backup_path_template % i) os.rename(path, backup_path_template % i)
temp_path = "{}.tmp.{}".format(path, os.getpid()) temp_path = f"{path}.tmp.{os.getpid()}"
with open(temp_path, "w") as f: with open(temp_path, "w") as f:
f.write(migrated_json) f.write(migrated_json)
f.flush() f.flush()

View file

@ -48,8 +48,7 @@ class LBC(Coin):
header = cls.block_header(block, 0) header = cls.block_header(block, 0)
header_hex_hash = hash_to_hex_str(cls.header_hash(header)) header_hex_hash = hash_to_hex_str(cls.header_hash(header))
if header_hex_hash != cls.GENESIS_HASH: if header_hex_hash != cls.GENESIS_HASH:
raise CoinError('genesis block has hash {} expected {}' raise CoinError(f'genesis block has hash {header_hex_hash} expected {cls.GENESIS_HASH}')
.format(header_hex_hash, cls.GENESIS_HASH))
return block return block

View file

@ -189,7 +189,7 @@ class SQLDB:
constraints: Union[list, tuple]) -> Tuple[str, list]: constraints: Union[list, tuple]) -> Tuple[str, list]:
columns, values = [], [] columns, values = [], []
for column, value in data.items(): for column, value in data.items():
columns.append("{} = ?".format(column)) columns.append(f"{column} = ?")
values.append(value) values.append(value)
values.extend(constraints) values.extend(constraints)
return f"UPDATE {table} SET {', '.join(columns)} WHERE {where}", values return f"UPDATE {table} SET {', '.join(columns)} WHERE {where}", values

View file

@ -29,7 +29,7 @@ def main():
build_type = get_build_type(os.environ.get('TRAVIS_TAG', None)) build_type = get_build_type(os.environ.get('TRAVIS_TAG', None))
log.debug("setting build type=%s, build commit=%s", build_type, travis_commit) log.debug("setting build type=%s, build commit=%s", build_type, travis_commit)
with open(build_type_path, 'w') as f: with open(build_type_path, 'w') as f:
f.write("BUILD = \"{}\"\nBUILD_COMMIT = \"{}\"\n".format(build_type, travis_commit)) f.write(f"BUILD = \"{build_type}\"\nBUILD_COMMIT = \"{travis_commit}\"\n")
if __name__ == '__main__': if __name__ == '__main__':

View file

@ -23,7 +23,7 @@ def get_time_accelerator(loop: asyncio.AbstractEventLoop,
async def accelerate_time(seconds: float) -> None: async def accelerate_time(seconds: float) -> None:
nonlocal _time nonlocal _time
if seconds < 0: if seconds < 0:
raise ValueError('Cannot go back in time ({} seconds)'.format(seconds)) raise ValueError(f'Cannot go back in time ({seconds} seconds)')
_time += seconds _time += seconds
await past_events() await past_events()
await asyncio.sleep(0) await asyncio.sleep(0)

View file

@ -176,7 +176,7 @@ class TestFileListSorting(unittest.TestCase):
'file_name=remember.mp3, points_paid=4.8', 'file_name=remember.mp3, points_paid=4.8',
'file_name=day.tiff, points_paid=2.5' 'file_name=day.tiff, points_paid=2.5'
] ]
format_result = lambda f: 'file_name={}, points_paid={}'.format(f['file_name'], f['points_paid']) format_result = lambda f: f"file_name={f['file_name']}, points_paid={f['points_paid']}"
sort_options = ['file_name,asc', 'points_paid,desc'] sort_options = ['file_name,asc', 'points_paid,desc']
file_list = yield f2d(self.test_daemon.jsonrpc_file_list(sort=sort_options)) file_list = yield f2d(self.test_daemon.jsonrpc_file_list(sort=sort_options))

View file

@ -33,7 +33,7 @@ class BasicAccountingTests(LedgerTestCase):
.add_outputs([Output.pay_pubkey_hash(100, hash160)]) .add_outputs([Output.pay_pubkey_hash(100, hash160)])
await self.ledger.db.insert_transaction(tx) await self.ledger.db.insert_transaction(tx)
await self.ledger.db.save_transaction_io( await self.ledger.db.save_transaction_io(
tx, address, hash160, '{}:{}:'.format(tx.id, 1) tx, address, hash160, f'{tx.id}:1:'
) )
self.assertEqual(await self.account.get_balance(), 100) self.assertEqual(await self.account.get_balance(), 100)
@ -41,7 +41,7 @@ class BasicAccountingTests(LedgerTestCase):
.add_outputs([Output.pay_claim_name_pubkey_hash(100, 'foo', b'', hash160)]) .add_outputs([Output.pay_claim_name_pubkey_hash(100, 'foo', b'', hash160)])
await self.ledger.db.insert_transaction(tx) await self.ledger.db.insert_transaction(tx)
await self.ledger.db.save_transaction_io( await self.ledger.db.save_transaction_io(
tx, address, hash160, '{}:{}:'.format(tx.id, 1) tx, address, hash160, f'{tx.id}:1:'
) )
self.assertEqual(await self.account.get_balance(), 100) # claim names don't count towards balance self.assertEqual(await self.account.get_balance(), 100) # claim names don't count towards balance
self.assertEqual(await self.account.get_balance(include_claims=True), 200) self.assertEqual(await self.account.get_balance(include_claims=True), 200)
@ -53,7 +53,7 @@ class BasicAccountingTests(LedgerTestCase):
tx = Transaction(is_verified=True)\ tx = Transaction(is_verified=True)\
.add_outputs([Output.pay_pubkey_hash(100, hash160)]) .add_outputs([Output.pay_pubkey_hash(100, hash160)])
await self.ledger.db.save_transaction_io( await self.ledger.db.save_transaction_io(
'insert', tx, address, hash160, '{}:{}:'.format(tx.id, 1) 'insert', tx, address, hash160, f'{tx.id}:1:'
) )
utxos = await self.account.get_utxos() utxos = await self.account.get_utxos()
@ -62,7 +62,7 @@ class BasicAccountingTests(LedgerTestCase):
tx = Transaction(is_verified=True)\ tx = Transaction(is_verified=True)\
.add_inputs([Input.spend(utxos[0])]) .add_inputs([Input.spend(utxos[0])])
await self.ledger.db.save_transaction_io( await self.ledger.db.save_transaction_io(
'insert', tx, address, hash160, '{}:{}:'.format(tx.id, 1) 'insert', tx, address, hash160, f'{tx.id}:1:'
) )
self.assertEqual(await self.account.get_balance(include_claims=True), 0) self.assertEqual(await self.account.get_balance(include_claims=True), 0)

View file

@ -278,7 +278,7 @@ class BaseAccount:
seed, private_key, public_key = cls.keys_from_dict(ledger, d) seed, private_key, public_key = cls.keys_from_dict(ledger, d)
name = d.get('name') name = d.get('name')
if not name: if not name:
name = 'Account #{}'.format(public_key.address) name = f'Account #{public_key.address}'
return cls( return cls(
ledger=ledger, ledger=ledger,
wallet=wallet, wallet=wallet,

View file

@ -184,10 +184,10 @@ def query(select, **constraints) -> Tuple[str, Dict[str, Any]]:
raise ValueError("order_by must be string or list") raise ValueError("order_by must be string or list")
if limit is not None: if limit is not None:
sql.append('LIMIT {}'.format(limit)) sql.append(f'LIMIT {limit}')
if offset is not None: if offset is not None:
sql.append('OFFSET {}'.format(offset)) sql.append(f'OFFSET {offset}')
return ' '.join(sql), values return ' '.join(sql), values
@ -273,7 +273,7 @@ class SQLiteMixin:
constraints: Union[list, tuple]) -> Tuple[str, list]: constraints: Union[list, tuple]) -> Tuple[str, list]:
columns, values = [], [] columns, values = [], []
for column, value in data.items(): for column, value in data.items():
columns.append("{} = ?".format(column)) columns.append(f"{column} = ?")
values.append(value) values.append(value)
values.extend(constraints) values.extend(constraints)
sql = "UPDATE {} SET {} WHERE {}".format( sql = "UPDATE {} SET {} WHERE {}".format(

View file

@ -144,9 +144,8 @@ class BaseHeaders:
if previous_hash is None: if previous_hash is None:
if self.genesis_hash is not None and self.genesis_hash != current_hash: if self.genesis_hash is not None and self.genesis_hash != current_hash:
raise InvalidHeader( raise InvalidHeader(
height, "genesis header doesn't match: {} vs expected {}".format( height, f"genesis header doesn't match: {current_hash.decode()} "
current_hash.decode(), self.genesis_hash.decode()) f"vs expected {self.genesis_hash.decode()}")
)
return return
if header['prev_block_hash'] != previous_hash: if header['prev_block_hash'] != previous_hash:
@ -166,8 +165,7 @@ class BaseHeaders:
proof_of_work = self.get_proof_of_work(current_hash) proof_of_work = self.get_proof_of_work(current_hash)
if proof_of_work > target: if proof_of_work > target:
raise InvalidHeader( raise InvalidHeader(
height, "insufficient proof of work: {} vs target {}".format( height, f"insufficient proof of work: {proof_of_work.value} vs target {target.value}"
proof_of_work.value, target.value)
) )
async def repair(self): async def repair(self):

View file

@ -35,7 +35,7 @@ class LedgerRegistry(type):
if not (name == 'BaseLedger' and not bases): if not (name == 'BaseLedger' and not bases):
ledger_id = cls.get_id() ledger_id = cls.get_id()
assert ledger_id not in mcs.ledgers,\ assert ledger_id not in mcs.ledgers,\
'Ledger with id "{}" already registered.'.format(ledger_id) f'Ledger with id "{ledger_id}" already registered.'
mcs.ledgers[ledger_id] = cls mcs.ledgers[ledger_id] = cls
return cls return cls
@ -346,7 +346,7 @@ class BaseLedger(metaclass=LedgerRegistry):
) )
else: else:
raise IndexError("headers.connect() returned negative number ({})".format(added)) raise IndexError(f"headers.connect() returned negative number ({added})")
if height < 0: if height < 0:
raise IndexError( raise IndexError(

View file

@ -111,14 +111,14 @@ class DataToken(Token):
__slots__ = () __slots__ = ()
def __repr__(self): def __repr__(self):
return '"{}"'.format(hexlify(self.value)) return f'"{hexlify(self.value)}"'
class SmallIntegerToken(Token): class SmallIntegerToken(Token):
__slots__ = () __slots__ = ()
def __repr__(self): def __repr__(self):
return 'SmallIntegerToken({})'.format(self.value) return f'SmallIntegerToken({self.value})'
def token_producer(source): def token_producer(source):
@ -166,16 +166,16 @@ class Parser:
elif isinstance(opcode, PUSH_MANY): elif isinstance(opcode, PUSH_MANY):
self.consume_many_non_greedy() self.consume_many_non_greedy()
else: else:
raise ParseError("DataToken found but opcode was '{}'.".format(opcode)) raise ParseError(f"DataToken found but opcode was '{opcode}'.")
elif isinstance(token, SmallIntegerToken): elif isinstance(token, SmallIntegerToken):
if isinstance(opcode, SMALL_INTEGER): if isinstance(opcode, SMALL_INTEGER):
self.values[opcode.name] = token.value self.values[opcode.name] = token.value
else: else:
raise ParseError("SmallIntegerToken found but opcode was '{}'.".format(opcode)) raise ParseError(f"SmallIntegerToken found but opcode was '{opcode}'.")
elif token.value == opcode: elif token.value == opcode:
pass pass
else: else:
raise ParseError("Token is '{}' and opcode is '{}'.".format(token.value, opcode)) raise ParseError(f"Token is '{token.value}' and opcode is '{opcode}'.")
self.token_index += 1 self.token_index += 1
self.opcode_index += 1 self.opcode_index += 1
@ -243,7 +243,7 @@ class Parser:
elif isinstance(opcode, PUSH_SUBSCRIPT): elif isinstance(opcode, PUSH_SUBSCRIPT):
self.values[opcode.name] = Script.from_source_with_template(value, opcode.template) self.values[opcode.name] = Script.from_source_with_template(value, opcode.template)
else: else:
raise ParseError("Not a push single or subscript: {}".format(opcode)) raise ParseError(f"Not a push single or subscript: {opcode}")
class Template: class Template:
@ -331,7 +331,7 @@ class Script:
return return
except ParseError: except ParseError:
continue continue
raise ValueError('No matching templates for source: {}'.format(hexlify(self.source))) raise ValueError(f'No matching templates for source: {hexlify(self.source)}')
def generate(self): def generate(self):
self.source = self.template.generate(self._values) self.source = self.template.generate(self._values)

View file

@ -56,7 +56,7 @@ class TXORef:
@property @property
def id(self): def id(self):
return '{}:{}'.format(self.tx_ref.id, self.position) return f'{self.tx_ref.id}:{self.position}'
@property @property
def hash(self): def hash(self):

View file

@ -185,7 +185,7 @@ class Base58:
def char_value(cls, c): def char_value(cls, c):
val = cls.char_map.get(c) val = cls.char_map.get(c)
if val is None: if val is None:
raise Base58Error('invalid base 58 character "{}"'.format(c)) raise Base58Error(f'invalid base 58 character "{c}"')
return val return val
@classmethod @classmethod
@ -243,7 +243,7 @@ class Base58:
be_bytes = cls.decode(txt) be_bytes = cls.decode(txt)
result, check = be_bytes[:-4], be_bytes[-4:] result, check = be_bytes[:-4], be_bytes[-4:]
if check != hash_fn(result)[:4]: if check != hash_fn(result)[:4]:
raise Base58Error('invalid base 58 checksum for {}'.format(txt)) raise Base58Error(f'invalid base 58 checksum for {txt}')
return result return result
@classmethod @classmethod

View file

@ -78,12 +78,10 @@ class Prefetcher:
daemon_height = await self.daemon.height() daemon_height = await self.daemon.height()
behind = daemon_height - height behind = daemon_height - height
if behind > 0: if behind > 0:
self.logger.info('catching up to daemon height {:,d} ' self.logger.info(f'catching up to daemon height {daemon_height:,d} '
'({:,d} blocks behind)' f'({behind:,d} blocks behind)')
.format(daemon_height, behind))
else: else:
self.logger.info('caught up to daemon height {:,d}' self.logger.info(f'caught up to daemon height {daemon_height:,d}')
.format(daemon_height))
async def _prefetch_blocks(self): async def _prefetch_blocks(self):
"""Prefetch some blocks and put them on the queue. """Prefetch some blocks and put them on the queue.
@ -116,8 +114,7 @@ class Prefetcher:
# Special handling for genesis block # Special handling for genesis block
if first == 0: if first == 0:
blocks[0] = self.coin.genesis_block(blocks[0]) blocks[0] = self.coin.genesis_block(blocks[0])
self.logger.info('verified genesis block with hash {}' self.logger.info(f'verified genesis block with hash {hex_hashes[0]}')
.format(hex_hashes[0]))
# Update our recent average block size estimate # Update our recent average block size estimate
size = sum(len(block) for block in blocks) size = sum(len(block) for block in blocks)
@ -461,15 +458,14 @@ class BlockProcessor:
self.height -= 1 self.height -= 1
self.db.tx_counts.pop() self.db.tx_counts.pop()
self.logger.info('backed up to height {:,d}'.format(self.height)) self.logger.info(f'backed up to height {self.height:,d}')
def backup_txs(self, txs): def backup_txs(self, txs):
# Prevout values, in order down the block (coinbase first if present) # Prevout values, in order down the block (coinbase first if present)
# undo_info is in reverse block order # undo_info is in reverse block order
undo_info = self.db.read_undo_info(self.height) undo_info = self.db.read_undo_info(self.height)
if undo_info is None: if undo_info is None:
raise ChainError('no undo information found for height {:,d}' raise ChainError(f'no undo information found for height {self.height:,d}')
.format(self.height))
n = len(undo_info) n = len(undo_info)
# Use local vars for speed in the loops # Use local vars for speed in the loops

View file

@ -100,11 +100,9 @@ class Coin:
missing = [attr for attr in coin_req_attrs missing = [attr for attr in coin_req_attrs
if not hasattr(coin, attr)] if not hasattr(coin, attr)]
if missing: if missing:
raise CoinError('coin {} missing {} attributes' raise CoinError(f'coin {name} missing {missing} attributes')
.format(name, missing))
return coin return coin
raise CoinError('unknown coin {} and network {} combination' raise CoinError(f'unknown coin {name} and network {net} combination')
.format(name, net))
@classmethod @classmethod
def sanitize_url(cls, url): def sanitize_url(cls, url):
@ -112,9 +110,9 @@ class Coin:
url = url.strip().rstrip('/') url = url.strip().rstrip('/')
match = cls.RPC_URL_REGEX.match(url) match = cls.RPC_URL_REGEX.match(url)
if not match: if not match:
raise CoinError('invalid daemon URL: "{}"'.format(url)) raise CoinError(f'invalid daemon URL: "{url}"')
if match.groups()[1] is None: if match.groups()[1] is None:
url += ':{:d}'.format(cls.RPC_PORT) url += f':{cls.RPC_PORT:d}'
if not url.startswith('http://') and not url.startswith('https://'): if not url.startswith('http://') and not url.startswith('https://'):
url = 'http://' + url url = 'http://' + url
return url + '/' return url + '/'
@ -128,8 +126,7 @@ class Coin:
header = cls.block_header(block, 0) header = cls.block_header(block, 0)
header_hex_hash = hash_to_hex_str(cls.header_hash(header)) header_hex_hash = hash_to_hex_str(cls.header_hash(header))
if header_hex_hash != cls.GENESIS_HASH: if header_hex_hash != cls.GENESIS_HASH:
raise CoinError('genesis block has hash {} expected {}' raise CoinError(f'genesis block has hash {header_hex_hash} expected {cls.GENESIS_HASH}')
.format(header_hex_hash, cls.GENESIS_HASH))
return header + bytes(1) return header + bytes(1)
@ -202,7 +199,7 @@ class Coin:
if verbyte in cls.P2SH_VERBYTES: if verbyte in cls.P2SH_VERBYTES:
return ScriptPubKey.P2SH_script(hash160) return ScriptPubKey.P2SH_script(hash160)
raise CoinError('invalid address: {}'.format(address)) raise CoinError(f'invalid address: {address}')
@classmethod @classmethod
def privkey_WIF(cls, privkey_bytes, compressed): def privkey_WIF(cls, privkey_bytes, compressed):

View file

@ -422,8 +422,7 @@ class DB:
async def fs_block_hashes(self, height, count): async def fs_block_hashes(self, height, count):
headers_concat, headers_count = await self.read_headers(height, count) headers_concat, headers_count = await self.read_headers(height, count)
if headers_count != count: if headers_count != count:
raise self.DBError('only got {:,d} headers starting at {:,d}, not ' raise self.DBError(f'only got {headers_count:,d} headers starting at {height:,d}, not {count:,d}')
'{:,d}'.format(headers_count, height, count))
offset = 0 offset = 0
headers = [] headers = []
for n in range(count): for n in range(count):
@ -543,17 +542,15 @@ class DB:
raise self.DBError('failed reading state from DB') raise self.DBError('failed reading state from DB')
self.db_version = state['db_version'] self.db_version = state['db_version']
if self.db_version not in self.DB_VERSIONS: if self.db_version not in self.DB_VERSIONS:
raise self.DBError('your UTXO DB version is {} but this ' raise self.DBError(f'your UTXO DB version is {self.db_version} but this '
'software only handles versions {}' f'software only handles versions {self.DB_VERSIONS}')
.format(self.db_version, self.DB_VERSIONS))
# backwards compat # backwards compat
genesis_hash = state['genesis'] genesis_hash = state['genesis']
if isinstance(genesis_hash, bytes): if isinstance(genesis_hash, bytes):
genesis_hash = genesis_hash.decode() genesis_hash = genesis_hash.decode()
if genesis_hash != self.coin.GENESIS_HASH: if genesis_hash != self.coin.GENESIS_HASH:
raise self.DBError('DB genesis hash {} does not match coin {}' raise self.DBError(f'DB genesis hash {genesis_hash} does not '
.format(genesis_hash, f'match coin {self.coin.GENESIS_HASH}')
self.coin.GENESIS_HASH))
self.db_height = state['height'] self.db_height = state['height']
self.db_tx_count = state['tx_count'] self.db_tx_count = state['tx_count']
self.db_tip = state['tip'] self.db_tip = state['tip']
@ -567,17 +564,16 @@ class DB:
self.last_flush_tx_count = self.fs_tx_count self.last_flush_tx_count = self.fs_tx_count
# Log some stats # Log some stats
self.logger.info('DB version: {:d}'.format(self.db_version)) self.logger.info(f'DB version: {self.db_version:d}')
self.logger.info('coin: {}'.format(self.coin.NAME)) self.logger.info(f'coin: {self.coin.NAME}')
self.logger.info('network: {}'.format(self.coin.NET)) self.logger.info(f'network: {self.coin.NET}')
self.logger.info('height: {:,d}'.format(self.db_height)) self.logger.info(f'height: {self.db_height:,d}')
self.logger.info('tip: {}'.format(hash_to_hex_str(self.db_tip))) self.logger.info(f'tip: {hash_to_hex_str(self.db_tip)}')
self.logger.info('tx count: {:,d}'.format(self.db_tx_count)) self.logger.info(f'tx count: {self.db_tx_count:,d}')
if self.utxo_db.for_sync: if self.utxo_db.for_sync:
self.logger.info(f'flushing DB cache at {self.env.cache_MB:,d} MB') self.logger.info(f'flushing DB cache at {self.env.cache_MB:,d} MB')
if self.first_sync: if self.first_sync:
self.logger.info('sync time so far: {}' self.logger.info(f'sync time so far: {util.formatted_time(self.wall_time)}')
.format(util.formatted_time(self.wall_time)))
def write_utxo_state(self, batch): def write_utxo_state(self, batch):
"""Write (UTXO) state to the batch.""" """Write (UTXO) state to the batch."""

View file

@ -104,7 +104,7 @@ class Env:
def required(cls, envvar): def required(cls, envvar):
value = environ.get(envvar) value = environ.get(envvar)
if value is None: if value is None:
raise cls.Error('required envvar {} not set'.format(envvar)) raise cls.Error(f'required envvar {envvar} not set')
return value return value
@classmethod @classmethod
@ -115,8 +115,7 @@ class Env:
try: try:
return int(value) return int(value)
except Exception: except Exception:
raise cls.Error('cannot convert envvar {} value {} to an integer' raise cls.Error(f'cannot convert envvar {envvar} value {value} to an integer')
.format(envvar, value))
@classmethod @classmethod
def custom(cls, envvar, default, parse): def custom(cls, envvar, default, parse):
@ -126,15 +125,13 @@ class Env:
try: try:
return parse(value) return parse(value)
except Exception as e: except Exception as e:
raise cls.Error('cannot parse envvar {} value {}' raise cls.Error(f'cannot parse envvar {envvar} value {value}') from e
.format(envvar, value)) from e
@classmethod @classmethod
def obsolete(cls, envvars): def obsolete(cls, envvars):
bad = [envvar for envvar in envvars if environ.get(envvar)] bad = [envvar for envvar in envvars if environ.get(envvar)]
if bad: if bad:
raise cls.Error('remove obsolete environment variables {}' raise cls.Error(f'remove obsolete environment variables {bad}')
.format(bad))
def set_event_loop_policy(self): def set_event_loop_policy(self):
policy_name = self.default('EVENT_LOOP_POLICY', None) policy_name = self.default('EVENT_LOOP_POLICY', None)
@ -147,7 +144,7 @@ class Env:
loop_policy = uvloop.EventLoopPolicy() loop_policy = uvloop.EventLoopPolicy()
asyncio.set_event_loop_policy(loop_policy) asyncio.set_event_loop_policy(loop_policy)
return loop_policy return loop_policy
raise self.Error('unknown event loop policy "{}"'.format(policy_name)) raise self.Error(f'unknown event loop policy "{policy_name}"')
def cs_host(self, *, for_rpc): def cs_host(self, *, for_rpc):
"""Returns the 'host' argument to pass to asyncio's create_server """Returns the 'host' argument to pass to asyncio's create_server
@ -180,9 +177,8 @@ class Env:
# We give the DB 250 files; allow ElectrumX 100 for itself # We give the DB 250 files; allow ElectrumX 100 for itself
value = max(0, min(env_value, nofile_limit - 350)) value = max(0, min(env_value, nofile_limit - 350))
if value < env_value: if value < env_value:
self.logger.warning('lowered maximum sessions from {:,d} to {:,d} ' self.logger.warning(f'lowered maximum sessions from {env_value:,d} to {value:,d} '
'because your open file limit is {:,d}' f'because your open file limit is {nofile_limit:,d}')
.format(env_value, value, nofile_limit))
return value return value
def clearnet_identity(self): def clearnet_identity(self):
@ -198,12 +194,12 @@ class Env:
bad = (ip.is_multicast or ip.is_unspecified bad = (ip.is_multicast or ip.is_unspecified
or (ip.is_private and self.peer_announce)) or (ip.is_private and self.peer_announce))
if bad: if bad:
raise self.Error('"{}" is not a valid REPORT_HOST'.format(host)) raise self.Error(f'"{host}" is not a valid REPORT_HOST')
tcp_port = self.integer('REPORT_TCP_PORT', self.tcp_port) or None tcp_port = self.integer('REPORT_TCP_PORT', self.tcp_port) or None
ssl_port = self.integer('REPORT_SSL_PORT', self.ssl_port) or None ssl_port = self.integer('REPORT_SSL_PORT', self.ssl_port) or None
if tcp_port == ssl_port: if tcp_port == ssl_port:
raise self.Error('REPORT_TCP_PORT and REPORT_SSL_PORT ' raise self.Error('REPORT_TCP_PORT and REPORT_SSL_PORT '
'both resolve to {}'.format(tcp_port)) f'both resolve to {tcp_port}')
return NetIdentity( return NetIdentity(
host, host,
tcp_port, tcp_port,
@ -216,8 +212,7 @@ class Env:
if host is None: if host is None:
return None return None
if not host.endswith('.onion'): if not host.endswith('.onion'):
raise self.Error('tor host "{}" must end with ".onion"' raise self.Error(f'tor host "{host}" must end with ".onion"')
.format(host))
def port(port_kind): def port(port_kind):
"""Returns the clearnet identity port, if any and not zero, """Returns the clearnet identity port, if any and not zero,
@ -233,7 +228,7 @@ class Env:
port('ssl_port')) or None port('ssl_port')) or None
if tcp_port == ssl_port: if tcp_port == ssl_port:
raise self.Error('REPORT_TCP_PORT_TOR and REPORT_SSL_PORT_TOR ' raise self.Error('REPORT_TCP_PORT_TOR and REPORT_SSL_PORT_TOR '
'both resolve to {}'.format(tcp_port)) f'both resolve to {tcp_port}')
return NetIdentity( return NetIdentity(
host, host,

View file

@ -95,7 +95,7 @@ class Base58:
def char_value(c): def char_value(c):
val = Base58.cmap.get(c) val = Base58.cmap.get(c)
if val is None: if val is None:
raise Base58Error('invalid base 58 character "{}"'.format(c)) raise Base58Error(f'invalid base 58 character "{c}"')
return val return val
@staticmethod @staticmethod
@ -148,7 +148,7 @@ class Base58:
be_bytes = Base58.decode(txt) be_bytes = Base58.decode(txt)
result, check = be_bytes[:-4], be_bytes[-4:] result, check = be_bytes[:-4], be_bytes[-4:]
if check != hash_fn(result)[:4]: if check != hash_fn(result)[:4]:
raise Base58Error('invalid base 58 checksum for {}'.format(txt)) raise Base58Error(f'invalid base 58 checksum for {txt}')
return result return result
@staticmethod @staticmethod

View file

@ -261,7 +261,7 @@ class Peer:
parts = [self.host, 'v' + self.protocol_max] parts = [self.host, 'v' + self.protocol_max]
if self.pruning: if self.pruning:
parts.append('p{:d}'.format(self.pruning)) parts.append(f'p{self.pruning:d}')
for letter, port in (('s', self.ssl_port), ('t', self.tcp_port)): for letter, port in (('s', self.ssl_port), ('t', self.tcp_port)):
if port: if port:
parts.append(port_text(letter, port)) parts.append(port_text(letter, port))

View file

@ -51,13 +51,13 @@ class Enumeration:
if isinstance(x, tuple): if isinstance(x, tuple):
x, i = x x, i = x
if not isinstance(x, str): if not isinstance(x, str):
raise EnumError("enum name {} not a string".format(x)) raise EnumError(f"enum name {x} not a string")
if not isinstance(i, int): if not isinstance(i, int):
raise EnumError("enum value {} not an integer".format(i)) raise EnumError(f"enum value {i} not an integer")
if x in uniqueNames: if x in uniqueNames:
raise EnumError("enum name {} not unique".format(x)) raise EnumError(f"enum name {x} not unique")
if i in uniqueValues: if i in uniqueValues:
raise EnumError("enum value {} not unique".format(x)) raise EnumError(f"enum value {i} not unique")
uniqueNames.add(x) uniqueNames.add(x)
uniqueValues.add(i) uniqueValues.add(i)
lookup[x] = i lookup[x] = i
@ -69,7 +69,7 @@ class Enumeration:
def __getattr__(self, attr): def __getattr__(self, attr):
result = self.lookup.get(attr) result = self.lookup.get(attr)
if result is None: if result is None:
raise AttributeError('enumeration has no member {}'.format(attr)) raise AttributeError(f'enumeration has no member {attr}')
return result return result
def whatis(self, value): def whatis(self, value):
@ -194,7 +194,7 @@ class ScriptPubKey:
if not req_compressed: if not req_compressed:
return return
raise PubKeyError('uncompressed pubkeys are invalid') raise PubKeyError('uncompressed pubkeys are invalid')
raise PubKeyError('invalid pubkey {}'.format(pubkey)) raise PubKeyError(f'invalid pubkey {pubkey}')
@classmethod @classmethod
def pubkey_script(cls, pubkey): def pubkey_script(cls, pubkey):
@ -206,8 +206,7 @@ class ScriptPubKey:
"""Returns the script for a pay-to-multisig transaction.""" """Returns the script for a pay-to-multisig transaction."""
n = len(pubkeys) n = len(pubkeys)
if not 1 <= m <= n <= 15: if not 1 <= m <= n <= 15:
raise ScriptError('{:d} of {:d} multisig script not possible' raise ScriptError(f'{m:d} of {n:d} multisig script not possible')
.format(m, n))
for pubkey in pubkeys: for pubkey in pubkeys:
cls.validate_pubkey(pubkey, req_compressed=True) cls.validate_pubkey(pubkey, req_compressed=True)
# See https://bitcoin.org/en/developer-guide # See https://bitcoin.org/en/developer-guide
@ -273,11 +272,11 @@ class Script:
@classmethod @classmethod
def opcode_name(cls, opcode): def opcode_name(cls, opcode):
if OpCodes.OP_0 < opcode < OpCodes.OP_PUSHDATA1: if OpCodes.OP_0 < opcode < OpCodes.OP_PUSHDATA1:
return 'OP_{:d}'.format(opcode) return f'OP_{opcode:d}'
try: try:
return OpCodes.whatis(opcode) return OpCodes.whatis(opcode)
except KeyError: except KeyError:
return 'OP_UNKNOWN:{:d}'.format(opcode) return f'OP_UNKNOWN:{opcode:d}'
@classmethod @classmethod
def dump(cls, script): def dump(cls, script):
@ -287,5 +286,4 @@ class Script:
if data is None: if data is None:
print(name) print(name)
else: else:
print('{} {} ({:d} bytes)' print(f'{name} {data.hex()} ({len(data):d} bytes)')
.format(name, data.hex(), len(data)))

View file

@ -382,7 +382,7 @@ class SessionManager:
real_name: "bch.electrumx.cash t50001 s50002" for example real_name: "bch.electrumx.cash t50001 s50002" for example
""" """
await self.peer_mgr.add_localRPC_peer(real_name) await self.peer_mgr.add_localRPC_peer(real_name)
return "peer '{}' added".format(real_name) return f"peer '{real_name}' added"
async def rpc_disconnect(self, session_ids): async def rpc_disconnect(self, session_ids):
"""Disconnect sessions. """Disconnect sessions.
@ -511,17 +511,12 @@ class SessionManager:
self.logger.info(f'max session count: {self.env.max_sessions:,d}') self.logger.info(f'max session count: {self.env.max_sessions:,d}')
self.logger.info(f'session timeout: ' self.logger.info(f'session timeout: '
f'{self.env.session_timeout:,d} seconds') f'{self.env.session_timeout:,d} seconds')
self.logger.info('session bandwidth limit {:,d} bytes' self.logger.info(f'session bandwidth limit {self.env.bandwidth_limit:,d} bytes')
.format(self.env.bandwidth_limit)) self.logger.info(f'max response size {self.env.max_send:,d} bytes')
self.logger.info('max response size {:,d} bytes' self.logger.info(f'max subscriptions across all sessions: {self.max_subs:,d}')
.format(self.env.max_send)) self.logger.info(f'max subscriptions per session: {self.env.max_session_subs:,d}')
self.logger.info('max subscriptions across all sessions: {:,d}'
.format(self.max_subs))
self.logger.info('max subscriptions per session: {:,d}'
.format(self.env.max_session_subs))
if self.env.drop_client is not None: if self.env.drop_client is not None:
self.logger.info('drop clients matching: {}' self.logger.info(f'drop clients matching: {self.env.drop_client.pattern}')
.format(self.env.drop_client.pattern))
# Start notifications; initialize hsub_results # Start notifications; initialize hsub_results
await notifications.start(self.db.db_height, self._notify_sessions) await notifications.start(self.db.db_height, self._notify_sessions)
await self.start_other() await self.start_other()
@ -1079,7 +1074,7 @@ class ElectrumX(SessionBase):
major, minor = divmod(ni_version, 1000000) major, minor = divmod(ni_version, 1000000)
minor, revision = divmod(minor, 10000) minor, revision = divmod(minor, 10000)
revision //= 100 revision //= 100
daemon_version = '{:d}.{:d}.{:d}'.format(major, minor, revision) daemon_version = f'{major:d}.{minor:d}.{revision:d}'
for pair in [ for pair in [
('$SERVER_VERSION', torba.__version__), ('$SERVER_VERSION', torba.__version__),
('$DAEMON_VERSION', daemon_version), ('$DAEMON_VERSION', daemon_version),

View file

@ -19,7 +19,7 @@ def db_class(db_dir, name):
if db_class.__name__.lower() == name.lower(): if db_class.__name__.lower() == name.lower():
db_class.import_module() db_class.import_module()
return partial(db_class, db_dir) return partial(db_class, db_dir)
raise RuntimeError('unrecognised DB engine "{}"'.format(name)) raise RuntimeError(f'unrecognised DB engine "{name}"')
class Storage: class Storage:

View file

@ -60,8 +60,7 @@ class TxInput(namedtuple("TxInput", "prev_hash prev_idx script sequence")):
def __str__(self): def __str__(self):
script = self.script.hex() script = self.script.hex()
prev_hash = hash_to_hex_str(self.prev_hash) prev_hash = hash_to_hex_str(self.prev_hash)
return ("Input({}, {:d}, script={}, sequence={:d})" return (f"Input({prev_hash}, {self.prev_idx:d}, script={script}, sequence={self.sequence:d})")
.format(prev_hash, self.prev_idx, script, self.sequence))
def is_generation(self): def is_generation(self):
"""Test if an input is generation/coinbase like""" """Test if an input is generation/coinbase like"""
@ -473,8 +472,7 @@ class TxInputTokenPayStealth(
def __str__(self): def __str__(self):
script = self.script.hex() script = self.script.hex()
keyimage = bytes(self.keyimage).hex() keyimage = bytes(self.keyimage).hex()
return ("Input({}, {:d}, script={}, sequence={:d})" return (f"Input({keyimage}, {self.ringsize[1]:d}, script={script}, sequence={self.sequence:d})")
.format(keyimage, self.ringsize[1], script, self.sequence))
def is_generation(self): def is_generation(self):
return True return True
@ -518,8 +516,7 @@ class TxInputDcr(namedtuple("TxInput", "prev_hash prev_idx tree sequence")):
def __str__(self): def __str__(self):
prev_hash = hash_to_hex_str(self.prev_hash) prev_hash = hash_to_hex_str(self.prev_hash)
return ("Input({}, {:d}, tree={}, sequence={:d})" return (f"Input({prev_hash}, {self.prev_idx:d}, tree={self.tree}, sequence={self.sequence:d})")
.format(prev_hash, self.prev_idx, self.tree, self.sequence))
def is_generation(self): def is_generation(self):
"""Test if an input is generation/coinbase like""" """Test if an input is generation/coinbase like"""

View file

@ -94,7 +94,7 @@ def formatted_time(t, sep=' '):
parts.append(fmt.format(val)) parts.append(fmt.format(val))
t %= n t %= n
if len(parts) < 3: if len(parts) < 3:
parts.append('{:02d}s'.format(t)) parts.append(f'{t:02d}s')
return sep.join(parts) return sep.join(parts)
@ -182,7 +182,7 @@ class LogicalFile:
"""A logical binary file split across several separate files on disk.""" """A logical binary file split across several separate files on disk."""
def __init__(self, prefix, digits, file_size): def __init__(self, prefix, digits, file_size):
digit_fmt = '{' + ':0{:d}d'.format(digits) + '}' digit_fmt = f'{{:0{digits:d}d}}'
self.filename_fmt = prefix + digit_fmt self.filename_fmt = prefix + digit_fmt
self.file_size = file_size self.file_size = file_size