forked from LBRYCommunity/lbry-sdk
Convert some format() strings in lbry and torba to fstrings
This commit is contained in:
parent e3cb5bfa07
commit c61fae05d6
31 changed files with 100 additions and 130 deletions
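The change applied across all 31 files follows one mechanical pattern: a str.format() call is replaced by an f-string that inlines the same expressions. A minimal sketch of the before/after (the variable names here are illustrative, not taken from the diff):

    height, total = 5, 10  # illustrative values
    old = 'block {} of {}'.format(height, total)  # before: explicit format() call
    new = f'block {height} of {total}'            # after: f-string, Python 3.6+
    assert old == new

Both forms produce identical strings; the f-string simply evaluates the expressions in the enclosing scope at the point the literal is executed, avoiding the extra method call.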
@@ -4117,7 +4117,7 @@ def get_loggly_query_string(installation_id):
     now = utils.now()
     yesterday = now - utils.timedelta(days=1)
     params = {
-        'terms': 'json.installation_id:{}*'.format(installation_id[:SHORT_ID_LEN]),
+        'terms': f'json.installation_id:{installation_id[:SHORT_ID_LEN]}*',
         'from': loggly_time_string(yesterday),
         'to': loggly_time_string(now)
     }
@@ -26,8 +26,7 @@ class ExchangeRate:
         self.ts = ts

     def __repr__(self):
-        out = "Currency pair:{}, spot:{}, ts:{}".format(
-            self.currency_pair, self.spot, self.ts)
+        out = f"Currency pair:{self.currency_pair}, spot:{self.spot}, ts:{self.ts}"
         return out

     def as_dict(self):
@@ -30,7 +30,7 @@ def migrate_db(conf, start, end):
     elif current == 10:
         from .migrate10to11 import do_migration
     else:
-        raise Exception("DB migration of version {} to {} is not available".format(current, current+1))
+        raise Exception(f"DB migration of version {current} to {current+1} is not available")
     try:
         do_migration(conf)
     except Exception as err:
@@ -71,7 +71,7 @@ class StreamReflectorClient(asyncio.Protocol):
             raise ValueError("Need protocol version number!")
         server_version = int(response_dict['version'])
         if server_version != REFLECTOR_V2:
-            raise ValueError("I can't handle protocol version {}!".format(server_version))
+            raise ValueError(f"I can't handle protocol version {server_version}!")
             return

     async def send_descriptor(self) -> typing.Tuple[bool, typing.List[str]]:  # returns a list of needed blob hashes
@@ -50,12 +50,12 @@ def verify_proof(proof, root_hash, name):
             raise InvalidProofError("did not find the alleged child")
         if i == 0 and 'txhash' in proof and 'nOut' in proof and 'last takeover height' in proof:
             if len(proof['txhash']) != 64:
-                raise InvalidProofError("txhash was invalid: {}".format(proof['txhash']))
+                raise InvalidProofError(f"txhash was invalid: {proof['txhash']}")
             if not isinstance(proof['nOut'], int):
-                raise InvalidProofError("nOut was invalid: {}".format(proof['nOut']))
+                raise InvalidProofError(f"nOut was invalid: {proof['nOut']}")
             if not isinstance(proof['last takeover height'], int):
                 raise InvalidProofError(
-                    'last takeover height was invalid: {}'.format(proof['last takeover height']))
+                    f"last takeover height was invalid: {proof['last takeover height']}")
             to_hash += get_hash_for_outpoint(
                 binascii.unhexlify(proof['txhash'])[::-1],
                 proof['nOut'],
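A detail worth noting in hunks like verify_proof above: when an f-string interpolates a subscription such as proof['txhash'], the inner quotes must differ from the string's own delimiter; reusing the same quote character inside a replacement field only became legal in Python 3.12 (PEP 701). A small sketch with an illustrative stand-in value:

    proof = {'txhash': 'ab' * 32}  # illustrative stand-in
    msg = f"txhash was invalid: {proof['txhash']}"  # double quotes outside, single quotes inside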
@@ -7,9 +7,9 @@ def lbc_to_dewies(lbc: str) -> int:
         return coins_to_satoshis(lbc)
     except ValueError:
         raise ValueError(textwrap.dedent(
-            """
+            f"""
             Decimal inputs require a value in the ones place and in the tenths place
-            separated by a period. The value provided, '{}', is not of the correct
+            separated by a period. The value provided, '{lbc}', is not of the correct
             format.

             The following are examples of valid decimal inputs:
@@ -25,7 +25,7 @@ def lbc_to_dewies(lbc: str) -> int:
             83
             .456
             123.
-            """.format(lbc)
+            """
         ))

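In the two hunks above, only the opening triple quote gains the f prefix and the trailing .format(lbc) is dropped; {lbc} is interpolated when the literal is evaluated, before textwrap.dedent() ever sees the text, so the resulting message is unchanged. A minimal sketch of the same shape:

    import textwrap

    lbc = '1'  # illustrative input
    msg = textwrap.dedent(f"""
        The value provided, '{lbc}', is not of the correct format.
    """)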
@@ -77,7 +77,7 @@ class LbryWalletManager(BaseWalletManager):
             while os.path.isfile(backup_path_template % i):
                 i += 1
             os.rename(path, backup_path_template % i)
-            temp_path = "{}.tmp.{}".format(path, os.getpid())
+            temp_path = f"{path}.tmp.{os.getpid()}"
             with open(temp_path, "w") as f:
                 f.write(migrated_json)
                 f.flush()
@@ -48,8 +48,7 @@ class LBC(Coin):
         header = cls.block_header(block, 0)
         header_hex_hash = hash_to_hex_str(cls.header_hash(header))
         if header_hex_hash != cls.GENESIS_HASH:
-            raise CoinError('genesis block has hash {} expected {}'
-                            .format(header_hex_hash, cls.GENESIS_HASH))
+            raise CoinError(f'genesis block has hash {header_hex_hash} expected {cls.GENESIS_HASH}')

         return block
@@ -189,7 +189,7 @@ class SQLDB:
                     constraints: Union[list, tuple]) -> Tuple[str, list]:
         columns, values = [], []
         for column, value in data.items():
-            columns.append("{} = ?".format(column))
+            columns.append(f"{column} = ?")
             values.append(value)
         values.extend(constraints)
         return f"UPDATE {table} SET {', '.join(columns)} WHERE {where}", values
@@ -29,7 +29,7 @@ def main():
     build_type = get_build_type(os.environ.get('TRAVIS_TAG', None))
     log.debug("setting build type=%s, build commit=%s", build_type, travis_commit)
     with open(build_type_path, 'w') as f:
-        f.write("BUILD = \"{}\"\nBUILD_COMMIT = \"{}\"\n".format(build_type, travis_commit))
+        f.write(f"BUILD = \"{build_type}\"\nBUILD_COMMIT = \"{travis_commit}\"\n")


 if __name__ == '__main__':
@@ -23,7 +23,7 @@ def get_time_accelerator(loop: asyncio.AbstractEventLoop,
     async def accelerate_time(seconds: float) -> None:
         nonlocal _time
         if seconds < 0:
-            raise ValueError('Cannot go back in time ({} seconds)'.format(seconds))
+            raise ValueError(f'Cannot go back in time ({seconds} seconds)')
         _time += seconds
         await past_events()
         await asyncio.sleep(0)
@@ -176,7 +176,7 @@ class TestFileListSorting(unittest.TestCase):
             'file_name=remember.mp3, points_paid=4.8',
             'file_name=day.tiff, points_paid=2.5'
         ]
-        format_result = lambda f: 'file_name={}, points_paid={}'.format(f['file_name'], f['points_paid'])
+        format_result = lambda f: f"file_name={f['file_name']}, points_paid={f['points_paid']}"

         sort_options = ['file_name,asc', 'points_paid,desc']
         file_list = yield f2d(self.test_daemon.jsonrpc_file_list(sort=sort_options))
@@ -33,7 +33,7 @@ class BasicAccountingTests(LedgerTestCase):
             .add_outputs([Output.pay_pubkey_hash(100, hash160)])
         await self.ledger.db.insert_transaction(tx)
         await self.ledger.db.save_transaction_io(
-            tx, address, hash160, '{}:{}:'.format(tx.id, 1)
+            tx, address, hash160, f'{tx.id}:1:'
         )
         self.assertEqual(await self.account.get_balance(), 100)
@@ -41,7 +41,7 @@ class BasicAccountingTests(LedgerTestCase):
             .add_outputs([Output.pay_claim_name_pubkey_hash(100, 'foo', b'', hash160)])
         await self.ledger.db.insert_transaction(tx)
         await self.ledger.db.save_transaction_io(
-            tx, address, hash160, '{}:{}:'.format(tx.id, 1)
+            tx, address, hash160, f'{tx.id}:1:'
         )
         self.assertEqual(await self.account.get_balance(), 100)  # claim names don't count towards balance
         self.assertEqual(await self.account.get_balance(include_claims=True), 200)
@@ -53,7 +53,7 @@ class BasicAccountingTests(LedgerTestCase):
         tx = Transaction(is_verified=True)\
             .add_outputs([Output.pay_pubkey_hash(100, hash160)])
         await self.ledger.db.save_transaction_io(
-            'insert', tx, address, hash160, '{}:{}:'.format(tx.id, 1)
+            'insert', tx, address, hash160, f'{tx.id}:1:'
         )

         utxos = await self.account.get_utxos()
@@ -62,7 +62,7 @@ class BasicAccountingTests(LedgerTestCase):
         tx = Transaction(is_verified=True)\
             .add_inputs([Input.spend(utxos[0])])
         await self.ledger.db.save_transaction_io(
-            'insert', tx, address, hash160, '{}:{}:'.format(tx.id, 1)
+            'insert', tx, address, hash160, f'{tx.id}:1:'
         )
         self.assertEqual(await self.account.get_balance(include_claims=True), 0)
@@ -278,7 +278,7 @@ class BaseAccount:
         seed, private_key, public_key = cls.keys_from_dict(ledger, d)
         name = d.get('name')
         if not name:
-            name = 'Account #{}'.format(public_key.address)
+            name = f'Account #{public_key.address}'
         return cls(
             ledger=ledger,
             wallet=wallet,
@@ -184,10 +184,10 @@ def query(select, **constraints) -> Tuple[str, Dict[str, Any]]:
         raise ValueError("order_by must be string or list")

     if limit is not None:
-        sql.append('LIMIT {}'.format(limit))
+        sql.append(f'LIMIT {limit}')

     if offset is not None:
-        sql.append('OFFSET {}'.format(offset))
+        sql.append(f'OFFSET {offset}')

     return ' '.join(sql), values
@@ -273,7 +273,7 @@ class SQLiteMixin:
                     constraints: Union[list, tuple]) -> Tuple[str, list]:
         columns, values = [], []
         for column, value in data.items():
-            columns.append("{} = ?".format(column))
+            columns.append(f"{column} = ?")
             values.append(value)
         values.extend(constraints)
         sql = "UPDATE {} SET {} WHERE {}".format(
@@ -144,9 +144,8 @@ class BaseHeaders:
         if previous_hash is None:
             if self.genesis_hash is not None and self.genesis_hash != current_hash:
                 raise InvalidHeader(
-                    height, "genesis header doesn't match: {} vs expected {}".format(
-                        current_hash.decode(), self.genesis_hash.decode())
-                )
+                    height, f"genesis header doesn't match: {current_hash.decode()} "
+                            f"vs expected {self.genesis_hash.decode()}")
             return

         if header['prev_block_hash'] != previous_hash:
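The hunk above also shows how a long .format() message maps onto a multi-line f-string: adjacent string literals are concatenated at compile time, but every fragment that contains a replacement field needs its own f prefix. An illustrative sketch:

    got, expected = 'aa', 'bb'  # illustrative values
    msg = (f"genesis header doesn't match: {got} "
           f"vs expected {expected}")  # both fragments carry the f prefix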
@@ -166,8 +165,7 @@ class BaseHeaders:
         proof_of_work = self.get_proof_of_work(current_hash)
         if proof_of_work > target:
             raise InvalidHeader(
-                height, "insufficient proof of work: {} vs target {}".format(
-                    proof_of_work.value, target.value)
+                height, f"insufficient proof of work: {proof_of_work.value} vs target {target.value}"
             )

     async def repair(self):
@@ -35,7 +35,7 @@ class LedgerRegistry(type):
         if not (name == 'BaseLedger' and not bases):
             ledger_id = cls.get_id()
             assert ledger_id not in mcs.ledgers,\
-                'Ledger with id "{}" already registered.'.format(ledger_id)
+                f'Ledger with id "{ledger_id}" already registered.'
             mcs.ledgers[ledger_id] = cls
         return cls
@@ -346,7 +346,7 @@ class BaseLedger(metaclass=LedgerRegistry):
             )

         else:
-            raise IndexError("headers.connect() returned negative number ({})".format(added))
+            raise IndexError(f"headers.connect() returned negative number ({added})")

         if height < 0:
             raise IndexError(
@@ -111,14 +111,14 @@ class DataToken(Token):
     __slots__ = ()

     def __repr__(self):
-        return '"{}"'.format(hexlify(self.value))
+        return f'"{hexlify(self.value)}"'


 class SmallIntegerToken(Token):
     __slots__ = ()

     def __repr__(self):
-        return 'SmallIntegerToken({})'.format(self.value)
+        return f'SmallIntegerToken({self.value})'


 def token_producer(source):
@@ -166,16 +166,16 @@ class Parser:
             elif isinstance(opcode, PUSH_MANY):
                 self.consume_many_non_greedy()
             else:
-                raise ParseError("DataToken found but opcode was '{}'.".format(opcode))
+                raise ParseError(f"DataToken found but opcode was '{opcode}'.")
         elif isinstance(token, SmallIntegerToken):
             if isinstance(opcode, SMALL_INTEGER):
                 self.values[opcode.name] = token.value
             else:
-                raise ParseError("SmallIntegerToken found but opcode was '{}'.".format(opcode))
+                raise ParseError(f"SmallIntegerToken found but opcode was '{opcode}'.")
         elif token.value == opcode:
             pass
         else:
-            raise ParseError("Token is '{}' and opcode is '{}'.".format(token.value, opcode))
+            raise ParseError(f"Token is '{token.value}' and opcode is '{opcode}'.")
         self.token_index += 1
         self.opcode_index += 1
@@ -243,7 +243,7 @@ class Parser:
         elif isinstance(opcode, PUSH_SUBSCRIPT):
             self.values[opcode.name] = Script.from_source_with_template(value, opcode.template)
         else:
-            raise ParseError("Not a push single or subscript: {}".format(opcode))
+            raise ParseError(f"Not a push single or subscript: {opcode}")


 class Template:
@@ -331,7 +331,7 @@ class Script:
                 return
             except ParseError:
                 continue
-        raise ValueError('No matching templates for source: {}'.format(hexlify(self.source)))
+        raise ValueError(f'No matching templates for source: {hexlify(self.source)}')

     def generate(self):
         self.source = self.template.generate(self._values)
@@ -56,7 +56,7 @@ class TXORef:

     @property
     def id(self):
-        return '{}:{}'.format(self.tx_ref.id, self.position)
+        return f'{self.tx_ref.id}:{self.position}'

     @property
     def hash(self):
@@ -185,7 +185,7 @@ class Base58:
     def char_value(cls, c):
         val = cls.char_map.get(c)
         if val is None:
-            raise Base58Error('invalid base 58 character "{}"'.format(c))
+            raise Base58Error(f'invalid base 58 character "{c}"')
         return val

     @classmethod
@@ -243,7 +243,7 @@ class Base58:
         be_bytes = cls.decode(txt)
         result, check = be_bytes[:-4], be_bytes[-4:]
         if check != hash_fn(result)[:4]:
-            raise Base58Error('invalid base 58 checksum for {}'.format(txt))
+            raise Base58Error(f'invalid base 58 checksum for {txt}')
         return result

     @classmethod
@@ -78,12 +78,10 @@ class Prefetcher:
         daemon_height = await self.daemon.height()
         behind = daemon_height - height
         if behind > 0:
-            self.logger.info('catching up to daemon height {:,d} '
-                             '({:,d} blocks behind)'
-                             .format(daemon_height, behind))
+            self.logger.info(f'catching up to daemon height {daemon_height:,d} '
+                             f'({behind:,d} blocks behind)')
         else:
-            self.logger.info('caught up to daemon height {:,d}'
-                             .format(daemon_height))
+            self.logger.info(f'caught up to daemon height {daemon_height:,d}')

     async def _prefetch_blocks(self):
         """Prefetch some blocks and put them on the queue.
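As the Prefetcher hunk shows, format specifications survive the conversion unchanged: '{:,d}'.format(n) becomes f'{n:,d}', with the spec following the colon inside the braces. For example:

    behind = 1234567  # illustrative value
    print(f'({behind:,d} blocks behind)')  # prints: (1,234,567 blocks behind)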
@@ -116,8 +114,7 @@ class Prefetcher:
                 # Special handling for genesis block
                 if first == 0:
                     blocks[0] = self.coin.genesis_block(blocks[0])
-                    self.logger.info('verified genesis block with hash {}'
-                                     .format(hex_hashes[0]))
+                    self.logger.info(f'verified genesis block with hash {hex_hashes[0]}')

                 # Update our recent average block size estimate
                 size = sum(len(block) for block in blocks)
@@ -461,15 +458,14 @@ class BlockProcessor:
             self.height -= 1
             self.db.tx_counts.pop()

         self.logger.info(f'backed up to height {self.height:,d}')

     def backup_txs(self, txs):
         # Prevout values, in order down the block (coinbase first if present)
         # undo_info is in reverse block order
         undo_info = self.db.read_undo_info(self.height)
         if undo_info is None:
-            raise ChainError('no undo information found for height {:,d}'
-                             .format(self.height))
+            raise ChainError(f'no undo information found for height {self.height:,d}')
         n = len(undo_info)

         # Use local vars for speed in the loops
@@ -100,11 +100,9 @@ class Coin:
                 missing = [attr for attr in coin_req_attrs
                            if not hasattr(coin, attr)]
                 if missing:
-                    raise CoinError('coin {} missing {} attributes'
-                                    .format(name, missing))
+                    raise CoinError(f'coin {name} missing {missing} attributes')
                 return coin
-        raise CoinError('unknown coin {} and network {} combination'
-                        .format(name, net))
+        raise CoinError(f'unknown coin {name} and network {net} combination')

     @classmethod
     def sanitize_url(cls, url):
@@ -112,9 +110,9 @@ class Coin:
         url = url.strip().rstrip('/')
         match = cls.RPC_URL_REGEX.match(url)
         if not match:
-            raise CoinError('invalid daemon URL: "{}"'.format(url))
+            raise CoinError(f'invalid daemon URL: "{url}"')
         if match.groups()[1] is None:
-            url += ':{:d}'.format(cls.RPC_PORT)
+            url += f':{cls.RPC_PORT:d}'
         if not url.startswith('http://') and not url.startswith('https://'):
             url = 'http://' + url
         return url + '/'
@@ -128,8 +126,7 @@ class Coin:
         header = cls.block_header(block, 0)
         header_hex_hash = hash_to_hex_str(cls.header_hash(header))
         if header_hex_hash != cls.GENESIS_HASH:
-            raise CoinError('genesis block has hash {} expected {}'
-                            .format(header_hex_hash, cls.GENESIS_HASH))
+            raise CoinError(f'genesis block has hash {header_hex_hash} expected {cls.GENESIS_HASH}')

         return header + bytes(1)
@@ -202,7 +199,7 @@ class Coin:
         if verbyte in cls.P2SH_VERBYTES:
             return ScriptPubKey.P2SH_script(hash160)

-        raise CoinError('invalid address: {}'.format(address))
+        raise CoinError(f'invalid address: {address}')

     @classmethod
     def privkey_WIF(cls, privkey_bytes, compressed):
@@ -422,8 +422,7 @@ class DB:
     async def fs_block_hashes(self, height, count):
         headers_concat, headers_count = await self.read_headers(height, count)
         if headers_count != count:
-            raise self.DBError('only got {:,d} headers starting at {:,d}, not '
-                               '{:,d}'.format(headers_count, height, count))
+            raise self.DBError(f'only got {headers_count:,d} headers starting at {height:,d}, not {count:,d}')
         offset = 0
         headers = []
         for n in range(count):
@@ -543,17 +542,15 @@ class DB:
             raise self.DBError('failed reading state from DB')
         self.db_version = state['db_version']
         if self.db_version not in self.DB_VERSIONS:
-            raise self.DBError('your UTXO DB version is {} but this '
-                               'software only handles versions {}'
-                               .format(self.db_version, self.DB_VERSIONS))
+            raise self.DBError(f'your UTXO DB version is {self.db_version} but this '
+                               f'software only handles versions {self.DB_VERSIONS}')
         # backwards compat
         genesis_hash = state['genesis']
         if isinstance(genesis_hash, bytes):
             genesis_hash = genesis_hash.decode()
         if genesis_hash != self.coin.GENESIS_HASH:
-            raise self.DBError('DB genesis hash {} does not match coin {}'
-                               .format(genesis_hash,
-                                       self.coin.GENESIS_HASH))
+            raise self.DBError(f'DB genesis hash {genesis_hash} does not '
+                               f'match coin {self.coin.GENESIS_HASH}')
         self.db_height = state['height']
         self.db_tx_count = state['tx_count']
         self.db_tip = state['tip']
@@ -567,17 +564,16 @@ class DB:
         self.last_flush_tx_count = self.fs_tx_count

         # Log some stats
-        self.logger.info('DB version: {:d}'.format(self.db_version))
-        self.logger.info('coin: {}'.format(self.coin.NAME))
-        self.logger.info('network: {}'.format(self.coin.NET))
-        self.logger.info('height: {:,d}'.format(self.db_height))
-        self.logger.info('tip: {}'.format(hash_to_hex_str(self.db_tip)))
-        self.logger.info('tx count: {:,d}'.format(self.db_tx_count))
+        self.logger.info(f'DB version: {self.db_version:d}')
+        self.logger.info(f'coin: {self.coin.NAME}')
+        self.logger.info(f'network: {self.coin.NET}')
+        self.logger.info(f'height: {self.db_height:,d}')
+        self.logger.info(f'tip: {hash_to_hex_str(self.db_tip)}')
+        self.logger.info(f'tx count: {self.db_tx_count:,d}')
         if self.utxo_db.for_sync:
             self.logger.info(f'flushing DB cache at {self.env.cache_MB:,d} MB')
         if self.first_sync:
-            self.logger.info('sync time so far: {}'
-                             .format(util.formatted_time(self.wall_time)))
+            self.logger.info(f'sync time so far: {util.formatted_time(self.wall_time)}')

     def write_utxo_state(self, batch):
         """Write (UTXO) state to the batch."""
@@ -104,7 +104,7 @@ class Env:
     def required(cls, envvar):
         value = environ.get(envvar)
         if value is None:
-            raise cls.Error('required envvar {} not set'.format(envvar))
+            raise cls.Error(f'required envvar {envvar} not set')
         return value

     @classmethod
@@ -115,8 +115,7 @@ class Env:
         try:
             return int(value)
         except Exception:
-            raise cls.Error('cannot convert envvar {} value {} to an integer'
-                            .format(envvar, value))
+            raise cls.Error(f'cannot convert envvar {envvar} value {value} to an integer')

     @classmethod
     def custom(cls, envvar, default, parse):
@@ -126,15 +125,13 @@ class Env:
         try:
             return parse(value)
         except Exception as e:
-            raise cls.Error('cannot parse envvar {} value {}'
-                            .format(envvar, value)) from e
+            raise cls.Error(f'cannot parse envvar {envvar} value {value}') from e

     @classmethod
     def obsolete(cls, envvars):
         bad = [envvar for envvar in envvars if environ.get(envvar)]
         if bad:
-            raise cls.Error('remove obsolete environment variables {}'
-                            .format(bad))
+            raise cls.Error(f'remove obsolete environment variables {bad}')

     def set_event_loop_policy(self):
         policy_name = self.default('EVENT_LOOP_POLICY', None)
@@ -147,7 +144,7 @@ class Env:
             loop_policy = uvloop.EventLoopPolicy()
             asyncio.set_event_loop_policy(loop_policy)
             return loop_policy
-        raise self.Error('unknown event loop policy "{}"'.format(policy_name))
+        raise self.Error(f'unknown event loop policy "{policy_name}"')

     def cs_host(self, *, for_rpc):
         """Returns the 'host' argument to pass to asyncio's create_server
@@ -180,9 +177,8 @@ class Env:
         # We give the DB 250 files; allow ElectrumX 100 for itself
         value = max(0, min(env_value, nofile_limit - 350))
         if value < env_value:
-            self.logger.warning('lowered maximum sessions from {:,d} to {:,d} '
-                                'because your open file limit is {:,d}'
-                                .format(env_value, value, nofile_limit))
+            self.logger.warning(f'lowered maximum sessions from {env_value:,d} to {value:,d} '
+                                f'because your open file limit is {nofile_limit:,d}')
         return value

     def clearnet_identity(self):
@@ -198,12 +194,12 @@ class Env:
             bad = (ip.is_multicast or ip.is_unspecified
                    or (ip.is_private and self.peer_announce))
             if bad:
-                raise self.Error('"{}" is not a valid REPORT_HOST'.format(host))
+                raise self.Error(f'"{host}" is not a valid REPORT_HOST')
         tcp_port = self.integer('REPORT_TCP_PORT', self.tcp_port) or None
         ssl_port = self.integer('REPORT_SSL_PORT', self.ssl_port) or None
         if tcp_port == ssl_port:
             raise self.Error('REPORT_TCP_PORT and REPORT_SSL_PORT '
-                             'both resolve to {}'.format(tcp_port))
+                             f'both resolve to {tcp_port}')
         return NetIdentity(
             host,
             tcp_port,
@@ -216,8 +212,7 @@ class Env:
         if host is None:
             return None
         if not host.endswith('.onion'):
-            raise self.Error('tor host "{}" must end with ".onion"'
-                             .format(host))
+            raise self.Error(f'tor host "{host}" must end with ".onion"')

         def port(port_kind):
             """Returns the clearnet identity port, if any and not zero,
@@ -233,7 +228,7 @@ class Env:
                     port('ssl_port')) or None
         if tcp_port == ssl_port:
             raise self.Error('REPORT_TCP_PORT_TOR and REPORT_SSL_PORT_TOR '
-                             'both resolve to {}'.format(tcp_port))
+                             f'both resolve to {tcp_port}')

         return NetIdentity(
             host,
@@ -95,7 +95,7 @@ class Base58:
     def char_value(c):
         val = Base58.cmap.get(c)
         if val is None:
-            raise Base58Error('invalid base 58 character "{}"'.format(c))
+            raise Base58Error(f'invalid base 58 character "{c}"')
         return val

     @staticmethod
@@ -148,7 +148,7 @@ class Base58:
         be_bytes = Base58.decode(txt)
         result, check = be_bytes[:-4], be_bytes[-4:]
         if check != hash_fn(result)[:4]:
-            raise Base58Error('invalid base 58 checksum for {}'.format(txt))
+            raise Base58Error(f'invalid base 58 checksum for {txt}')
         return result

     @staticmethod
@@ -261,7 +261,7 @@ class Peer:

         parts = [self.host, 'v' + self.protocol_max]
         if self.pruning:
-            parts.append('p{:d}'.format(self.pruning))
+            parts.append(f'p{self.pruning:d}')
         for letter, port in (('s', self.ssl_port), ('t', self.tcp_port)):
             if port:
                 parts.append(port_text(letter, port))
@@ -51,13 +51,13 @@ class Enumeration:
             if isinstance(x, tuple):
                 x, i = x
             if not isinstance(x, str):
-                raise EnumError("enum name {} not a string".format(x))
+                raise EnumError(f"enum name {x} not a string")
             if not isinstance(i, int):
-                raise EnumError("enum value {} not an integer".format(i))
+                raise EnumError(f"enum value {i} not an integer")
             if x in uniqueNames:
-                raise EnumError("enum name {} not unique".format(x))
+                raise EnumError(f"enum name {x} not unique")
             if i in uniqueValues:
-                raise EnumError("enum value {} not unique".format(x))
+                raise EnumError(f"enum value {i} not unique")
             uniqueNames.add(x)
             uniqueValues.add(i)
             lookup[x] = i
@@ -69,7 +69,7 @@ class Enumeration:
     def __getattr__(self, attr):
         result = self.lookup.get(attr)
         if result is None:
-            raise AttributeError('enumeration has no member {}'.format(attr))
+            raise AttributeError(f'enumeration has no member {attr}')
         return result

     def whatis(self, value):
@@ -194,7 +194,7 @@ class ScriptPubKey:
             if not req_compressed:
                 return
             raise PubKeyError('uncompressed pubkeys are invalid')
-        raise PubKeyError('invalid pubkey {}'.format(pubkey))
+        raise PubKeyError(f'invalid pubkey {pubkey}')

     @classmethod
     def pubkey_script(cls, pubkey):
@@ -206,8 +206,7 @@ class ScriptPubKey:
         """Returns the script for a pay-to-multisig transaction."""
         n = len(pubkeys)
         if not 1 <= m <= n <= 15:
-            raise ScriptError('{:d} of {:d} multisig script not possible'
-                              .format(m, n))
+            raise ScriptError(f'{m:d} of {n:d} multisig script not possible')
         for pubkey in pubkeys:
             cls.validate_pubkey(pubkey, req_compressed=True)
         # See https://bitcoin.org/en/developer-guide
@@ -273,11 +272,11 @@ class Script:
     @classmethod
     def opcode_name(cls, opcode):
         if OpCodes.OP_0 < opcode < OpCodes.OP_PUSHDATA1:
-            return 'OP_{:d}'.format(opcode)
+            return f'OP_{opcode:d}'
         try:
             return OpCodes.whatis(opcode)
         except KeyError:
-            return 'OP_UNKNOWN:{:d}'.format(opcode)
+            return f'OP_UNKNOWN:{opcode:d}'

     @classmethod
     def dump(cls, script):
@@ -287,5 +286,4 @@ class Script:
             if data is None:
                 print(name)
             else:
-                print('{} {} ({:d} bytes)'
-                      .format(name, data.hex(), len(data)))
+                print(f'{name} {data.hex()} ({len(data):d} bytes)')
@@ -382,7 +382,7 @@ class SessionManager:
         real_name: "bch.electrumx.cash t50001 s50002" for example
         """
         await self.peer_mgr.add_localRPC_peer(real_name)
-        return "peer '{}' added".format(real_name)
+        return f"peer '{real_name}' added"

     async def rpc_disconnect(self, session_ids):
         """Disconnect sessions.
@@ -511,17 +511,12 @@ class SessionManager:
         self.logger.info(f'max session count: {self.env.max_sessions:,d}')
         self.logger.info(f'session timeout: '
                          f'{self.env.session_timeout:,d} seconds')
-        self.logger.info('session bandwidth limit {:,d} bytes'
-                         .format(self.env.bandwidth_limit))
-        self.logger.info('max response size {:,d} bytes'
-                         .format(self.env.max_send))
-        self.logger.info('max subscriptions across all sessions: {:,d}'
-                         .format(self.max_subs))
-        self.logger.info('max subscriptions per session: {:,d}'
-                         .format(self.env.max_session_subs))
+        self.logger.info(f'session bandwidth limit {self.env.bandwidth_limit:,d} bytes')
+        self.logger.info(f'max response size {self.env.max_send:,d} bytes')
+        self.logger.info(f'max subscriptions across all sessions: {self.max_subs:,d}')
+        self.logger.info(f'max subscriptions per session: {self.env.max_session_subs:,d}')
         if self.env.drop_client is not None:
-            self.logger.info('drop clients matching: {}'
-                             .format(self.env.drop_client.pattern))
+            self.logger.info(f'drop clients matching: {self.env.drop_client.pattern}')
         # Start notifications; initialize hsub_results
         await notifications.start(self.db.db_height, self._notify_sessions)
         await self.start_other()
@@ -1079,7 +1074,7 @@ class ElectrumX(SessionBase):
         major, minor = divmod(ni_version, 1000000)
         minor, revision = divmod(minor, 10000)
         revision //= 100
-        daemon_version = '{:d}.{:d}.{:d}'.format(major, minor, revision)
+        daemon_version = f'{major:d}.{minor:d}.{revision:d}'
         for pair in [
             ('$SERVER_VERSION', torba.__version__),
             ('$DAEMON_VERSION', daemon_version),
@@ -19,7 +19,7 @@ def db_class(db_dir, name):
         if db_class.__name__.lower() == name.lower():
             db_class.import_module()
             return partial(db_class, db_dir)
-    raise RuntimeError('unrecognised DB engine "{}"'.format(name))
+    raise RuntimeError(f'unrecognised DB engine "{name}"')


 class Storage:
@@ -60,8 +60,7 @@ class TxInput(namedtuple("TxInput", "prev_hash prev_idx script sequence")):
     def __str__(self):
         script = self.script.hex()
         prev_hash = hash_to_hex_str(self.prev_hash)
-        return ("Input({}, {:d}, script={}, sequence={:d})"
-                .format(prev_hash, self.prev_idx, script, self.sequence))
+        return (f"Input({prev_hash}, {self.prev_idx:d}, script={script}, sequence={self.sequence:d})")

     def is_generation(self):
         """Test if an input is generation/coinbase like"""
@@ -473,8 +472,7 @@ class TxInputTokenPayStealth(
     def __str__(self):
         script = self.script.hex()
         keyimage = bytes(self.keyimage).hex()
-        return ("Input({}, {:d}, script={}, sequence={:d})"
-                .format(keyimage, self.ringsize[1], script, self.sequence))
+        return (f"Input({keyimage}, {self.ringsize[1]:d}, script={script}, sequence={self.sequence:d})")

     def is_generation(self):
         return True
@@ -518,8 +516,7 @@ class TxInputDcr(namedtuple("TxInput", "prev_hash prev_idx tree sequence")):

     def __str__(self):
         prev_hash = hash_to_hex_str(self.prev_hash)
-        return ("Input({}, {:d}, tree={}, sequence={:d})"
-                .format(prev_hash, self.prev_idx, self.tree, self.sequence))
+        return (f"Input({prev_hash}, {self.prev_idx:d}, tree={self.tree}, sequence={self.sequence:d})")

     def is_generation(self):
         """Test if an input is generation/coinbase like"""
@@ -94,7 +94,7 @@ def formatted_time(t, sep=' '):
             parts.append(fmt.format(val))
             t %= n
     if len(parts) < 3:
-        parts.append('{:02d}s'.format(t))
+        parts.append(f'{t:02d}s')
     return sep.join(parts)
@@ -182,7 +182,7 @@ class LogicalFile:
     """A logical binary file split across several separate files on disk."""

     def __init__(self, prefix, digits, file_size):
-        digit_fmt = '{' + ':0{:d}d'.format(digits) + '}'
+        digit_fmt = f'{{:0{digits:d}d}}'
         self.filename_fmt = prefix + digit_fmt
         self.file_size = file_size
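This last hunk is the one non-trivial case in the commit: the result is itself a format template, so literal braces must be escaped by doubling them inside the f-string. A quick check of the equivalence:

    digits = 4  # illustrative value
    digit_fmt = f'{{:0{digits:d}d}}'
    assert digit_fmt == '{:04d}'          # doubled braces become literal braces
    assert digit_fmt.format(7) == '0007'  # later usable as a normal format template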