py2(iteritems, itervalues) -> py3(items, values)

Authored by Lex Berezhny on 2018-07-21 17:11:44 -04:00, committed by Jack Robison
parent fbdbcc8070
commit ba80c0e594
12 changed files with 23 additions and 23 deletions
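
The rename is mechanical: Python 3 drops the iteritems()/itervalues()/iterkeys() dict methods, and items()/values()/keys() return lightweight views instead of lists (the diff also converts iterkeys(), which the commit title omits). A minimal sketch of the semantic difference, using a hypothetical dict d:

    d = {'a': 1, 'b': 2}

    # Python 2: d.items() built a list; d.iteritems() was a one-shot iterator.
    # Python 3: d.items() is a live view -- lazy like iteritems(), but
    # reusable, and it reflects later mutations of d.
    for key, val in d.items():
        print(key, val)

    # Caveat: a view cannot survive the dict being resized mid-loop, so code
    # that deletes keys while iterating must snapshot the view first:
    for key in list(d.keys()):
        if d[key] > 1:
            del d[key]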

@@ -340,7 +340,7 @@ class Config(object):
return self.get_current_settings_dict().__repr__()
def __iter__(self):
- for k in self._data[TYPE_DEFAULT].iterkeys():
+ for k in self._data[TYPE_DEFAULT].keys():
yield k
def __getitem__(self, name):
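
Since iterating a dict directly yields its keys on both Python 2 and 3, the loop above could drop the keys() call entirely. An equivalent sketch (not what the commit does):

    def __iter__(self):
        # A dict is already iterable over its keys; no keys() call needed.
        return iter(self._data[TYPE_DEFAULT])
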
@@ -477,7 +477,7 @@ class Config(object):
def get_adjustable_settings_dict(self):
return {
- key: val for key, val in self.get_current_settings_dict().iteritems()
+ key: val for key, val in self.get_current_settings_dict().items()
if key in self._adjustable_defaults
}

@@ -326,7 +326,7 @@ class Wallet(object):
tx_heights = yield DeferredDict({txid: self.get_height_for_txid(txid) for txid in pending_outpoints},
consumeErrors=True)
outpoint_heights = {}
- for txid, outputs in pending_outpoints.iteritems():
+ for txid, outputs in pending_outpoints.items():
if txid in tx_heights:
for nout in outputs:
outpoint_heights["%s:%i" % (txid, nout)] = tx_heights[txid]
@@ -442,7 +442,7 @@ class Wallet(object):
result = {}
batch_results = yield self._get_values_for_uris(page, page_size, *uris)
to_save = []
- for uri, resolve_results in batch_results.iteritems():
+ for uri, resolve_results in batch_results.items():
try:
result[uri] = self._handle_claim_result(resolve_results)
to_save.append(result[uri])
@@ -454,7 +454,7 @@ class Wallet(object):
@defer.inlineCallbacks
def get_claims_by_ids(self, *claim_ids):
claims = yield self._get_claims_by_claimids(*claim_ids)
- for claim in claims.itervalues():
+ for claim in claims.values():
yield self.save_claim(claim)
defer.returnValue(claims)
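
The loop above yields each save_claim() deferred in turn, so the saves run serially under inlineCallbacks. If ordering does not matter, Twisted's DeferredList could run them concurrently; a sketch of that alternative, not something this commit changes:

    from twisted.internet import defer

    # Fires all saves at once and waits for the whole batch (assumes
    # save_claim returns a Deferred, as the serial loop implies).
    yield defer.DeferredList(
        [self.save_claim(claim) for claim in claims.values()],
        consumeErrors=True
    )
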
@@ -1129,7 +1129,7 @@ class LBRYumWallet(Wallet):
return payto_out['txid']
log.debug("Doing send many. payments to send: %s", str(payments_to_send))
- d = self._run_cmd_as_defer_succeed('payto', payments_to_send.iteritems())
+ d = self._run_cmd_as_defer_succeed('payto', payments_to_send.items())
d.addCallback(lambda out: handle_payto_out(out))
return d
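
One subtlety in the payto hunk: on Python 3, payments_to_send.items() is a view object rather than a list. Views iterate fine, but they are not sequences, so if the command on the other side indexes into its argument, a list() wrapper would be needed. An illustration on a hypothetical dict:

    payments = {'addr1': 1.0, 'addr2': 2.5}

    pairs = payments.items()
    for address, amount in pairs:  # iteration over a view works
        print(address, amount)

    # pairs[0] would raise TypeError: dict views are not indexable.
    pairs = list(payments.items())  # only needed if the consumer indexes it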

@@ -79,14 +79,14 @@ class DownloadManager(object):
return self.blob_handler.handle_blob(self.blobs[blob_num], self.blob_infos[blob_num])
def calculate_total_bytes(self):
- return sum([bi.length for bi in self.blob_infos.itervalues()])
+ return sum([bi.length for bi in self.blob_infos.values()])
def calculate_bytes_left_to_output(self):
if not self.blobs:
return self.calculate_total_bytes()
else:
to_be_outputted = [
- b for n, b in self.blobs.iteritems()
+ b for n, b in self.blobs.items()
if n >= self.progress_manager.last_blob_outputted
]
return sum([b.length for b in to_be_outputted if b.length is not None])
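
A side note on the summations above: the bracketed forms build a temporary list just to feed sum(). A generator expression does the same work without the intermediate allocation; an equivalent sketch:

    def calculate_total_bytes(self):
        # sum() consumes the generator lazily; no throwaway list is built.
        return sum(bi.length for bi in self.blob_infos.values())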

@@ -69,7 +69,7 @@ class SingleProgressManager(object):
def needed_blobs(self):
blobs = self.download_manager.blobs
assert len(blobs) == 1
- return [b for b in blobs.itervalues() if not b.get_is_verified()]
+ return [b for b in blobs.values() if not b.get_is_verified()]
class DummyBlobHandler(object):

@@ -101,15 +101,15 @@ class FullStreamProgressManager(StreamProgressManager):
if not blobs:
return 0
else:
- for i in range(max(blobs.iterkeys())):
+ for i in range(max(blobs.keys())):
if self._done(i, blobs):
return i
- return max(blobs.iterkeys()) + 1
+ return max(blobs.keys()) + 1
def needed_blobs(self):
blobs = self.download_manager.blobs
return [
- b for n, b in blobs.iteritems()
+ b for n, b in blobs.items()
if not b.get_is_verified() and not n in self.provided_blob_nums
]
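
Two micro-cleanups the hunk above leaves in place, sketched as an equivalent form rather than as part of this commit: max() over a dict already iterates its keys, the maximum only needs computing once, and `not n in` is more idiomatically written `n not in`:

    if not blobs:
        return 0
    highest = max(blobs)  # same as max(blobs.keys()), computed once
    for i in range(highest):
        if self._done(i, blobs):
            return i
    return highest + 1

    def needed_blobs(self):
        blobs = self.download_manager.blobs
        return [
            b for n, b in blobs.items()
            if not b.get_is_verified() and n not in self.provided_blob_nums
        ]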

@@ -915,7 +915,7 @@ class Daemon(AuthJSONRPCServer):
'auto_renew_claim_height_delta': int
}
- for key, setting_type in setting_types.iteritems():
+ for key, setting_type in setting_types.items():
if key in new_settings:
if isinstance(new_settings[key], setting_type):
conf.settings.update({key: new_settings[key]},
@@ -2916,7 +2916,7 @@ class Daemon(AuthJSONRPCServer):
hosts = {}
if datastore_len:
- for k, v in data_store.iteritems():
+ for k, v in data_store.items():
for contact, value, lastPublished, originallyPublished, originalPublisherID in v:
if contact in hosts:
blobs = hosts[contact]
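
The hunk is cut off here, but the visible `if contact in hosts` check is the usual accumulate-by-key pattern; collections.defaultdict expresses it without the membership test. A sketch under that assumption (the appended payload is hypothetical, since the original loop body is truncated):

    from collections import defaultdict

    hosts = defaultdict(list)
    for k, v in data_store.items():
        for contact, value, lastPublished, originallyPublished, originalPublisherID in v:
            hosts[contact].append(value)  # hypothetical payload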

@@ -122,7 +122,7 @@ def get_methods(daemon):
_fn.__doc__ = fn.__doc__
return {name: _fn}
- for method_name, method in daemon.callable_methods.iteritems():
+ for method_name, method in daemon.callable_methods.items():
locs.update(wrapped(method_name, method))
return locs

@@ -247,7 +247,7 @@ def do_migration(db_dir):
claim_queries = {} # <sd_hash>: claim query tuple
# get the claim queries ready, only keep those with associated files
- for outpoint, sd_hash in file_outpoints.iteritems():
+ for outpoint, sd_hash in file_outpoints.items():
if outpoint in claim_outpoint_queries:
claim_queries[sd_hash] = claim_outpoint_queries[outpoint]
@@ -260,7 +260,7 @@ def do_migration(db_dir):
claim_arg_tup[7], claim_arg_tup[6], claim_arg_tup[8],
smart_decode(claim_arg_tup[8]).certificate_id, claim_arg_tup[5], claim_arg_tup[4]
)
- for sd_hash, claim_arg_tup in claim_queries.iteritems() if claim_arg_tup
+ for sd_hash, claim_arg_tup in claim_queries.items() if claim_arg_tup
] # sd_hash, (txid, nout, claim_id, name, sequence, address, height, amount, serialized)
)
@@ -268,7 +268,7 @@ def do_migration(db_dir):
damaged_stream_sds = []
# import the files and get sd hashes of streams to attempt recovering
- for sd_hash, file_query in file_args.iteritems():
+ for sd_hash, file_query in file_args.items():
failed_sd = _import_file(*file_query)
if failed_sd:
damaged_stream_sds.append(failed_sd)
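
The first filtering loop in this migration hunk pairs each sd_hash with its claim query only when the outpoint is known; a dict comprehension states that in one expression. An equivalent sketch, not what the migration script actually does:

    claim_queries = {
        sd_hash: claim_outpoint_queries[outpoint]
        for outpoint, sd_hash in file_outpoints.items()
        if outpoint in claim_outpoint_queries
    }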

@@ -839,7 +839,7 @@ class SQLiteStorage(WalletDatabase):
def save_claim_tx_heights(self, claim_tx_heights):
def _save_claim_heights(transaction):
- for outpoint, height in claim_tx_heights.iteritems():
+ for outpoint, height in claim_tx_heights.items():
transaction.execute(
"update claim set height=? where claim_outpoint=? and height=-1",
(height, outpoint)

@@ -59,7 +59,7 @@ def format_contact(contact):
def format_datastore(node):
datastore = deepcopy(node._dataStore._dict)
result = {}
- for key, values in datastore.iteritems():
+ for key, values in datastore.items():
contacts = []
for (contact, value, last_published, originally_published, original_publisher_id) in values:
contact_dict = format_contact(contact)
@@ -201,7 +201,7 @@ class MultiSeedRPCServer(AuthJSONRPCServer):
nodes = []
for node_id in [n.node_id.encode('hex') for n in self._nodes]:
routing_info = yield self.jsonrpc_node_routing_table(node_id=node_id)
- for index, bucket in routing_info.iteritems():
+ for index, bucket in routing_info.items():
if ip_address in map(lambda c: c['address'], bucket['contacts']):
nodes.append(node_id)
break
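
One adjacent py2-ism survives in this hunk: n.node_id.encode('hex') relies on Python 2's codec-on-bytes behavior and fails on Python 3. The usual replacements, shown on a stand-in value:

    import binascii

    node_id = b'\xde\xad\xbe\xef'  # stand-in for n.node_id

    hex_id = binascii.hexlify(node_id).decode()  # works on Python 2 and 3
    hex_id = node_id.hex()                       # Python 3.5+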

@@ -228,7 +228,7 @@ class TreeRoutingTableTest(unittest.TestCase):
# # math.log(bucket.rangeMax, 2)) + ")"
# # for c in bucket.getContacts():
# # print " contact " + str(c.id)
- # # for key, bucket in self.table._replacementCache.iteritems():
+ # # for key, bucket in self.table._replacementCache.items():
# # print "Replacement Cache for Bucket " + str(key)
# # for c in bucket:
# # print " contact " + str(c.id)

@@ -6,7 +6,7 @@ from lbrynet.daemon.Daemon import Daemon
class DaemonDocsTests(unittest.TestCase):
def test_can_parse_api_method_docs(self):
failures = []
- for name, fn in Daemon.callable_methods.iteritems():
+ for name, fn in Daemon.callable_methods.items():
try:
docopt.docopt(fn.__doc__, ())
except docopt.DocoptLanguageError as err: