Explicitly specify encoding when opening text files in Python code
commit 634bd97001 (parent fa4b9065a8)
19 changed files with 38 additions and 38 deletions
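Background for the change: in Python 3, open() called without an encoding argument falls back to locale.getpreferredencoding(False), so the same script may decode a UTF-8 file correctly on one machine and raise UnicodeDecodeError (or silently mis-decode) on another, e.g. under a C/POSIX or cp1252 locale. Pinning the encoding makes the scripts behave identically everywhere. A minimal before/after sketch of the pattern applied throughout this commit (the file name is only illustrative):

    import locale

    # The implicit codec used by open() is locale-dependent and varies per system.
    print(locale.getpreferredencoding(False))

    # Before: decoded with whatever the locale default happens to be.
    with open("nodes_main.txt", "r") as f:
        data = f.read()

    # After: decoded as UTF-8 on every platform.
    with open("nodes_main.txt", "r", encoding="utf8") as f:
        data = f.read()

A few hunks below pin encoding="ascii" instead, presumably because those files (for example the RPC .cookie file) only ever contain ASCII data.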
@@ -37,7 +37,7 @@ for arg in sys.argv[1:]:
 # TODO: implement support for multiple include directories
 for arg in sorted(files.keys()):
     module = files[arg]
-    with open(arg, 'r') as f:
+    with open(arg, 'r', encoding="utf8") as f:
         for line in f:
             match = RE.match(line)
             if match:
@@ -152,7 +152,7 @@ def main():
         sys.exit(p.returncode)

     if not args.i:
-        with open(filename) as f:
+        with open(filename, encoding="utf8") as f:
             code = f.readlines()
         formatted_code = io.StringIO(stdout).readlines()
         diff = difflib.unified_diff(code, formatted_code,
@@ -146,7 +146,7 @@ def file_has_without_c_style_copyright_for_holder(contents, holder_name):
 ################################################################################

 def read_file(filename):
-    return open(os.path.abspath(filename), 'r').read()
+    return open(os.path.abspath(filename), 'r', encoding="utf8").read()

 def gather_file_info(filename):
     info = {}
@@ -325,13 +325,13 @@ def get_most_recent_git_change_year(filename):
 ################################################################################

 def read_file_lines(filename):
-    f = open(os.path.abspath(filename), 'r')
+    f = open(os.path.abspath(filename), 'r', encoding="utf8")
     file_lines = f.readlines()
     f.close()
     return file_lines

 def write_file_lines(filename, file_lines):
-    f = open(os.path.abspath(filename), 'w')
+    f = open(os.path.abspath(filename), 'w', encoding="utf8")
     f.write(''.join(file_lines))
     f.close()

@@ -191,7 +191,7 @@ def main():
     merge_branch = 'pull/'+pull+'/merge'
     local_merge_branch = 'pull/'+pull+'/local-merge'

-    devnull = open(os.devnull,'w')
+    devnull = open(os.devnull, 'w', encoding="utf8")
     try:
         subprocess.check_call([GIT,'checkout','-q',branch])
     except subprocess.CalledProcessError:
@@ -9,7 +9,7 @@ import subprocess
 import unittest

 def write_testcode(filename):
-    with open(filename, 'w') as f:
+    with open(filename, 'w', encoding="utf8") as f:
         f.write('''
     #include <stdio.h>
     int main()
@@ -13,8 +13,8 @@ pattern = args.pattern
 outfile = args.outfile

 in_remove = False
-with open(tracefile, 'r') as f:
-    with open(outfile, 'w') as wf:
+with open(tracefile, 'r', encoding="utf8") as f:
+    with open(outfile, 'w', encoding="utf8") as wf:
         for line in f:
             for p in pattern:
                 if line.startswith("SF:") and p in line:
@@ -75,7 +75,7 @@ def get_blk_dt(blk_hdr):
 # When getting the list of block hashes, undo any byte reversals.
 def get_block_hashes(settings):
     blkindex = []
-    f = open(settings['hashlist'], "r")
+    f = open(settings['hashlist'], "r", encoding="utf8")
     for line in f:
         line = line.rstrip()
         if settings['rev_hash_bytes'] == 'true':
@@ -261,7 +261,7 @@ if __name__ == '__main__':
         print("Usage: linearize-data.py CONFIG-FILE")
         sys.exit(1)

-    f = open(sys.argv[1])
+    f = open(sys.argv[1], encoding="utf8")
     for line in f:
         # skip comment lines
         m = re.search('^\s*#', line)
@@ -96,7 +96,7 @@ def get_block_hashes(settings, max_blocks_per_call=10000):

 def get_rpc_cookie():
     # Open the cookie file
-    with open(os.path.join(os.path.expanduser(settings['datadir']), '.cookie'), 'r') as f:
+    with open(os.path.join(os.path.expanduser(settings['datadir']), '.cookie'), 'r', encoding="ascii") as f:
         combined = f.readline()
         combined_split = combined.split(":")
         settings['rpcuser'] = combined_split[0]
@@ -107,7 +107,7 @@ if __name__ == '__main__':
         print("Usage: linearize-hashes.py CONFIG-FILE")
         sys.exit(1)

-    f = open(sys.argv[1])
+    f = open(sys.argv[1], encoding="utf8")
     for line in f:
         # skip comment lines
         m = re.search('^\s*#', line)
@@ -127,10 +127,10 @@ def main():
     g.write(' * Each line contains a 16-byte IPv6 address and a port.\n')
     g.write(' * IPv4 as well as onion addresses are wrapped inside an IPv6 address accordingly.\n')
     g.write(' */\n')
-    with open(os.path.join(indir,'nodes_main.txt'),'r') as f:
+    with open(os.path.join(indir,'nodes_main.txt'), 'r', encoding="utf8") as f:
         process_nodes(g, f, 'pnSeed6_main', 8333)
     g.write('\n')
-    with open(os.path.join(indir,'nodes_test.txt'),'r') as f:
+    with open(os.path.join(indir,'nodes_test.txt'), 'r', encoding="utf8") as f:
         process_nodes(g, f, 'pnSeed6_test', 18333)
     g.write('#endif // BITCOIN_CHAINPARAMSSEEDS_H\n')

@@ -76,11 +76,11 @@ def main():
     # get directory of this program and read data files
     dirname = os.path.dirname(os.path.abspath(__file__))
     print("Using verify-commits data from " + dirname)
-    verified_root = open(dirname + "/trusted-git-root", "r").read().splitlines()[0]
-    verified_sha512_root = open(dirname + "/trusted-sha512-root-commit", "r").read().splitlines()[0]
-    revsig_allowed = open(dirname + "/allow-revsig-commits", "r").read().splitlines()
-    unclean_merge_allowed = open(dirname + "/allow-unclean-merge-commits", "r").read().splitlines()
-    incorrect_sha512_allowed = open(dirname + "/allow-incorrect-sha512-commits", "r").read().splitlines()
+    verified_root = open(dirname + "/trusted-git-root", "r", encoding="utf8").read().splitlines()[0]
+    verified_sha512_root = open(dirname + "/trusted-sha512-root-commit", "r", encoding="utf8").read().splitlines()[0]
+    revsig_allowed = open(dirname + "/allow-revsig-commits", "r", encoding="utf-8").read().splitlines()
+    unclean_merge_allowed = open(dirname + "/allow-unclean-merge-commits", "r", encoding="utf-8").read().splitlines()
+    incorrect_sha512_allowed = open(dirname + "/allow-incorrect-sha512-commits", "r", encoding="utf-8").read().splitlines()

     # Set commit and branch and set variables
     current_commit = args.commit
@@ -63,7 +63,7 @@ child = Popen([XGETTEXT,'--output=-','-n','--keyword=_'] + files, stdout=PIPE)

 messages = parse_po(out.decode('utf-8'))

-f = open(OUT_CPP, 'w')
+f = open(OUT_CPP, 'w', encoding="utf8")
 f.write("""

 #include <QtGlobal>
@@ -36,7 +36,7 @@ class NotificationsTest(BitcoinTestFramework):
         wait_until(lambda: os.path.isfile(self.block_filename) and os.stat(self.block_filename).st_size >= (block_count * 65), timeout=10)

         # file content should equal the generated blocks hashes
-        with open(self.block_filename, 'r') as f:
+        with open(self.block_filename, 'r', encoding="utf-8") as f:
             assert_equal(sorted(blocks), sorted(l.strip() for l in f.read().splitlines()))

         self.log.info("test -walletnotify")
@@ -45,7 +45,7 @@ class NotificationsTest(BitcoinTestFramework):

         # file content should equal the generated transaction hashes
         txids_rpc = list(map(lambda t: t['txid'], self.nodes[1].listtransactions("*", block_count)))
-        with open(self.tx_filename, 'r') as f:
+        with open(self.tx_filename, 'r', encoding="ascii") as f:
             assert_equal(sorted(txids_rpc), sorted(l.strip() for l in f.read().splitlines()))
         os.remove(self.tx_filename)

@@ -58,7 +58,7 @@ class NotificationsTest(BitcoinTestFramework):

         # file content should equal the generated transaction hashes
         txids_rpc = list(map(lambda t: t['txid'], self.nodes[1].listtransactions("*", block_count)))
-        with open(self.tx_filename, 'r') as f:
+        with open(self.tx_filename, 'r', encoding="ascii") as f:
             assert_equal(sorted(txids_rpc), sorted(l.strip() for l in f.read().splitlines()))

         # Mine another 41 up-version blocks. -alertnotify should trigger on the 51st.
@@ -81,11 +81,11 @@ class GetblockstatsTest(BitcoinTestFramework):
             'mocktime': int(mocktime),
             'stats': self.expected_stats,
         }
-        with open(filename, 'w') as f:
+        with open(filename, 'w', encoding="utf8") as f:
             json.dump(to_dump, f, sort_keys=True, indent=2)

     def load_test_data(self, filename):
-        with open(filename, 'r') as f:
+        with open(filename, 'r', encoding="utf8") as f:
             d = json.load(f)
             blocks = d['blocks']
             mocktime = d['mocktime']
@@ -327,7 +327,7 @@ def get_auth_cookie(datadir):
                 assert password is None # Ensure that there is only one rpcpassword line
                 password = line.split("=")[1].strip("\n")
     if os.path.isfile(os.path.join(datadir, "regtest", ".cookie")):
-        with open(os.path.join(datadir, "regtest", ".cookie"), 'r') as f:
+        with open(os.path.join(datadir, "regtest", ".cookie"), 'r', encoding="ascii") as f:
             userpass = f.read()
             split_userpass = userpass.split(':')
             user = split_userpass[0]
@@ -213,7 +213,7 @@ def main():
     # Read config generated by configure.
     config = configparser.ConfigParser()
     configfile = os.path.abspath(os.path.dirname(__file__)) + "/../config.ini"
-    config.read_file(open(configfile))
+    config.read_file(open(configfile, encoding="utf8"))

     passon_args.append("--configfile=%s" % configfile)

@@ -590,7 +590,7 @@ class RPCCoverage():
         if not os.path.isfile(coverage_ref_filename):
             raise RuntimeError("No coverage reference found")

-        with open(coverage_ref_filename, 'r') as coverage_ref_file:
+        with open(coverage_ref_filename, 'r', encoding="utf8") as coverage_ref_file:
             all_cmds.update([line.strip() for line in coverage_ref_file.readlines()])

         for root, dirs, files in os.walk(self.dir):
@@ -599,7 +599,7 @@ class RPCCoverage():
                 coverage_filenames.add(os.path.join(root, filename))

         for filename in coverage_filenames:
-            with open(filename, 'r') as coverage_file:
+            with open(filename, 'r', encoding="utf8") as coverage_file:
                 covered_cmds.update([line.strip() for line in coverage_file.readlines()])

         return all_cmds - covered_cmds
@@ -88,7 +88,7 @@ class MultiWalletTest(BitcoinTestFramework):
         self.nodes[0].assert_start_raises_init_error(['-walletdir=bad'], 'Error: Specified -walletdir "bad" does not exist')
         # should not initialize if the specified walletdir is not a directory
         not_a_dir = wallet_dir('notadir')
-        open(not_a_dir, 'a').close()
+        open(not_a_dir, 'a', encoding="utf8").close()
         self.nodes[0].assert_start_raises_init_error(['-walletdir=' + not_a_dir], 'Error: Specified -walletdir "' + not_a_dir + '" is not a directory')

         self.log.info("Do not allow -zapwallettxes with multiwallet")
@@ -44,7 +44,7 @@ def process_commands(fname):
     """Find and parse dispatch table in implementation file `fname`."""
     cmds = []
     in_rpcs = False
-    with open(fname, "r") as f:
+    with open(fname, "r", encoding="utf8") as f:
         for line in f:
             line = line.rstrip()
             if not in_rpcs:
@@ -70,7 +70,7 @@ def process_mapping(fname):
     """Find and parse conversion table in implementation file `fname`."""
     cmds = []
     in_rpcs = False
-    with open(fname, "r") as f:
+    with open(fname, "r", encoding="utf8") as f:
         for line in f:
             line = line.rstrip()
             if not in_rpcs:
@@ -28,7 +28,7 @@ import sys
 def main():
     config = configparser.ConfigParser()
     config.optionxform = str
-    config.readfp(open(os.path.join(os.path.dirname(__file__), "../config.ini")))
+    config.readfp(open(os.path.join(os.path.dirname(__file__), "../config.ini"), encoding="utf8"))
     env_conf = dict(config.items('environment'))

     parser = argparse.ArgumentParser(description=__doc__)
@@ -49,7 +49,7 @@ def main():
 def bctester(testDir, input_basename, buildenv):
     """ Loads and parses the input file, runs all tests and reports results"""
     input_filename = os.path.join(testDir, input_basename)
-    raw_data = open(input_filename).read()
+    raw_data = open(input_filename, encoding="utf8").read()
     input_data = json.loads(raw_data)

     failed_testcases = []
@@ -86,7 +86,7 @@ def bctest(testDir, testObj, buildenv):
     inputData = None
     if "input" in testObj:
         filename = os.path.join(testDir, testObj["input"])
-        inputData = open(filename).read()
+        inputData = open(filename, encoding="utf8").read()
         stdinCfg = subprocess.PIPE

     # Read the expected output data (if there is any)
@@ -97,7 +97,7 @@ def bctest(testDir, testObj, buildenv):
         outputFn = testObj['output_cmp']
         outputType = os.path.splitext(outputFn)[1][1:] # output type from file extension (determines how to compare)
         try:
-            outputData = open(os.path.join(testDir, outputFn)).read()
+            outputData = open(os.path.join(testDir, outputFn), encoding="utf8").read()
         except:
             logging.error("Output file " + outputFn + " can not be opened")
             raise
@@ -18,7 +18,7 @@ class TestRPCAuth(unittest.TestCase):
         config_path = os.path.abspath(
             os.path.join(os.sep, os.path.abspath(os.path.dirname(__file__)),
                 "../config.ini"))
-        with open(config_path) as config_file:
+        with open(config_path, encoding="utf8") as config_file:
             config.read_file(config_file)
         sys.path.insert(0, os.path.dirname(config['environment']['RPCAUTH']))
         self.rpcauth = importlib.import_module('rpcauth')