fixup: Fix pruning test
parent 97f517dd85
commit f402012ccf
1 changed file with 3 additions and 16 deletions
@@ -23,8 +23,6 @@ from test_framework.util import (
     wait_until,
 )
 
-MIN_BLOCKS_TO_KEEP = 288
-
 # Rescans start at the earliest block up to 2 hours before a key timestamp, so
 # the manual prune RPC avoids pruning blocks in the same window to be
 # compatible with pruning based on key creation time.
@@ -273,20 +271,9 @@ class PruneTest(BitcoinTestFramework):
             else:
                 return index
 
-        def prune(index, expected_ret=None):
+        def prune(index):
             ret = node.pruneblockchain(height=height(index))
-            # Check the return value. When use_timestamp is True, just check
-            # that the return value is less than or equal to the expected
-            # value, because when more than one block is generated per second,
-            # a timestamp will not be granular enough to uniquely identify an
-            # individual block.
-            if expected_ret is None:
-                expected_ret = index
-            if use_timestamp:
-                assert_greater_than(ret, 0)
-                assert_greater_than(expected_ret + 1, ret)
-            else:
-                assert_equal(ret, expected_ret)
+            assert_equal(ret, node.getblockchaininfo()['pruneheight'])
 
         def has_block(index):
             return os.path.isfile(os.path.join(self.nodes[node_number].datadir, "regtest", "blocks", "blk{:05}.dat".format(index)))
@@ -326,7 +313,7 @@ class PruneTest(BitcoinTestFramework):
         assert not has_block(1), "blk00001.dat is still there, should be pruned by now"
 
         # height=1000 should not prune anything more, because tip-288 is in blk00002.dat.
-        prune(1000, 1001 - MIN_BLOCKS_TO_KEEP)
+        prune(1000)
         assert has_block(2), "blk00002.dat is still there, should be pruned by now"
 
         # advance the tip so blk00002.dat and blk00003.dat can be pruned (the last 288 blocks should now be in blk00004.dat)
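For reference, the prune() helper reads roughly as follows after this commit. This is a sketch reassembled from the hunk above, not a verbatim excerpt of the file: the node object, the height() helper, and assert_equal are provided by the surrounding PruneTest case and test_framework.util, so the snippet only runs inside that test.

        def prune(index):
            # Manually prune up to the block selected by height(); when the test
            # runs in use_timestamp mode, height() addresses the block by its
            # timestamp instead of its height.
            ret = node.pruneblockchain(height=height(index))
            # The return value is now checked against the node's own reported
            # prune height, so the expected_ret argument and the use_timestamp
            # special-casing removed above are no longer needed.
            assert_equal(ret, node.getblockchaininfo()['pruneheight'])

With this check, callers such as prune(1000) in the last hunk no longer compute an expected return value like 1001 - MIN_BLOCKS_TO_KEEP themselves, which is why the module-level MIN_BLOCKS_TO_KEEP constant can be dropped in the first hunk.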