Merge #13105: [qa] Add --failfast option to functional test runner

58f9a0a Use --failfast when running functional tests on Travis (James O'Beirne)
bf720c1 Add --failfast option to functional test runner (James O'Beirne)

Pull request description:

  Add the option (`--failfast`) to stop the functional test runner's execution when it encounters the first failure. Also cleans up run_tests' argument list ([no more mutable default for `args`](http://docs.python-guide.org/en/latest/writing/gotchas/#mutable-default-arguments)) and its call site.

Tree-SHA512: e854b1b1634bf613ae8ae88e715df1460982fa68db9d785aafeb5eccf5bf324c7f20dded2ca6840ebf18a28347ecac2138d6c7592507b34939b02609ef55e1b3

commit 9e9b48df72

2 changed files with 35 additions and 3 deletions
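
The mutable-default cleanup mentioned in the description refers to a common Python gotcha: a default such as `args=[]` is evaluated once, when the function is defined, so every call that mutates it shares the same list. The sketch below is a standalone illustration of that failure mode and of the `None`-plus-`or` idiom the diff switches run_tests to; the `collect_*` functions are hypothetical and not part of this PR.

    # Buggy: the default list is created once, at definition time, and shared
    # by every call that relies on the default.
    def collect_buggy(item, items=[]):
        items.append(item)
        return items

    print(collect_buggy(1))  # [1]
    print(collect_buggy(2))  # [1, 2]  (state left over from the first call)

    # Fixed: use a sentinel default and build a fresh list per call, mirroring
    # the `args = args or []` pattern now used in run_tests().
    def collect_fixed(item, items=None):
        items = items or []
        items.append(item)
        return items

    print(collect_fixed(1))  # [1]
    print(collect_fixed(2))  # [2]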

.travis.yml

@@ -80,7 +80,7 @@ script:
     - export LD_LIBRARY_PATH=$TRAVIS_BUILD_DIR/depends/$HOST/lib
     - if [ "$RUN_TESTS" = "true" ]; then travis_wait 50 make $MAKEJOBS check VERBOSE=1; fi
     - if [ "$TRAVIS_EVENT_TYPE" = "cron" ]; then extended="--extended --exclude feature_pruning,feature_dbcrash"; fi
-    - if [ "$RUN_TESTS" = "true" ]; then test/functional/test_runner.py --combinedlogslen=4000 --coverage --quiet ${extended}; fi
+    - if [ "$RUN_TESTS" = "true" ]; then test/functional/test_runner.py --combinedlogslen=4000 --coverage --quiet --failfast ${extended}; fi
 after_script:
     - echo $TRAVIS_COMMIT_RANGE
     - echo $TRAVIS_COMMIT_LOG

test/functional/test_runner.py

@@ -201,6 +201,7 @@ def main():
     parser.add_argument('--keepcache', '-k', action='store_true', help='the default behavior is to flush the cache directory on startup. --keepcache retains the cache from the previous testrun.')
     parser.add_argument('--quiet', '-q', action='store_true', help='only print results summary and failure logs')
     parser.add_argument('--tmpdirprefix', '-t', default=tempfile.gettempdir(), help="Root directory for datadirs")
+    parser.add_argument('--failfast', action='store_true', help='stop execution after the first test failure')
     args, unknown_args = parser.parse_known_args()

     # args to be passed on always start with two dashes; tests are the remaining unknown args
@@ -283,9 +284,21 @@ def main():
     if not args.keepcache:
         shutil.rmtree("%s/test/cache" % config["environment"]["BUILDDIR"], ignore_errors=True)

-    run_tests(test_list, config["environment"]["SRCDIR"], config["environment"]["BUILDDIR"], tmpdir, args.jobs, args.coverage, passon_args, args.combinedlogslen)
+    run_tests(
+        test_list,
+        config["environment"]["SRCDIR"],
+        config["environment"]["BUILDDIR"],
+        tmpdir,
+        jobs=args.jobs,
+        enable_coverage=args.coverage,
+        args=passon_args,
+        combined_logs_len=args.combinedlogslen,
+        failfast=args.failfast,
+    )

-def run_tests(test_list, src_dir, build_dir, tmpdir, jobs=1, enable_coverage=False, args=[], combined_logs_len=0):
+def run_tests(test_list, src_dir, build_dir, tmpdir, jobs=1, enable_coverage=False, args=None, combined_logs_len=0, failfast=False):
+    args = args or []
+
     # Warn if bitcoind is already running (unix only)
     try:
         if subprocess.check_output(["pidof", "bitcoind"]) is not None:
@@ -346,6 +359,10 @@ def run_tests(test_list, src_dir, build_dir, tmpdir, jobs=1, enable_coverage=Fal
             combined_logs, _ = subprocess.Popen([sys.executable, os.path.join(tests_dir, 'combine_logs.py'), '-c', testdir], universal_newlines=True, stdout=subprocess.PIPE).communicate()
             print("\n".join(deque(combined_logs.splitlines(), combined_logs_len)))

+        if failfast:
+            logging.debug("Early exiting after test failure")
+            break
+
     print_results(test_results, max_len_name, (int(time.time() - start_time)))

     if coverage:
@@ -360,6 +377,10 @@ def run_tests(test_list, src_dir, build_dir, tmpdir, jobs=1, enable_coverage=Fal

     all_passed = all(map(lambda test_result: test_result.was_successful, test_results))

+    # This will be a no-op unless failfast is True in which case there may be dangling
+    # processes which need to be killed.
+    job_queue.kill_and_join()
+
     sys.exit(not all_passed)

 def print_results(test_results, max_len_name, runtime):
@@ -450,6 +471,17 @@ class TestHandler:
                 return TestResult(name, status, int(time.time() - start_time)), testdir, stdout, stderr
             print('.', end='', flush=True)

+    def kill_and_join(self):
+        """Send SIGKILL to all jobs and block until all have ended."""
+        procs = [i[2] for i in self.jobs]
+
+        for proc in procs:
+            proc.kill()
+
+        for proc in procs:
+            proc.wait()
+
+
 class TestResult():
     def __init__(self, name, status, time):
         self.name = name
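
With --failfast, run_tests() breaks out of its result-collection loop after the first failing test and then calls job_queue.kill_and_join() so that any still-running test subprocesses are terminated and reaped. The sketch below is a self-contained illustration of that control flow, not the actual TestHandler code; run_jobs_failfast and the sample commands are hypothetical.

    import subprocess
    import sys

    def run_jobs_failfast(commands, failfast=True):
        """Run each command as a subprocess, optionally stopping at the first failure."""
        procs = [subprocess.Popen(cmd) for cmd in commands]
        results = []

        for proc in procs:
            returncode = proc.wait()
            results.append(returncode)
            if failfast and returncode != 0:
                # Stop collecting results after the first failure.
                break

        # No-op unless we broke out early, in which case the remaining
        # processes are still running and need to be killed, then reaped.
        remaining = [p for p in procs if p.poll() is None]
        for p in remaining:
            p.kill()
        for p in remaining:
            p.wait()

        return results

    if __name__ == '__main__':
        # The second command fails, so the long-running third one is killed early.
        commands = [
            [sys.executable, '-c', 'import time; time.sleep(0.1)'],
            [sys.executable, '-c', 'import sys; sys.exit(1)'],
            [sys.executable, '-c', 'import time; time.sleep(60)'],
        ]
        print(run_jobs_failfast(commands))  # e.g. [0, 1]

Killing all leftover processes before waiting on any of them mirrors kill_and_join(): the kill signals go out immediately instead of being delayed behind each wait().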