[Verify] Add --detailed flag to rally verify start cmd

This flag allows us to see test failures once the verification has finished.

Closes-Bug: #1655098

Change-Id: Ief2d7c0366eded9a8ac7c07df16d6943df9243b8
Author: Yaroslav Lobankov 2017-01-19 19:39:00 +04:00
parent 1e5aa0df77
commit 088a5b16d5
4 changed files with 69 additions and 25 deletions
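
In practice the new flag is simply appended to the existing command, e.g. rally verify start --id <verifier-uuid> --deployment-id <deployment-uuid> --detailed (the UUIDs here are placeholders); when tests fail, the command now prints each failed test's name and traceback before the summary totals.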


@@ -57,7 +57,7 @@ _rally()
     OPTS["verify_report"]="--uuid --type --to --open"
     OPTS["verify_rerun"]="--uuid --deployment-id --failed"
     OPTS["verify_show"]="--uuid --sort-by --detailed"
-    OPTS["verify_start"]="--id --deployment-id --tag --pattern --concurrency --load-list --skip-list --xfail-list --no-use"
+    OPTS["verify_start"]="--id --deployment-id --tag --pattern --concurrency --load-list --skip-list --xfail-list --detailed --no-use"
     OPTS["verify_update-verifier"]="--id --update-venv --version --system-wide --no-system-wide"
     OPTS["verify_use"]="--uuid"
     OPTS["verify_use-verifier"]="--id"


@@ -50,13 +50,22 @@ class VerifyCommands(object):
         print("\n======\n"
               "Totals"
               "\n======\n"
-              "Ran: %(tests_count)s tests in %(tests_duration)s sec.\n"
+              "\nRan: %(tests_count)s tests in %(tests_duration)s sec.\n"
               " - Success: %(success)s\n"
               " - Skipped: %(skipped)s\n"
               " - Expected failures: %(expected_failures)s\n"
               " - Unexpected success: %(unexpected_success)s\n"
               " - Failures: %(failures)s\n" % totals)
 
+    @staticmethod
+    def _print_failures(h_text, failures, symbol="-"):
+        print("\n%s" % cliutils.make_header(
+            h_text, size=len(h_text), symbol=symbol).strip())
+        for f in failures:
+            header = "%s\n%s\n" % (f["name"], "-" * len(f["name"]))
+            failure = "\n%s%s\n" % (header, f["traceback"].strip())
+            print(failure)
+
     @cliutils.args("--namespace", dest="namespace", type=str, metavar="<name>",
                    required=False,
                    help="Namespace name (for example, openstack).")
@@ -383,6 +392,10 @@ class VerifyCommands(object):
                         "considered as expected failures. "
                         "Format: json or yaml like a dictionary where keys "
                         "are test names and values are reasons.")
+    @cliutils.args("--detailed", dest="detailed", action="store_true",
+                   required=False,
+                   help="Show verification details such as errors of failed "
+                        "tests.")
     @cliutils.args("--no-use", dest="do_use", action="store_false",
                    help="Not to set the finished verification as the default "
                         "verification for future operations.")
@@ -391,7 +404,7 @@
     @plugins.ensure_plugins_are_loaded
     def start(self, api, verifier_id=None, deployment=None, tags=None,
               pattern=None, concur=0, load_list=None, skip_list=None,
-              xfail_list=None, do_use=True):
+              xfail_list=None, detailed=False, do_use=True):
         """Start a verification (run verifier tests)."""
         if pattern and load_list:
             print(_("Arguments '--pattern' and '--load-list' cannot be used "
@@ -429,6 +442,17 @@
         verification, results = api.verification.start(verifier_id, deployment,
                                                        tags=tags, **run_args)
 
+        if detailed:
+            failures = results.filter_tests("fail").values()
+            if failures:
+                h_text = "Failed %d %s - output below:" % (
+                    len(failures), "tests" if len(failures) > 1 else "test")
+                self._print_failures(h_text, failures, "=")
+            else:
+                print(_("\nCongratulations! Verification doesn't have failed "
+                        "tests! :)"))
+
         self._print_totals(results.totals)
 
         if do_use:
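
With a single failed test like the one in the unit tests below (test_2 with traceback "Some traceback"), this detailed branch would print roughly the following before the totals; the exact framing depends on cliutils.make_header, here assumed to match the sketch shown earlier:

    =============================
    Failed 1 test - output below:
    =============================

    test_2
    ------
    Some traceback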
@@ -543,12 +567,7 @@
         if detailed:
             failures = [t for t in tests.values() if t["status"] == "fail"]
             if failures:
-                h = _("Failures")
-                print("\n%s" % cliutils.make_header(h, len(h)).strip())
-                for f in failures:
-                    header = "%s\n%s\n" % (f["name"], "-" * len(f["name"]))
-                    failure = "\n%s%s\n" % (header, f["traceback"].strip())
-                    print(failure)
+                self._print_failures("Failures", failures)
             else:
                 print(_("\nCongratulations! Verification doesn't have failed "
                         "tests! :)"))


@@ -262,8 +262,9 @@ def main():
     # Start a verification, show results and generate reports
     skip_list_path = write_file("skip-list.yaml", SKIP_TESTS)
     xfail_list_path = write_file("xfail-list.yaml", XFAIL_TESTS)
-    run_args = ("%s --skip-list %s --xfail-list %s --tag first-run %s-set" %
-                (MODES[args.mode], skip_list_path, xfail_list_path, args.mode))
+    run_args = ("%s --skip-list %s --xfail-list %s --tag first-run %s-set "
+                "--detailed" % (MODES[args.mode], skip_list_path,
+                                xfail_list_path, args.mode))
     render_vars["verifications"].append(start_verification(run_args))
 
     if args.compare:
@@ -271,8 +272,8 @@
         with gzip.open(render_vars["list_verifier_tests"]["stdout_file"]) as f:
             tests = [t for t in f.read().split("\n") if TEST_NAME_RE.match(t)]
         load_list_path = write_file("load-list.txt", "\n".join(tests))
-        run_args = "--load-list %s --tag second-run %s-set" % (load_list_path,
-                                                               args.mode)
+        run_args = "--load-list %s --tag second-run %s-set --detailed" % (
+            load_list_path, args.mode)
         render_vars["verifications"].append(start_verification(run_args))
 
     # Generate trends reports for two verifications


@@ -177,21 +177,44 @@ class VerifyCommandsTestCase(test.TestCase):
                           load_list="load-list")
         self.assertFalse(self.fake_api.verification.start.called)
         verification = mock.Mock(uuid="v_uuid")
+        failed_test = {
+            "test_2": {
+                "name": "test_2",
+                "status": "fail",
+                "duration": 2,
+                "traceback": "Some traceback"
+            }
+        }
+        test_results = {
+            "tests": {
+                "test_1": {
+                    "name": "test_1",
+                    "status": "success",
+                    "duration": 2,
+                    "tags": []
+                }
+            },
+            "totals": {
+                "tests_count": 2,
+                "tests_duration": 4,
+                "success": 2,
+                "skipped": 0,
+                "expected_failures": 0,
+                "unexpected_success": 0,
+                "failures": 0
+            }
+        }
+        test_results["tests"].update(failed_test)
+        results = mock.Mock(**test_results)
+        results.filter_tests.return_value = failed_test
         self.fake_api.verification.start.return_value = (verification, results)
         self.fake_api.verification.get.return_value = verification
         mock_exists.return_value = False
         self.verify.start(self.fake_api, "v_id", "d_id", load_list="/p/a/t/h")
         self.assertFalse(self.fake_api.verification.start.called)
         verification = mock.Mock(uuid="v_uuid")
-        results = mock.Mock(totals={"tests_count": 2,
-                                    "tests_duration": 4,
-                                    "success": 2,
-                                    "skipped": 0,
-                                    "expected_failures": 0,
-                                    "unexpected_success": 0,
-                                    "failures": 0})
         self.fake_api.verification.start.return_value = (verification, results)
         self.fake_api.verification.get.return_value = verification
         mock_exists.return_value = True
         tf = tempfile.NamedTemporaryFile()
         with open(tf.name, "w") as f:
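
A note on the wiring above: start() only calls results.filter_tests("fail") and reads each test's name and traceback from the returned mapping, so stubbing filter_tests with failed_test is enough to drive the detailed branch. A plain-Python sketch of that consumption:

    # failed_test mirrors the fixture above; in the test, the mocked
    # results.filter_tests("fail") returns exactly this mapping.
    failed_test = {"test_2": {"name": "test_2", "status": "fail",
                              "duration": 2, "traceback": "Some traceback"}}
    failures = list(failed_test.values())
    h_text = "Failed %d %s - output below:" % (
        len(failures), "tests" if len(failures) > 1 else "test")
    assert h_text == "Failed 1 test - output below:"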
@@ -235,7 +258,8 @@
         self.fake_api.verification.get.reset_mock()
         mock_update_globals_file.reset_mock()
-        self.verify.start(self.fake_api, "v_id", "d_id", do_use=False)
+        self.verify.start(self.fake_api, "v_id", "d_id", detailed=True,
+                          do_use=False)
         self.assertFalse(self.fake_api.verification.get.called)
         self.assertFalse(mock_update_globals_file.called)