Add the "task restart" command
Support restarting a finished or aborted task, or restarting only selected scenarios of a task's workloads (for example, the ones that failed).

Change-Id: Ifebef92bc0f549e4d0c778261bfc9b40e90d39af
parent b72a9866c0
commit 23883fa0c9
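For context, the intended CLI usage looks roughly like this (the UUID is only a placeholder; without --uuid the last-used task and deployment are picked up through the envutils defaults added below):

    # restart a whole finished or aborted task
    rally task restart --uuid <task-uuid>

    # restart only selected scenarios of that task's workloads
    rally task restart --uuid <task-uuid> --scenario Dummy.dummy_random_fail_in_atomic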
@@ -50,6 +50,7 @@ _rally()
     OPTS["task_import"]="--file --deployment --tag"
     OPTS["task_list"]="--deployment --all-deployments --status --tag --uuids-only"
     OPTS["task_report"]="--out --open --html --html-static --json --uuid --deployment"
+    OPTS["task_restart"]="--deployment --uuid --scenario --tag --no-use --abort-on-sla-failure"
     OPTS["task_results"]="--uuid"
     OPTS["task_sla-check"]="--uuid --json"
     OPTS["task_start"]="--deployment --task --task-args --task-args-file --tag --no-use --abort-on-sla-failure"
@@ -201,6 +201,12 @@ class TaskCommands(object):
                                                   args_file=task_args_file)
         print("Running Rally version", version.version_string())
 
+        return self._start_task(api, deployment, task_config=input_task,
+                                tags=tags, do_use=do_use,
+                                abort_on_sla_failure=abort_on_sla_failure)
+
+    def _start_task(self, api, deployment, task_config, tags=None,
+                    do_use=False, abort_on_sla_failure=False):
         try:
             task_instance = api.task.create(deployment=deployment, tags=tags)
             tags = "[tags: '%s']" % "', '".join(tags) if tags else ""
@@ -215,7 +221,7 @@ class TaskCommands(object):
             if do_use:
                 self.use(api, task_instance["uuid"])
 
-            api.task.start(deployment=deployment, config=input_task,
+            api.task.start(deployment=deployment, config=task_config,
                            task=task_instance["uuid"],
                            abort_on_sla_failure=abort_on_sla_failure)
 
@@ -227,6 +233,72 @@ class TaskCommands(object):
             return 2
         return 0
 
+    @cliutils.args("--deployment", dest="deployment", type=str,
+                   metavar="<uuid>", required=False,
+                   help="UUID or name of a deployment.")
+    @cliutils.args("--uuid", type=str, dest="task_id", help="UUID of task.")
+    @cliutils.args("--scenario", type=str, dest="scenarios", nargs="+",
+                   help="scenario name of workload")
+    @cliutils.args("--tag", nargs="+", dest="tags", type=str, required=False,
+                   help="Mark the task with a tag or a few tags.")
+    @cliutils.args("--no-use", action="store_false", dest="do_use",
+                   help="Don't set new task as default for future operations.")
+    @cliutils.args("--abort-on-sla-failure", action="store_true",
+                   dest="abort_on_sla_failure",
+                   help="Abort the execution of a task when any SLA check "
+                        "for it fails for subtask or workload.")
+    @envutils.with_default_deployment(cli_arg_name="deployment")
+    @envutils.with_default_task_id
+    @plugins.ensure_plugins_are_loaded
+    def restart(self, api, deployment=None, task_id=None, scenarios=None,
+                tags=None, do_use=False, abort_on_sla_failure=False):
+        """Restart a task or some scenarios in workloads of task."""
+        if scenarios is not None:
+            scenarios = (isinstance(scenarios, list) and scenarios
+                         or [scenarios])
+        task = api.task.get(task_id=task_id, detailed=True)
+        if task["status"] == consts.TaskStatus.CRASHED or task["status"] == (
+                consts.TaskStatus.VALIDATION_FAILED):
+            print("-" * 80)
+            print("\nUnable to restart task.")
+            validation = task["validation_result"]
+            if logging.is_debug():
+                print(yaml.safe_load(validation["trace"]))
+            else:
+                print(validation["etype"])
+                print(validation["msg"])
+            print("\nFor more details run:\nrally -d task detailed %s"
+                  % task["uuid"])
+            return 1
+        retask = {"version": 2, "title": task["title"],
+                  "description": task["description"],
+                  "tags": task["tags"], "subtasks": []}
+        for subtask in task["subtasks"]:
+            workloads = []
+            for workload in subtask["workloads"]:
+                if scenarios is None or workload["name"] in scenarios:
+                    workloads.append({
+                        "scenario": {workload["name"]: workload["args"]},
+                        "contexts": workload["contexts"],
+                        "runner": {
+                            workload["runner_type"]: workload["runner"]},
+                        "hooks": workload["hooks"],
+                        "sla": workload["sla"]
+                    })
+            if workloads:
+                retask["subtasks"].append({
+                    "title": subtask["title"],
+                    "description": subtask["description"],
+                    "workloads": workloads})
+
+        if retask["subtasks"]:
+            return self._start_task(api, deployment, retask, tags=tags,
+                                    do_use=do_use,
+                                    abort_on_sla_failure=abort_on_sla_failure)
+        else:
+            print("Not Found matched scenario.")
+            return 1
+
     @cliutils.args("--uuid", type=str, dest="task_id", help="UUID of task.")
     @envutils.with_default_task_id
     @cliutils.args(
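As a rough sketch (not part of the change itself), the regenerated task config that restart() hands back to _start_task() for a single-workload task would look like this; the field values here mirror the unit-test fixture added further down and are purely illustrative:

    # hypothetical "retask" input rebuilt from the stored workload data
    retask = {
        "version": 2,
        "title": "fake_task",
        "description": "this is a test",
        "tags": [],
        "subtasks": [{
            "title": "subtask",
            "description": "",
            "workloads": [{
                "scenario": {"scenario_name": {}},
                "contexts": {},
                "runner": {"constant": {"times": 20, "concurrency": 5}},
                "hooks": [],
                "sla": {}
            }]
        }]
    }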
@@ -1050,6 +1050,52 @@ class TaskTestCase(testtools.TestCase):
             " ".join(task_uuids), html_report))
         self.assertTrue(os.path.exists(html_report))
 
+    def test_restart(self):
+        rally = utils.Rally()
+        deployment_id = utils.get_global("RALLY_DEPLOYMENT", rally.env)
+        cfg = self._get_sample_task_config()
+        config = utils.TaskConfig(cfg)
+        output = rally(("task start --task %(task_file)s "
+                        "--deployment %(deployment_id)s") %
+                       {"task_file": config.filename,
+                        "deployment_id": deployment_id})
+
+        output = rally("task restart")
+        result = re.search(
+            r"(?P<task_id>[0-9a-f\-]{36}): started", output)
+        self.assertIsNotNone(result)
+
+    def test_restart_with_scenario(self):
+        rally = utils.Rally()
+        deployment_id = utils.get_global("RALLY_DEPLOYMENT", rally.env)
+        cfg = self._get_sample_task_config()
+        config = utils.TaskConfig(cfg)
+        output = rally(("task start --task %(task_file)s "
+                        "--deployment %(deployment_id)s") %
+                       {"task_file": config.filename,
+                        "deployment_id": deployment_id})
+
+        output = rally(
+            "task restart --scenario Dummy.dummy_random_fail_in_atomic")
+        result = re.search(
+            r"(?P<task_id>[0-9a-f\-]{36}): started", output)
+        self.assertIsNotNone(result)
+        result = re.search(
+            r"test scenario Dummy\.dummy_random_fail_in_atomic", output)
+        self.assertIsNotNone(result)
+
+    def test_restart_with_fake_scenario(self):
+        rally = utils.Rally()
+        deployment_id = utils.get_global("RALLY_DEPLOYMENT", rally.env)
+        cfg = self._get_sample_task_config()
+        config = utils.TaskConfig(cfg)
+        rally(("task start --task %(task_file)s "
+               "--deployment %(deployment_id)s") %
+              {"task_file": config.filename,
+               "deployment_id": deployment_id})
+        self.assertRaises(utils.RallyCliError, rally,
+                          "task restart --scenario fake.fake_scenario")
+
 
 class SLATestCase(testtools.TestCase):
 
@@ -322,6 +322,67 @@ class TaskCommandsTestCase(test.TestCase):
 
         self.assertFalse(mock__detailed.called)
 
+    @mock.patch("rally.cli.commands.task.TaskCommands._start_task")
+    @ddt.data({"scenario": None},
+              {"scenario": "scenario_name"},
+              {"scenario": "none_name"})
+    @ddt.unpack
+    def test_restart(self, mock__start_task, scenario):
+        self.fake_api.task.get.return_value = {
+            "status": "finished",
+            "title": "fake_task",
+            "description": "this is a test",
+            "tags": [],
+            "subtasks": [
+                {"title": "subtask", "description": "",
+                 "workloads": [
+                     {
+                         "name": "scenario_name",
+                         "args": {},
+                         "contexts": {},
+                         "runner": {"times": 20, "concurrency": 5},
+                         "runner_type": "constant",
+                         "hooks": [],
+                         "sla": {}
+                     }]}
+            ]
+        }
+        if scenario == "none_name":
+            self.assertEqual(
+                1,
+                self.task.restart(self.fake_api, "deployment_uuid",
+                                  "task_uuid", scenarios=scenario)
+            )
+        else:
+            self.assertEqual(
+                mock__start_task.return_value,
+                self.task.restart(self.fake_api, "deployment_uuid",
+                                  "task_uuid", scenarios=scenario)
+            )
+        self.fake_api.task.get.assert_called_once_with(task_id="task_uuid",
+                                                       detailed=True)
+
+    def test_restart_by_crashed_task(self):
+        self.fake_api.task.get.return_value = {
+            "uuid": "task_uuid",
+            "status": "crashed",
+            "title": "fake_task",
+            "description": "this is a test",
+            "tags": [],
+            "subtasks": [],
+            "validation_result": {
+                "trace": {},
+                "etype": "",
+                "msg": ""
+            }
+        }
+        self.assertEqual(
+            1,
+            self.task.restart(self.fake_api, "deployment_uuid", "task_uuid")
+        )
+        self.fake_api.task.get.assert_called_once_with(task_id="task_uuid",
+                                                       detailed=True)
+
     def test_abort(self):
         test_uuid = "17860c43-2274-498d-8669-448eff7b073f"
         self.task.abort(self.fake_api, test_uuid)