# Copyright 2013: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import json
import os
import re
import threading
import time

import jsonschema
import mock
import testtools

from rally import api
from tests.functional import utils


# Syntactically valid UUID that belongs to no task; used to exercise the
# "task not found" error paths of the CLI commands below.
FAKE_TASK_UUID = "87ab639d-4968-4638-b9a1-07774c32484a"


class TaskTestCase(testtools.TestCase):
    """Functional tests for the ``rally task`` command-line interface.

    Each test drives a real ``rally`` process through
    :class:`tests.functional.utils.Rally` and asserts on the CLI's
    textual output.
    """

    def _get_sample_task_config(self):
        """Return a minimal v1 task config.

        The scenario fails randomly inside an atomic action, but the SLA
        allows a 100% failure rate, so ``task start`` itself succeeds.
        """
        return {
            "Dummy.dummy_random_fail_in_atomic": [
                {
                    "runner": {
                        "type": "constant",
                        "times": 20,
                        "concurrency": 5
                    },
                    "sla": {
                        "failure_rate": {"max": 100}
                    }
                }
            ]
        }

    def _get_sample_task_config_v2(self):
        """Return a sample task config in the version-2 task format."""
        return {
            "version": 2,
            "title": "Dummy task",
            "tags": ["dummy", "functional_test"],
            "subtasks": [
                {
                    "title": "first-subtask",
                    "group": "Dummy group",
                    "description": "The first subtask in dummy task",
                    "tags": ["dummy", "functional_test"],
                    "workloads": [
                        {
                            "scenario": {
                                "Dummy.dummy": {"sleep": 0}},
                            "runner": {
                                "constant": {
                                    "times": 10,
                                    "concurrency": 2
                                }
                            }
                        }
                    ]
                },
                {
                    "title": "second-subtask",
                    "description": "The second subtask in dummy task",
                    "tags": ["dummy", "functional_test"],
                    "scenario": {
                        "Dummy.dummy": {"sleep": 1}},
                    "runner": {
                        "constant": {
                            "times": 10,
                            "concurrency": 2
                        }
                    }
                }
            ]
        }

    def _get_deployment_uuid(self, output):
        """Extract the deployment UUID from CLI *output*.

        NOTE(review): the named group ``(?P<uuid>...)`` was restored here;
        the mangled source showed ``(?P[...]`` (the ``<uuid>`` part was
        stripped along with other angle-bracketed text), while the
        ``.group("uuid")`` call requires the named group.
        """
        return re.search(
            r"Using deployment: (?P<uuid>[0-9a-f\-]{36})",
            output).group("uuid")

    def _get_task_uuid(self, output):
        """Extract a task UUID from the hint line printed by ``task start``."""
        # Named group restored for the same reason as in
        # _get_deployment_uuid above.
        return re.search(
            r"\trally task report (?P<uuid>[0-9a-f\-]{36})",
            output).group("uuid")

    def test_status(self):
        """``task status`` reports "finished" for a completed task."""
        rally = utils.Rally()
        cfg = self._get_sample_task_config()
        config = utils.TaskConfig(cfg)
        rally("task start --task %s" % config.filename)
        self.assertIn("finished", rally("task status"))

    def test_detailed(self):
        """``task detailed`` shows scenario names and atomic actions."""
        rally = utils.Rally()
        cfg = self._get_sample_task_config()
        config = utils.TaskConfig(cfg)
        rally("task start --task %s" % config.filename)
        detailed = rally("task detailed")
        self.assertIn("Dummy.dummy_random_fail_in_atomic", detailed)
        self.assertIn("dummy_fail_test (x2)", detailed)
        detailed_iterations_data = rally("task detailed --iterations-data")
        self.assertIn(". dummy_fail_test (x2)", detailed_iterations_data)
        # Every iteration produced data, so no "n/a" placeholders appear.
        self.assertNotIn("n/a", detailed_iterations_data)

    def test_detailed_filter_by_scenario(self):
        """``--filter-by scenario=`` limits the output to that scenario."""
        rally = utils.Rally()
        cfg = self._get_sample_task_config()
        config = utils.TaskConfig(cfg)
        rally("task start --task %s" % config.filename)
        detailed = rally("task detailed")
        self.assertIn("Dummy.dummy_random_fail_in_atomic", detailed)
        detailed = rally("task detailed --filter-by scenario="
                         "Dummy.dummy_random_fail_in_atomic")
        self.assertIn("Dummy.dummy_random_fail_in_atomic", detailed)
        # A non-matching scenario name filters everything out.
        detailed = rally("task detailed --filter-by scenario=scenario.empty")
        self.assertNotIn("Dummy.dummy_random_fail_in_atomic", detailed)

    def test_detailed_filter_by_sla_failures(self):
        """``--filter-by sla-failures`` shows only SLA-violating workloads."""
        rally = utils.Rally()
        cfg = {
            "Dummy.dummy_exception": [
                {
                    "runner": {
                        "type": "constant",
                        "times": 1,
                        "concurrency": 1
                    },
                    "sla": {
                        "failure_rate": {"max": 0}
                    }
                }
            ],
            "Dummy.dummy_random_action": [
                {
                    "runner": {
                        "type": "constant",
                        "times": 1,
                        "concurrency": 1
                    }
                }
            ]
        }
        config = utils.TaskConfig(cfg)
        # it should be failed due to Dummy.dummy_exception
        self.assertRaises(utils.RallyCliError, rally,
                          "task start --task %s" % config.filename)
        output = rally("task detailed --filter-by sla-failures")
        self.assertIn("Dummy.dummy_exception", output)
        self.assertNotIn("Dummy.dummy_random_action", output)

    def test_detailed_with_errors(self):
        """``task detailed`` reports the per-task error counter."""
        rally = utils.Rally()
        cfg = {
            "Dummy.dummy_exception": [
                {
                    "runner": {
                        "type": "constant",
                        "times": 1,
                        "concurrency": 1
                    },
                    "sla": {
                        "failure_rate": {"max": 100}
                    }
                }
            ]
        }
        config = utils.TaskConfig(cfg)
        output = rally("task start --task %s" % config.filename)
        # Named group restored (see _get_deployment_uuid).
        uuid = re.search(
            r"(?P<uuid>[0-9a-f\-]{36}): started", output).group("uuid")
        output = rally("task detailed")
        self.assertIn("Task %s has 1 error(s)" % uuid, output)

    def test_detailed_no_atomic_actions(self):
        """``task detailed`` works for scenarios without atomic actions."""
        rally = utils.Rally()
        cfg = {
            "Dummy.dummy": [
                {
                    "runner": {
                        "type": "constant",
                        "times": 100,
                        "concurrency": 5
                    }
                }
            ]
        }
        config = utils.TaskConfig(cfg)
        rally("task start --task %s" % config.filename)
        detailed = rally("task detailed")
        self.assertIn("Dummy.dummy", detailed)
        detailed_iterations_data = rally("task detailed --iterations-data")
        self.assertNotIn("n/a", detailed_iterations_data)

    def test_start_with_empty_config(self):
        """``task start`` with an empty config fails with a clear message."""
        rally = utils.Rally()
        config = utils.TaskConfig(None)
        err = self.assertRaises(
            utils.RallyCliError,
            rally, "task start --task %s" % config.filename)
        self.assertIn("Task config is invalid: `It is empty`", err.output)

    def test_results(self):
        """``task results`` emits result data for a finished task."""
        rally = utils.Rally()
        cfg = self._get_sample_task_config()
        config = utils.TaskConfig(cfg)
        rally("task start --task %s" % config.filename)
        self.assertIn("result", rally("task results"))

    def test_results_with_wrong_task_id(self):
        """``task results`` fails for an unknown task UUID."""
        rally = utils.Rally()
        self.assertRaises(utils.RallyCliError,
                          rally, "task results --uuid %s" % FAKE_TASK_UUID)

    def test_import_results(self):
        """Exported results can be re-imported as a new task."""
        rally = utils.Rally()
        cfg = self._get_sample_task_config()
        config = utils.TaskConfig(cfg)
        rally("task start --task %s" % config.filename)

        # Round-trip via the legacy "task results" JSON dump.
        json_report = rally.gen_report_path(extension="json")
        with open(json_report, "w+") as f:
            f.write(rally("task results", no_logs=True))
        import_print = rally("task import --file %s" % json_report)
        task_uuid = re.search(r"UUID:\s([a-z0-9\-]+)",
                              import_print).group(1)
        self.assertIn("Dummy.dummy_random_fail_in_atomic",
                      rally("task results --uuid %s" % task_uuid))

        # new json report
        rally("task report --json --out %s" % json_report, no_logs=True)
        import_print = rally("task import --file %s" % json_report)
        task_uuid = re.search(r"UUID:\s([a-z0-9\-]+)",
                              import_print).group(1)
        self.assertIn("Dummy.dummy_random_fail_in_atomic",
                      rally("task report --uuid %s --json" % task_uuid))

    def test_abort_with_wrong_task_id(self):
        """``task abort`` fails for an unknown task UUID."""
        rally = utils.Rally()
        self.assertRaises(utils.RallyCliError,
                          rally, "task abort --uuid %s" % FAKE_TASK_UUID)

    def test_delete_with_wrong_task_id(self):
        """``task delete`` fails for an unknown task UUID."""
        rally = utils.Rally()
        self.assertRaises(utils.RallyCliError,
                          rally, "task delete --uuid %s" % FAKE_TASK_UUID)

    def test_detailed_with_wrong_task_id(self):
        """``task detailed`` fails for an unknown task UUID."""
        rally = utils.Rally()
        self.assertRaises(utils.RallyCliError,
                          rally, "task detailed --uuid %s" % FAKE_TASK_UUID)

    def test_report_with_wrong_task_id(self):
        """``task report`` fails and names the missing record."""
        rally = utils.Rally()
        e = self.assertRaises(utils.RallyCliError,
                              rally,
                              "task report --uuid %s" % FAKE_TASK_UUID)
        self.assertIn(
            "Record for uuid: %s not found in table task" % FAKE_TASK_UUID,
            str(e))

    def test_sla_check_with_wrong_task_id(self):
        """``task sla-check`` fails for an unknown task UUID."""
        rally = utils.Rally()
        self.assertRaises(utils.RallyCliError,
                          rally, "task sla-check --uuid %s" % FAKE_TASK_UUID)

    def test_status_with_wrong_task_id(self):
        """``task status`` fails for an unknown task UUID."""
        rally = utils.Rally()
        self.assertRaises(utils.RallyCliError,
                          rally, "task status --uuid %s" % FAKE_TASK_UUID)

    def _assert_html_report_libs_are_embedded(self, file_path, expected=True):
        """Check whether JS/CSS libraries are embedded in an HTML report.

        NOTE(review): the provided source chunk was truncated in the
        middle of this method, immediately after ``external_signatures
        = [`` (the stripped text apparently began with angle-bracketed
        markup such as ``<link ...``). The remainder of the method could
        not be recovered from this chunk — restore it from version
        control before relying on this helper.
        """
        embedded_signatures = ["Copyright (c) 2011-2014 Novus Partners, Inc.",
                               "AngularJS v1.3.3",
                               "Copyright (c) 2010-2015, Michael Bostock"]
        # TODO(review): restore the external-link signatures and the
        # assertion logic lost to source mangling.
        external_signatures = []
        raise NotImplementedError(
            "Method body was lost during source mangling; "
            "restore it from version control.")