Merge "Transaction tasks parameters support is added to CLI"

Jenkins
2016-05-30 16:22:15 +00:00
committed by Gerrit Code Review
11 changed files with 416 additions and 129 deletions


@@ -16,15 +16,14 @@ from fuelclient.cli.actions.base import Action
import fuelclient.cli.arguments as Args
from fuelclient.cli.arguments import group
from fuelclient.cli.formatting import format_table
from fuelclient.objects.deployment_history import DeploymentHistory
from fuelclient.v1.deployment_history import DeploymentHistoryClient
class DeploymentTasksAction(Action):
"""Show deployment tasks
"""
action_name = "deployment-tasks"
acceptable_keys = ("deployment_graph_task_name", "node_id", "status",
"time_start", "time_end")
def __init__(self):
super(DeploymentTasksAction, self).__init__()
@@ -38,6 +37,10 @@ class DeploymentTasksAction(Action):
),
Args.get_status_arg(
"Statuses: pending, error, ready, running, skipped"
),
Args.get_tasks_names_arg(
"Show deployment history for specific deployment tasks names "
"and group output by task"
)
]
self.flag_func_map = (
@@ -58,14 +61,35 @@ class DeploymentTasksAction(Action):
To display deployment tasks for some statuses (pending, error,
ready, running) on some nodes:
fuel deployment-tasks --task-id 5 --status error --nodes 1,2
To display results for certain deployment tasks only:
fuel deployment-tasks --task-id 5
--task-name task-name1,task-name2
"""
tasks_data = DeploymentHistory.get_all(
params.task,
params.node,
params.status)
client = DeploymentHistoryClient()
tasks_names = getattr(params, 'task-name')
group_by_tasks = bool(tasks_names)
statuses = params.status.split(',') if params.status else []
nodes = params.node.split(',') if params.node else []
tasks_names = tasks_names.split(',') if tasks_names else []
data = client.get_all(
transaction_id=params.task,
nodes=nodes,
statuses=statuses,
tasks_names=tasks_names,
group_by_tasks=group_by_tasks
)
if group_by_tasks:
table_keys = client.tasks_records_keys
else:
table_keys = client.history_records_keys
self.serializer.print_to_output(
tasks_data,
format_table(tasks_data, acceptable_keys=self.acceptable_keys))
self.serializer.print_to_output(
data,
format_table(
data,
acceptable_keys=table_keys
)
)
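The action now does the comma-splitting of the v1 flag values itself. A minimal standalone sketch of that handling (plain Python; the sample values are hypothetical):

def split_csv(value):
    # Empty or missing flag values become empty lists.
    return value.split(',') if value else []

assert split_csv('ready,skipped') == ['ready', 'skipped']
assert split_csv(None) == []
# Grouped output is enabled only when --task-name was actually passed:
group_by_tasks = bool(split_csv('task-name1,task-name2'))  # True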


@@ -713,9 +713,7 @@ def get_upload_file_arg(help_msg):
def get_status_arg(help_msg):
default_kwargs = {
"action": SetAction,
"flags": ("--status",),
"nargs": '+',
"default": None,
"help": help_msg
}
@@ -724,10 +722,17 @@ def get_status_arg(help_msg):
def get_deployment_node_arg(help_msg):
default_kwargs = {
"action": SetAction,
"flags": ("--node-id",),
"nargs": '+',
"default": None,
"help": help_msg
}
return get_arg("node", **default_kwargs)
def get_tasks_names_arg(help_msg):
default_kwargs = {
"flags": ("-d", "--task-name",),
"default": None,
"help": help_msg
}
return get_arg("task-name", **default_kwargs)


@@ -31,6 +31,8 @@ def format_table(data, acceptable_keys=None, column_to_join=None):
:acceptable_keys list(str): list of keys for which to create table
also specifies their order
"""
# prepare columns
if column_to_join is not None:
for data_dict in data:
for column_name in column_to_join:
@@ -38,13 +40,27 @@ def format_table(data, acceptable_keys=None, column_to_join=None):
sorted(data_dict[column_name])
)
if acceptable_keys is not None:
rows = [tuple(value[key] for key in acceptable_keys)
rows = [tuple(value.get(key, "") for key in acceptable_keys)
for value in data]
header = tuple(acceptable_keys)
else:
rows = [tuple(x.values()) for x in data]
header = tuple(data[0].keys())
number_of_columns = len(header)
# split multi-line cells unless automatic column merging is in effect
if column_to_join:
def format_cell(cell):
return [cell or ""]
else:
def format_cell(cell):
return six.text_type(cell).split('\n')
rows = [
[format_cell(cell) if cell is not None else [''] for cell in row]
for row in rows
]
# calculate columns widths
column_widths = dict(
zip(
range(number_of_columns),
@@ -53,21 +69,42 @@ def format_table(data, acceptable_keys=None, column_to_join=None):
)
for row in rows:
column_widths.update(
(index, max(column_widths[index], len(six.text_type(element))))
for index, element in enumerate(row)
(
index,
max(
column_widths[index],
max(len(six.text_type(line)) for line in cell)
)
)
for index, cell in enumerate(row)
)
# make output
hor_delimeter = u'-+-'.join(column_widths[column_index] * u'-'
for column_index in range(number_of_columns))
row_template = u' | '.join(
u"{{{0}:{1}}}".format(idx, width)
for idx, width in column_widths.items()
)
return u'\n'.join(
(row_template.format(*header),
u'-|-'.join(column_widths[column_index] * u'-'
for column_index in range(number_of_columns)),
u'\n'.join(row_template.format(*map(six.text_type, x))
for x in rows))
)
output_lines = [
row_template.format(*header),
hor_delimeter
]
for row in rows:
max_cell_lines = max(len(cell) for cell in row)
for cell_line_no in range(max_cell_lines):
output_lines.append(
row_template.format(
*list(
cell[cell_line_no] if len(cell) > cell_line_no else u""
for cell in row
)
)
)
return u'\n'.join(output_lines)
def quote_and_join(words):
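To see the new multi-line cell handling in isolation, a tiny standalone re-creation (a sketch, not the project function; `six` is used just as in the module above):

import six

def render(rows, keys):
    # Split every cell on '\n' and emit one physical line per cell line,
    # mirroring the logic added to format_table above.
    cells = [[six.text_type(r.get(k, '')).split('\n') for k in keys]
             for r in rows]
    widths = [max([len(k)] + [len(line) for row in cells for line in row[i]])
              for i, k in enumerate(keys)]
    fmt = u' | '.join(u'{%d:%d}' % (i, w) for i, w in enumerate(widths))
    out = [fmt.format(*keys), u'-+-'.join(w * u'-' for w in widths)]
    for row in cells:
        for n in range(max(len(c) for c in row)):
            out.append(fmt.format(*[c[n] if n < len(c) else u'' for c in row]))
    return u'\n'.join(out)

print(render(
    [{'task_name': 'pending-task',
      'status_by_node': '1 - pending - not started\n'
                        '2 - pending - not started'}],
    ('task_name', 'status_by_node')))
# task_name    | status_by_node
# -------------+--------------------------
# pending-task | 1 - pending - not started
#              | 2 - pending - not started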


@@ -144,12 +144,12 @@ class TaskShow(TaskMixIn, base.BaseShowCommand):
class TaskHistoryShow(TaskMixIn, base.BaseListCommand):
"""Show deployment history about task with given id"""
"""Show deployment history about task with given ID."""
entity_name = 'deployment_history'
columns = (
'deployment_graph_task_name',
'task_name',
'node_id',
'status',
'time_start',
@@ -158,8 +158,8 @@ class TaskHistoryShow(TaskMixIn, base.BaseListCommand):
def get_parser(self, prog_name):
parser = super(TaskHistoryShow, self).get_parser(prog_name)
parser.add_argument('id', type=int,
help='Id of the Task.')
parser.add_argument('id', type=int, help='Id of the Task')
parser.add_argument(
'-n',
'--nodes',
@@ -175,17 +175,25 @@ class TaskHistoryShow(TaskMixIn, base.BaseListCommand):
nargs='+',
help='Show deployment history for specific statuses')
parser.add_argument(
'-d',
'--tasks-names',
type=str,
nargs='+',
help='Show deployment history for specific deployment task names')
return parser
def take_action(self, parsed_args):
data = self.client.get_all(
transaction_id=parsed_args.id,
nodes=parsed_args.nodes,
statuses=parsed_args.statuses)
statuses=parsed_args.statuses,
tasks_names=parsed_args.tasks_names)
data = data_utils.get_display_data_multi(self.columns, data)
return (self.columns, data)
return self.columns, data
class TaskNetworkConfigurationDownload(TaskInfoFileMixIn, base.BaseCommand):
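The v2 options take space-separated values, and `take_action` above forwards them to the facade unchanged. What that call amounts to (a sketch; the id and task names are example values, `get_client` as used in the tests below):

import fuelclient

client = fuelclient.get_client('deployment_history', 'v1')
# fuel2 task history show 1 --nodes 1 2 --statuses ready --tasks-names task1
history = client.get_all(
    transaction_id=1,
    nodes=['1', '2'],
    statuses=['ready'],
    tasks_names=['task1'],  # the parameter this change introduces
)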


@@ -17,7 +17,6 @@ functionality from nailgun objects.
"""
from fuelclient.objects.base import BaseObject
from fuelclient.objects.deployment_history import DeploymentHistory
from fuelclient.objects.environment import Environment
from fuelclient.objects.node import Node
from fuelclient.objects.node import NodeCollection


@@ -1,33 +0,0 @@
# Copyright 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from fuelclient.objects.base import BaseObject
class DeploymentHistory(BaseObject):
class_api_path = "transactions/{transaction_id}/deployment_history/"\
"?nodes={nodes}&statuses={statuses}"
@classmethod
def get_all(cls, transaction_id, nodes=None, statuses=None):
statuses = ",".join(str(s) for s in statuses) if statuses else ""
nodes = ",".join(str(n) for n in nodes) if nodes else ""
history = cls.connection.get_request(
cls.class_api_path.format(
transaction_id=transaction_id,
nodes=nodes,
statuses=statuses))
return history
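The object above is deleted; its job moves to the v1 facade. A before/after sketch (the transaction id is an example value):

# Before (the file removed above):
#   from fuelclient.objects.deployment_history import DeploymentHistory
#   history = DeploymentHistory.get_all(transaction_id, nodes, statuses)
# After (per the client diff below):
from fuelclient.v1.deployment_history import DeploymentHistoryClient

client = DeploymentHistoryClient()
history = client.get_all(transaction_id=1, nodes=None, statuses=None,
                         tasks_names=None)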


@@ -16,53 +16,87 @@
from mock import patch
from fuelclient.cli.actions import DeploymentTasksAction
from fuelclient.cli.formatting import format_table
from fuelclient.cli.serializers import Serializer
from fuelclient.tests.unit.v1 import base
HISTORY_API_OUTPUT = [
{
"status": "ready",
"time_start": "2016-03-25T17:22:10.687135",
"time_end": "2016-03-25T17:22:30.830701",
"node_id": "1",
"deployment_graph_task_name": "controller_remaining_tasks"
},
{
"status": "skipped",
"time_start": "2016-03-25T17:23:37.313212",
"time_end": "2016-03-25T17:23:37.313234",
"node_id": "2",
"deployment_graph_task_name": "ironic-compute"
}
]
from fuelclient.tests import utils
from fuelclient.v1.deployment_history import DeploymentHistoryClient
class TestDeploymentTasksAction(base.UnitTestCase):
def assert_print_table(self, print_mock, tasks):
print_mock.assert_called_once_with(
tasks, format_table(
tasks,
acceptable_keys=DeploymentTasksAction.acceptable_keys))
@patch.object(Serializer, 'print_to_output')
def test_show_full_history(self, print_mock):
self.m_history_api = self.m_request.get(
'/api/v1/transactions/1/deployment_history/?nodes=&statuses=',
json=HISTORY_API_OUTPUT)
'/api/v1/transactions/1/deployment_history/?'
'nodes=&'
'statuses=&'
'tasks_names=',
json=utils.get_fake_deployment_history())
self.execute(
['fuel', 'deployment-tasks', '--tid', '1']
)
print_mock.assert_called_once_with(
utils.get_fake_deployment_history(convert_legacy_fields=True),
format_table(
utils.get_fake_deployment_history(convert_legacy_fields=True),
acceptable_keys=DeploymentHistoryClient.history_records_keys))
self.assert_print_table(print_mock, HISTORY_API_OUTPUT)
@patch.object(Serializer, 'print_to_output')
def test_show_tasks_history(self, print_mock):
tasks_after_facade = [
{
'task_name': 'controller-remaining-tasks',
'task_parameters': 'parameters: {puppet_manifest: /etc/puppet/'
'modules/osnailyfacter/modular/globals/'
'globals.pp,\n puppet_modules: /etc/'
'puppet/modules, timeout: 3600}\nrole: '
'[controller]\ntype: puppet\nversion: 2.0.0'
'\n',
'status_by_node': '1 - ready - 2016-03-25T17:22:10 - '
'2016-03-25T17:22:30\n'
'2 - ready - 2016-03-25T17:22:10 - '
'2016-03-25T17:22:30'
},
{
'task_name': 'pending-task',
'task_parameters': 'parameters: {puppet_manifest: /etc/puppet/'
'modules/osnailyfacter/modular/globals/'
'globals.pp,\n puppet_modules: /etc/puppet'
'/modules, timeout: 3600}\nrole: '
'[controller]\ntype: puppet\nversion: 2.0.0'
'\n',
'status_by_node': '1 - pending - not started - not ended\n'
'2 - pending - not started - not ended'
}
]
self.m_history_api = self.m_request.get(
'/api/v1/transactions/1/deployment_history/?'
'nodes=&'
'statuses=&'
'tasks_names=controller-remaining-tasks,pending-task',
json=utils.get_fake_deployment_history(add_task_data=True))
self.execute(
['fuel', 'deployment-tasks',
'--tid', '1',
'--task-name', 'controller-remaining-tasks,pending-task',
'--node', '1,2']
)
print_mock.assert_called_once_with(
tasks_after_facade,
format_table(
tasks_after_facade,
acceptable_keys=DeploymentHistoryClient.tasks_records_keys))
def test_show_history_for_special_nodes(self):
self.m_history_api = self.m_request.get(
'/api/v1/transactions/1/deployment_history/?nodes=1,2&statuses=',
'/api/v1/transactions/1/deployment_history/?'
'nodes=1,2&'
'statuses=&'
'tasks_names=',
json={})
self.execute(
@@ -72,10 +106,27 @@ class TestDeploymentTasksAction(base.UnitTestCase):
self.assertEqual(self.m_history_api.call_count, 1)
def test_show_history_for_special_tasks(self):
self.m_history_api = self.m_request.get(
'/api/v1/transactions/1/deployment_history/?'
'nodes=&'
'statuses=&'
'tasks_names=test1,test2',
json={})
self.execute(
['fuel', 'deployment-tasks', '--tid', '1',
'--task-name', 'test1,test2']
)
self.assertEqual(self.m_history_api.call_count, 1)
def test_show_history_with_special_statuses(self):
self.m_history_api = self.m_request.get(
'/api/v1/transactions/1/deployment_history/'
'?nodes=&statuses=ready,skipped',
'/api/v1/transactions/1/deployment_history/?'
'nodes=&'
'statuses=ready,skipped&'
'tasks_names=',
json={})
self.execute(
['fuel', 'deployment-tasks', '--tid', '1',
@@ -83,13 +134,16 @@ class TestDeploymentTasksAction(base.UnitTestCase):
)
self.assertEqual(self.m_history_api.call_count, 1)
def test_show_history_with_special_statuses_for_special_nodes(self):
def test_show_history_for_special_statuses_nodes_and_tasks(self):
self.m_history_api = self.m_request.get(
'/api/v1/transactions/1/deployment_history/'
'?nodes=1,2&statuses=ready,skipped',
'/api/v1/transactions/1/deployment_history/?'
'nodes=1,2&'
'statuses=ready,skipped&'
'tasks_names=test1,test2',
json={})
self.execute(
['fuel', 'deployment-tasks', '--tid', '1',
'--status', 'ready,skipped', '--node', '1,2']
'--status', 'ready,skipped', '--node', '1,2',
'--task-name', 'test1,test2']
)
self.assertEqual(self.m_history_api.call_count, 1)


@@ -60,7 +60,23 @@ class TestTaskCommand(test_engine.BaseCLITest):
mock.ANY)
self.m_client.get_all.assert_called_once_with(transaction_id=task_id,
nodes=None,
statuses=None)
statuses=None,
tasks_names=None)
def test_task_history_parameters(self):
task_id = 42
args = 'task history show {task_id} --tasks-names task1 task2 ' \
'--statuses ready error --nodes 1 2'.format(task_id=task_id)
self.m_client.get_all.return_value = \
utils.get_fake_deployment_history()
self.exec_command(args)
self.m_get_client.assert_called_once_with('deployment_history',
mock.ANY)
self.m_client.get_all.assert_called_once_with(
transaction_id=task_id, nodes=['1', '2'],
statuses=['ready', 'error'], tasks_names=['task1', 'task2'])
def _test_cmd(self, cmd, method, cmd_line, client,
return_data, expected_file_path, expected_kwargs):
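Unlike the comma-separated v1 flags, the v2 options are declared with `nargs='+'`, so parsed values already arrive as lists; a standalone check of that argparse behaviour:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('-d', '--tasks-names', type=str, nargs='+')
ns = parser.parse_args(['--tasks-names', 'task1', 'task2'])
assert ns.tasks_names == ['task1', 'task2']  # matches the call assertion above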


@@ -27,17 +27,21 @@ class TestDeploymentHistoryFacade(test_api.BaseLibTest):
self.version = 'v1'
self.transaction_id = '1'
self.res_uri = '/api/{0}/transactions/{1}' \
'/deployment_history/?nodes=&statuses='.format(
self.version, self.transaction_id)
self.res_uri = '/api/{0}/transactions/{1}' \
'/deployment_history/' \
''.format(self.version, self.transaction_id)
self.fake_history = utils.get_fake_deployment_history()
self.client = fuelclient.get_client('deployment_history',
self.version)
self.client = fuelclient.get_client('deployment_history', self.version)
def get_url(self, nodes='', statuses='', tasks_names=''):
return self.res_uri + '?nodes={}&statuses={}&tasks_names={}'.format(
nodes, statuses, tasks_names
)
def test_deployment_history_list(self):
matcher = self.m_request.get(self.res_uri, json=self.fake_history)
matcher = self.m_request.get(self.get_url(), json=self.fake_history)
self.client.get_all(
transaction_id=self.transaction_id,
@@ -45,3 +49,21 @@ class TestDeploymentHistoryFacade(test_api.BaseLibTest):
statuses=None)
self.assertTrue(matcher.called)
def test_deployment_history_parameters(self):
matcher = self.m_request.get(
self.get_url(
nodes='1,2',
statuses='ready,error',
tasks_names='custom_task1,custom_task12'
), json=self.fake_history)
self.client.get_all(
transaction_id=self.transaction_id,
nodes=['1', '2'],
statuses=['ready', 'error'],
tasks_names=['custom_task1', 'custom_task12']
)
self.assertTrue(matcher.called)
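The URL asserted above comes straight from the facade's path template; reconstructing it standalone (template copied from the client diff below, '/api/v1/' prefix as in the tests):

path = ("transactions/{transaction_id}/deployment_history/"
        "?nodes={nodes}&statuses={statuses}&tasks_names={tasks_names}")
print('/api/v1/' + path.format(
    transaction_id='1', nodes='1,2', statuses='ready,error',
    tasks_names='custom_task1,custom_task12'))
# /api/v1/transactions/1/deployment_history/?nodes=1,2&statuses=ready,error&tasks_names=custom_task1,custom_task12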


@@ -13,28 +13,101 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import itertools
def get_fake_deployment_history():
"""Create a fake deployment history
def get_fake_deployment_history(
add_task_data=False, convert_legacy_fields=False):
"""Create a fake deployment history.
Returns a serialized, parametrized representation of a dumped Fuel
Deployment History, sized to resemble a typical amount of data.
:param add_task_data: add task description to history records using
Fuel 10.0 history output format
:type add_task_data: bool
:param convert_legacy_fields: rename Fuel 9.0 output fields to the
10.0 format when True
:type convert_legacy_fields: bool
:returns: fake deployment fixtures
:rtype: list[dict]
"""
return [
{
"status": "ready",
"time_start": "2016-03-25T17:22:10.687135",
"time_end": "2016-03-25T17:22:30.830701",
"node_id": "1",
"deployment_graph_task_name": "controller_remaining_tasks"
},
{
"status": "skipped",
"time_start": "2016-03-25T17:23:37.313212",
"time_end": "2016-03-25T17:23:37.313234",
"node_id": "2",
"deployment_graph_task_name": "ironic-compute"
}
]
if add_task_data:
return list(itertools.chain(*[[
{
'status': 'ready',
'time_start': '2016-03-25T17:22:10.687135',
'time_end': '2016-03-25T17:22:30.830701',
'node_id': node_id,
'task_name': 'controller-remaining-tasks',
'type': 'puppet',
'role': ['controller'],
'version': '2.0.0',
'parameters': {
'puppet_manifest': '/etc/puppet/modules/osnailyfacter'
'/modular/globals/globals.pp',
'puppet_modules': '/etc/puppet/modules',
'timeout': 3600
}
},
{
'status': 'skipped',
'time_start': '2016-03-25T17:23:37.313212',
'time_end': '2016-03-25T17:23:37.313234',
'node_id': node_id,
'task_name': 'ironic-compute',
'type': 'puppet',
'role': ['controller'],
'version': '2.0.0',
'parameters': {
'puppet_manifest': '/etc/puppet/modules/osnailyfacter'
'/modular/globals/globals.pp',
'puppet_modules': '/etc/puppet/modules',
'timeout': 3600
}
},
{
'status': 'pending',
'time_start': None,
'node_id': node_id,
'task_name': 'pending-task',
'type': 'puppet',
'role': ['controller'],
'version': '2.0.0',
'parameters': {
'puppet_manifest': '/etc/puppet/modules/osnailyfacter'
'/modular/globals/globals.pp',
'puppet_modules': '/etc/puppet/modules',
'timeout': 3600
}
}
] for node_id in ['1', '2']]))
else:
result = list(itertools.chain(*[[
{
'status': 'ready',
'time_start': '2016-03-25T17:22:10.687135',
'time_end': '2016-03-25T17:22:30.830701',
'node_id': node_id,
'deployment_graph_task_name': 'controller-remaining-tasks'
},
{
'status': 'skipped',
'time_start': '2016-03-25T17:23:37.313212',
'time_end': '2016-03-25T17:23:37.313234',
'node_id': node_id,
'deployment_graph_task_name': 'ironic-compute'
},
{
'status': 'pending',
'time_start': None,
'node_id': node_id,
'deployment_graph_task_name': 'pending-task'
}
] for node_id in ['1', '2']]))
if convert_legacy_fields:
for record in result:
record['task_name'] = record['deployment_graph_task_name']
record.pop('deployment_graph_task_name', None)
return result
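The three fixture flavours the tests above rely on (usage sketch):

legacy = get_fake_deployment_history()                       # Fuel 9.0 fields
renamed = get_fake_deployment_history(convert_legacy_fields=True)
rich = get_fake_deployment_history(add_task_data=True)       # 10.0 format
assert 'deployment_graph_task_name' in legacy[0]
assert 'task_name' in renamed[0]
assert 'parameters' in rich[0]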


@@ -11,6 +11,10 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from collections import defaultdict
import six
import yaml
from fuelclient import objects
from fuelclient.v1 import base_v1
@@ -18,13 +22,91 @@ from fuelclient.v1 import base_v1
class DeploymentHistoryClient(base_v1.BaseV1Client):
_entity_wrapper = objects.DeploymentHistory
class_api_path = "transactions/{transaction_id}/deployment_history/" \
"?nodes={nodes}&statuses={statuses}" \
"&tasks_names={tasks_names}"
def get_all(self, transaction_id, nodes=None, statuses=None):
return self._entity_wrapper.get_all(
transaction_id=transaction_id,
nodes=nodes,
statuses=statuses)
history_records_keys = ("task_name", "node_id", "status",
"time_start", "time_end")
tasks_records_keys = ("task_name", "task_parameters", "status_by_node")
_entity_wrapper = objects.Environment
def get_all(self, transaction_id, nodes=None, statuses=None,
tasks_names=None, group_by_tasks=False):
parameters = {
'statuses': statuses,
'nodes': nodes,
'tasks_names': tasks_names
}
for k in parameters:
parameters[k] = ",".join(str(s) for s in parameters[k]) \
if parameters[k] else ""
history_with_tasks = self.connection.get_request(
self.class_api_path.format(
transaction_id=transaction_id,
**parameters
)
)
# rename legacy field for Fuel 9.0
for record in history_with_tasks:
if 'deployment_graph_task_name' in record:
record['task_name'] = record['deployment_graph_task_name']
record.pop('deployment_graph_task_name', None)
# metadata for each task
tasks_parameters = defaultdict(dict)
# history records grouped by task name
history_records_by_task = defaultdict(list)
# history records in initial order
history_records = []
# split keys into history-specific and task-specific
for record in history_with_tasks:
task_name = record['task_name']
if tasks_names and task_name not in tasks_names:
# the API returned a task that we want to filter out
continue
history_record = {}
for key in record:
if key in self.history_records_keys:
history_record[key] = record[key]
else:
tasks_parameters[task_name][key] = record[key]
history_records.append(history_record)
history_records_by_task[task_name].append(history_record)
if group_by_tasks:
result = []
for task_name, value in sorted(six.iteritems(tasks_parameters)):
statuses_by_node = []
for record in history_records_by_task[task_name]:
time_start = record.get('time_start')
time_start = time_start.partition(u'.')[0] if time_start\
else u'not started'
record['time_start'] = time_start
time_end = record.get('time_end')
time_end = time_end.partition(u'.')[0] if time_end \
else u'not ended'
record['time_end'] = time_end
statuses_by_node.append(
'{node_id} - {status} - {time_start} - {time_end}'
''.format(**record)
)
result.append(
{
"task_name": task_name,
"task_parameters": yaml.safe_dump(
tasks_parameters[task_name]),
"status_by_node": '\n'.join(statuses_by_node)
}
)
return result
else:
return history_records
def get_client(connection):
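Condensing the grouped path of `get_all` above into a runnable fragment (one sample record shaped like the API output; the key split and formatting mirror the code):

import yaml

record = {'task_name': 'netconfig', 'node_id': '1', 'status': 'ready',
          'time_start': '2016-03-25T17:22:10.687135',
          'time_end': '2016-03-25T17:22:30.830701',
          'type': 'puppet'}
history_keys = ('task_name', 'node_id', 'status', 'time_start', 'time_end')
task_params = {k: v for k, v in record.items() if k not in history_keys}
line = '{0} - {1} - {2} - {3}'.format(
    record['node_id'], record['status'],
    record['time_start'].partition('.')[0],
    record['time_end'].partition('.')[0])
print(line)                         # 1 - ready - 2016-03-25T17:22:10 - 2016-03-25T17:22:30
print(yaml.safe_dump(task_params))  # type: puppet  -> the 'task_parameters' cell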