Fix cluster/node group template tests

The CLI tests for the update of cluster templates and node group templates
now check whether the change took effect, as opposed to only calling the
commands. Also includes small refactorings of these tests.

Change-Id: I37a1fa8bb2638b88822e7556114f0db9b870f298
This commit is contained in:
Raissa Sarmento 2017-02-07 11:36:21 -03:00
parent 02e2208f76
commit a7cbd470c7
4 changed files with 45 additions and 48 deletions

View File

@@ -18,6 +18,7 @@ from tempest import config
from tempest.lib.cli import base
from tempest.test import BaseTestCase
from tempest.lib import exceptions as exc
from tempest.lib.common.utils import data_utils
from sahara_tempest_plugin.common import plugin_utils
@@ -67,14 +68,12 @@ class ClientTestBase(base.ClientTestBase):
result = self.parser.listing(command_for_item)
return result
def find_in_listing(self, result, name):
check_table = None
def find_in_listing(self, result, value, field='name'):
for line in result:
if line['Field'] == 'Name':
self.assertEqual(line['Value'], name)
check_table = True
if check_table is None:
raise self.skipException('No table to show information')
if line['Field'].lower() == field.lower():
self.assertEqual(line['Value'].lower(), value.lower())
return
raise self.skipException('No table to show information')
def check_if_delete(self, command, name):
delete_cmd = self.openstack('dataprocessing %s delete' % command,
@@ -84,6 +83,14 @@ class ClientTestBase(base.ClientTestBase):
# have the first letter capitalized.
self.assertEqual(delete_cmd.lower(), result.lower())
def update_resource_value(self, command, value, params):
    """Update a resource through the CLI and verify the change took effect.

    :param command: base CLI resource command, e.g. 'cluster template'
    :param value: current name of the resource to update
    :param params: extra update argument(s) preceding the new value,
        e.g. '--name'
    :returns: the new (randomized) value assigned to the resource
    """
    # rand_name presumably derives a unique name from *value* so
    # repeated runs don't collide -- TODO confirm against tempest.lib.
    new_value = data_utils.rand_name(value)
    # Builds '<command> update <value> <params> <new_value>'.
    command = '%s update %s' % (command, value)
    params = '%s %s' % (params, new_value)
    update_result = self.listing_result('%s %s' % (command, params))
    # Assert the update actually happened: the new value must appear in
    # the output table (default field 'name'); skips if no table shown.
    self.find_in_listing(update_result, new_value)
    return new_value
def delete_resource(self, command, name):
list_of_resources = self.listing_result('%s list' % command)
list_of_resource_names = [r['Name'] for r in list_of_resources]

View File

@@ -45,17 +45,11 @@ class SaharaClusterTemplateCLITest(base.ClientTestBase):
cluster_template_name)
def openstack_cluster_template_update(self, cluster_template_name):
new_cluster_template_name = ''.join([cluster_template_name, '1'])
self.assertTableStruct(
self.listing_result(
''.join(['cluster template update --name ',
new_cluster_template_name, ' ',
cluster_template_name])),
[
'Field',
'Value'
])
return new_cluster_template_name
cmd = 'cluster template'
new_template_name = self.update_resource_value(cmd,
cluster_template_name,
'--name')
return new_template_name
def openstack_cluster_template_delete(self, cluster_template_name):
self.check_if_delete('cluster template', cluster_template_name)

View File

@@ -61,23 +61,19 @@ class SaharaNodeGroupCLITest(base.ClientTestBase):
this arg, there are several available updates of node group:
name, public/private, protected/unprotected
"""
new_node_group_name = None
cmd = 'node group template update %s' % node_group_name
cmd = 'node group template'
if update_field == 'name':
new_node_group_name = data_utils.rand_name(node_group_name)
update_cmd = '--%s %s' % (update_field, new_node_group_name)
elif update_field:
new_node_group_name = self.update_resource_value(cmd,
node_group_name,
'--name')
return new_node_group_name
elif update_field in ('protected', 'unprotected'):
# here we check only updating with public/protected flags for now
update_cmd = '--%s' % update_field
else:
# if update_field is None, update_command should be empty
update_cmd = ''
self.assertTableStruct(
self.listing_result('%s %s' % (cmd, update_cmd)), [
'Field',
'Value'
])
return new_node_group_name
update_cmd = 'update %s --%s' % (node_group_name, update_field)
result = self.listing_result('%s %s' % (cmd, update_cmd))
is_protected_value = str(update_field == 'protected')
self.find_in_listing(result, is_protected_value, 'is protected')
self.assertTableStruct(result, ['Field', 'Value'])
def openstack_node_group_template_delete(self, node_group_name):
self.check_if_delete('node group template', node_group_name)

View File

@@ -25,6 +25,7 @@ from sahara_tempest_plugin.tests.cli import data_sources
from sahara_tempest_plugin.tests.cli import job_types
TEMPEST_CONF = config.CONF
NODE_GROUP_TEMPLATE = 'node group template'
class Scenario(images.SaharaImageCLITest,
@@ -48,54 +49,53 @@ class Scenario(images.SaharaImageCLITest,
def test_node_group_cli(self):
master_ngt = self.openstack_node_group_template_create('master', '4')
worker_ngt = self.openstack_node_group_template_create('worker', '3')
self.addCleanup(self.delete_resource, 'node group template',
master_ngt)
self.addCleanup(self.delete_resource, 'node group template',
worker_ngt)
self.addCleanup(self.delete_resource, NODE_GROUP_TEMPLATE, master_ngt)
self.addCleanup(self.delete_resource, NODE_GROUP_TEMPLATE, worker_ngt)
self.openstack_node_group_template_list()
new_master_ngt = self.openstack_node_group_template_update(
master_ngt, update_field='name')
self.addCleanup(self.delete_resource, 'node group template',
self.addCleanup(self.delete_resource, NODE_GROUP_TEMPLATE,
new_master_ngt)
self.openstack_node_group_template_show(new_master_ngt)
self.openstack_node_group_template_delete(new_master_ngt)
self.negative_try_to_delete_protected_node_group(worker_ngt)
self.openstack_node_group_template_delete(worker_ngt)
self.wait_for_resource_deletion(new_master_ngt, 'node group template')
self.wait_for_resource_deletion(worker_ngt, 'node group template')
self.wait_for_resource_deletion(new_master_ngt, NODE_GROUP_TEMPLATE)
self.wait_for_resource_deletion(worker_ngt, NODE_GROUP_TEMPLATE)
self.negative_delete_removed_node_group(worker_ngt)
def test_cluster_template_cli(self):
cluster_template_cmd = 'cluster template'
ng_master = (
self.openstack_node_group_template_create('tmp-master', '4'))
ng_worker = (
self.openstack_node_group_template_create('tmp-worker', '3'))
self.addCleanup(self.delete_resource, 'node group template',
self.addCleanup(self.delete_resource, NODE_GROUP_TEMPLATE,
ng_master)
self.addCleanup(self.delete_resource, 'node group template',
self.addCleanup(self.delete_resource, NODE_GROUP_TEMPLATE,
ng_worker)
cluster_template_name = (
self.openstack_cluster_template_create(ng_master, ng_worker))
self.addCleanup(self.delete_resource, 'cluster template',
self.addCleanup(self.delete_resource, cluster_template_cmd,
cluster_template_name)
self.openstack_cluster_template_list()
self.openstack_cluster_template_show(cluster_template_name)
new_cluster_template_name = self.openstack_cluster_template_update(
cluster_template_name)
self.addCleanup(self.delete_resource, 'cluster template',
self.addCleanup(self.delete_resource, cluster_template_cmd,
new_cluster_template_name)
self.openstack_cluster_template_delete(new_cluster_template_name)
self.wait_for_resource_deletion(new_cluster_template_name, 'cluster '
'template')
self.wait_for_resource_deletion(new_cluster_template_name,
cluster_template_cmd)
self.openstack_node_group_template_delete(ng_master)
self.openstack_node_group_template_delete(ng_worker)
self.wait_for_resource_deletion(ng_master, 'node group template')
self.wait_for_resource_deletion(ng_worker, 'node group template')
self.wait_for_resource_deletion(ng_master, NODE_GROUP_TEMPLATE)
self.wait_for_resource_deletion(ng_worker, NODE_GROUP_TEMPLATE)
@decorators.skip_because(bug="1629295")
def test_cluster_cli(self):