Adapt python client tests to use Tempest plugin interface

Use the Tempest plugin interface for python client tests (which
are already tempest-based) instead of copying the tests inside
the tempest tree.

Inspired by the same type of change applied to Manila,
see Ie5ed64a6777ed1acf8dd56522c26705ae897596d

Depends-On: I06f1e13207cc6d661c078c4c4cf1ae7974ecf3da
Change-Id: I6073a528413aefd96882179a1eb8bbe715c6417b
This commit is contained in:
Luigi Toscano 2015-09-18 14:13:04 +02:00
parent 494a019178
commit 09279d54d5
15 changed files with 141 additions and 120 deletions

View File

View File

@ -4,7 +4,21 @@ Tests for Sahara Client in Tempest
How to run
----------
Get the latest tempest resources from GitHub:
Get the latest sahara resources from the appropriate mirror:
.. sourcecode:: console
$ git clone https://github.com/openstack/sahara.git
..
Install sahara, in order to register the tempest plugin interface:
.. sourcecode:: console
$ pip install $SAHARA_ROOT_DIR
..
Get the latest tempest resources from the appropriate mirror:
.. sourcecode:: console
@ -22,58 +36,68 @@ from ``tempest/etc/tempest.conf.sample``:
Some configuration options are required for running tests. Here is the list:
[DEFAULT]
lock_path=
.. sourcecode:: ini
[identity]
uri=
uri_v3=
username=
tenant_name=
password=
admin_username=
admin_tenant_name=
admin_password=
[DEFAULT]
[service_available]
sahara=true
neutron=true
[identity]
uri=
uri_v3=
username=
tenant_name=
password=
admin_username=
admin_tenant_name=
admin_password=
Get the latest sahara resources from GitHub:
[compute]
fixed_network_name=
flavor_ref=
.. sourcecode:: console
[network]
floating_network_name=
[data_processing]
fake_image_id=
[scenario]
ssh_user=
[service_available]
sahara=true
neutron=true
$ git clone https://github.com/openstack/sahara.git
..
Copy Sahara Tempest tests directory to tempest:
All the parameters above are defined by tempest, with the exception of
data_processing.fake_image_id, which is defined by the scenario python
client tests here.
.. sourcecode:: console
Other relevant parameters (all defined by scenario python client tests):
.. sourcecode:: ini
[data_processing]
...
endpoint_type=
catalog_type=
saharaclient_version=1.1
sahara_url=
cluster_timeout=1800
request_timeout=10
$ cp -r $SAHARA_ROOT_DIR/sahara/tests/tempest .
..
Create a configuration file ``tempest/scenario/data_processing/etc/sahara_tests.conf`` from
``tempest/scenario/data_processing/etc/sahara_tests.conf.sample``:
When configuration is finished, you can launch the tests from tempest with:
.. sourcecode:: console
$ cp tempest/scenario/data_processing/etc/sahara_tests.conf.sample tempest/scenario/data_processing/etc/sahara_tests.conf
..
All options should be set. Some of them have sensible defaults and can be left
unchanged; others should be specified explicitly.
When configuration is finished, you can launch the tests with:
.. sourcecode:: console
$ tox -e all -- tempest.scenario.data_processing.client_tests
$ tox -e all-plugin -- tempest.scenario.data_processing.client_tests
..
If you want to launch all Sahara tests in Tempest, you can do this with the ``data_processing`` tag:
.. sourcecode:: console
$ tox -e all -- data_processing
..
$ tox -e all-plugin -- data_processing
..

View File

@ -20,11 +20,9 @@ from saharaclient.api import base as sab
from saharaclient import client as sahara_client
from tempest import config
from tempest import exceptions
from tempest.scenario.data_processing import config as sahara_test_config
from tempest.scenario import manager
CONF = sahara_test_config.SAHARA_TEST_CONF
TEMPEST_CONF = config.CONF
LOG = logging.getLogger(__name__)
@ -43,20 +41,20 @@ class BaseDataProcessingTest(manager.ScenarioTest):
credentials = cls.os_primary.credentials
cls.client = sahara_client.Client(
CONF.data_processing.saharaclient_version,
TEMPEST_CONF.data_processing.saharaclient_version,
credentials.username,
credentials.password,
project_name=credentials.tenant_name,
endpoint_type=endpoint_type,
service_type=catalog_type,
auth_url=auth_url,
sahara_url=CONF.data_processing.sahara_url)
sahara_url=TEMPEST_CONF.data_processing.sahara_url)
cls.object_client = cls.os_primary.object_client
cls.container_client = cls.os_primary.container_client
cls.networks_client = cls.os_primary.compute_networks_client
cls.floating_ip_pool = CONF.data_processing.floating_ip_pool
cls.floating_ip_pool = TEMPEST_CONF.network.floating_network_name
if TEMPEST_CONF.service_available.neutron:
cls.floating_ip_pool = cls.get_floating_ip_pool_id_for_neutron()
@ -68,7 +66,7 @@ class BaseDataProcessingTest(manager.ScenarioTest):
'datanode',
'tasktracker'
],
'flavor_id': CONF.data_processing.flavor_id,
'flavor_id': TEMPEST_CONF.compute.flavor_ref,
'floating_ip_pool': cls.floating_ip_pool
}
@ -80,7 +78,7 @@ class BaseDataProcessingTest(manager.ScenarioTest):
'namenode',
'jobtracker'
],
'flavor_id': CONF.data_processing.flavor_id,
'flavor_id': TEMPEST_CONF.compute.flavor_ref,
'floating_ip_pool': cls.floating_ip_pool,
'auto_security_group': True
}
@ -116,21 +114,21 @@ class BaseDataProcessingTest(manager.ScenarioTest):
@classmethod
def get_floating_ip_pool_id_for_neutron(cls):
net_id = cls._find_network_by_name(
CONF.data_processing.floating_ip_pool)
TEMPEST_CONF.network.floating_network_name)
if not net_id:
raise exceptions.NotFound(
'Floating IP pool \'%s\' not found in pool list.'
% CONF.data_processing.floating_ip_pool)
% TEMPEST_CONF.network.floating_network_name)
return net_id
@classmethod
def get_private_network_id(cls):
net_id = cls._find_network_by_name(
CONF.data_processing.private_network)
TEMPEST_CONF.compute.fixed_network_name)
if not net_id:
raise exceptions.NotFound(
'Private network \'%s\' not found in network list.'
% CONF.data_processing.private_network)
% TEMPEST_CONF.compute.fixed_network_name)
return net_id
@classmethod
@ -216,7 +214,7 @@ class BaseDataProcessingTest(manager.ScenarioTest):
return resp_body
def check_cluster_active(self, cluster_id):
timeout = CONF.data_processing.cluster_timeout
timeout = TEMPEST_CONF.data_processing.cluster_timeout
s_time = timeutils.utcnow()
while timeutils.delta_seconds(s_time, timeutils.utcnow()) < timeout:
cluster = self.client.clusters.get(cluster_id)
@ -225,7 +223,7 @@ class BaseDataProcessingTest(manager.ScenarioTest):
if cluster.status == 'Error':
raise exceptions.BuildErrorException(
'Cluster failed to build and is in "Error" status.')
time.sleep(CONF.data_processing.request_timeout)
time.sleep(TEMPEST_CONF.data_processing.request_timeout)
raise exceptions.TimeoutException(
'Cluster failed to get to "Active status within %d seconds.'
% timeout)
@ -255,7 +253,7 @@ class BaseDataProcessingTest(manager.ScenarioTest):
def delete_timeout(
self, resource_client, resource_id,
timeout=CONF.data_processing.cluster_timeout):
timeout=TEMPEST_CONF.data_processing.cluster_timeout):
start = timeutils.utcnow()
while timeutils.delta_seconds(start, timeutils.utcnow()) < timeout:
@ -266,7 +264,7 @@ class BaseDataProcessingTest(manager.ScenarioTest):
return
raise sahara_api_exception
time.sleep(CONF.data_processing.request_timeout)
time.sleep(TEMPEST_CONF.data_processing.request_timeout)
raise exceptions.TimeoutException(
'Failed to delete resource "%s" in %d seconds.'

View File

@ -12,12 +12,14 @@
# License for the specific language governing permissions and limitations
# under the License.
from tempest.scenario.data_processing.client_tests import base
from tempest.scenario.data_processing import config as sahara_test_config
from tempest import config
from tempest import test
from tempest_lib.common.utils import data_utils
CONF = sahara_test_config.SAHARA_TEST_CONF
from sahara.tests.tempest.scenario.data_processing.client_tests import base
TEMPEST_CONF = config.CONF
class ClusterTemplateTest(base.BaseDataProcessingTest):
@ -30,7 +32,7 @@ class ClusterTemplateTest(base.BaseDataProcessingTest):
full_cluster_template['node_groups'] = [
{
'name': 'master-node',
'flavor_id': CONF.data_processing.flavor_id,
'flavor_id': TEMPEST_CONF.compute.flavor_ref,
'node_processes': ['namenode'],
'count': 1
},

View File

@ -12,10 +12,11 @@
# License for the specific language governing permissions and limitations
# under the License.
from tempest.scenario.data_processing.client_tests import base
from tempest import test
from tempest_lib.common.utils import data_utils
from sahara.tests.tempest.scenario.data_processing.client_tests import base
class DataSourceTest(base.BaseDataProcessingTest):
def _check_data_source_create(self, source_body):

View File

@ -12,10 +12,11 @@
# License for the specific language governing permissions and limitations
# under the License.
from tempest.scenario.data_processing.client_tests import base
from tempest import test
from tempest_lib.common.utils import data_utils
from sahara.tests.tempest.scenario.data_processing.client_tests import base
class JobBinariesTest(base.BaseDataProcessingTest):
def _check_job_binary_create(self, binary_body):

View File

@ -12,10 +12,11 @@
# License for the specific language governing permissions and limitations
# under the License.
from tempest.scenario.data_processing.client_tests import base
from tempest import test
from tempest_lib.common.utils import data_utils
from sahara.tests.tempest.scenario.data_processing.client_tests import base
class JobBinaryInternalsTest(base.BaseDataProcessingTest):
def _check_job_binary_internal_create(self):

View File

@ -18,24 +18,24 @@ from oslo_utils import timeutils
from saharaclient.api import base as sab
from tempest import config
from tempest import exceptions
from tempest.scenario.data_processing.client_tests import base
from tempest.scenario.data_processing import config as sahara_test_config
from tempest import test
from tempest_lib.common.utils import data_utils
from tempest_lib import decorators
CONF = sahara_test_config.SAHARA_TEST_CONF
from sahara.tests.tempest.scenario.data_processing.client_tests import base
TEMPEST_CONF = config.CONF
class JobExecutionTest(base.BaseDataProcessingTest):
def _check_register_image(self, image_id):
self.client.images.update_image(
image_id, CONF.data_processing.ssh_username, '')
image_id, TEMPEST_CONF.scenario.ssh_user, '')
reg_image = self.client.images.get(image_id)
self.assertDictContainsSubset(
{'_sahara_username': CONF.data_processing.ssh_username},
{'_sahara_username': TEMPEST_CONF.scenario.ssh_user},
reg_image.metadata)
def _check_image_get(self, image_id):
@ -106,7 +106,7 @@ class JobExecutionTest(base.BaseDataProcessingTest):
'plugin_name': 'fake',
'hadoop_version': '0.1',
'cluster_template_id': cluster_template.id,
'default_image_id': CONF.data_processing.fake_image_id
'default_image_id': TEMPEST_CONF.data_processing.fake_image_id
}
# create cluster
@ -180,7 +180,7 @@ class JobExecutionTest(base.BaseDataProcessingTest):
cluster = self.client.clusters.get(cluster_id)
self.assertEqual('Deleting', cluster.status)
timeout = CONF.data_processing.cluster_timeout
timeout = TEMPEST_CONF.data_processing.cluster_timeout
s_time = timeutils.utcnow()
while timeutils.delta_seconds(s_time, timeutils.utcnow()) < timeout:
try:
@ -188,7 +188,7 @@ class JobExecutionTest(base.BaseDataProcessingTest):
except sab.APIException:
# cluster is deleted
return
time.sleep(CONF.data_processing.request_timeout)
time.sleep(TEMPEST_CONF.data_processing.request_timeout)
raise exceptions.TimeoutException('Cluster failed to terminate'
'in %d seconds.' % timeout)
@ -271,7 +271,7 @@ class JobExecutionTest(base.BaseDataProcessingTest):
@test.attr(type='slow')
@test.services('data_processing')
def test_job_executions(self):
image_id = CONF.data_processing.fake_image_id
image_id = TEMPEST_CONF.data_processing.fake_image_id
self._check_register_image(image_id)
self._check_image_get(image_id)
self._check_image_list(image_id)
@ -294,7 +294,7 @@ class JobExecutionTest(base.BaseDataProcessingTest):
@classmethod
def tearDownClass(cls):
image_list = cls.client.images.list()
image_id = CONF.data_processing.fake_image_id
image_id = TEMPEST_CONF.data_processing.fake_image_id
if image_id in [image.id for image in image_list]:
cls.client.images.unregister_image(image_id)
super(JobExecutionTest, cls).tearDownClass()

View File

@ -12,10 +12,11 @@
# License for the specific language governing permissions and limitations
# under the License.
from tempest.scenario.data_processing.client_tests import base
from tempest import test
from tempest_lib.common.utils import data_utils
from sahara.tests.tempest.scenario.data_processing.client_tests import base
class JobTest(base.BaseDataProcessingTest):
def _check_create_job(self):

View File

@ -12,10 +12,11 @@
# License for the specific language governing permissions and limitations
# under the License.
from tempest.scenario.data_processing.client_tests import base
from tempest import test
from tempest_lib.common.utils import data_utils
from sahara.tests.tempest.scenario.data_processing.client_tests import base
class NodeGroupTemplateTest(base.BaseDataProcessingTest):
def _check_create_node_group_template(self):

View File

@ -12,9 +12,10 @@
# License for the specific language governing permissions and limitations
# under the License.
from tempest.scenario.data_processing.client_tests import base
from tempest import test
from sahara.tests.tempest.scenario.data_processing.client_tests import base
class PluginsTest(base.BaseDataProcessingTest):

View File

@ -14,24 +14,9 @@
from __future__ import print_function
import os
from oslo_config import cfg
def class_wrapper(cls):
    """Turn *cls* into a lazily-instantiated singleton.

    The decorated name becomes a zero-argument factory: the first call
    constructs the single instance, and every later call returns that
    same object.
    """
    _cache = {}

    def get_instance():
        # Build the instance only on first use, then keep reusing it.
        if cls not in _cache:
            _cache[cls] = cls()
        return _cache[cls]

    return get_instance
data_processing_group = cfg.OptGroup(name='data_processing',
title='Data Processing options')
DataProcessingGroup = [
cfg.IntOpt('cluster_timeout',
default=3600,
@ -39,45 +24,11 @@ DataProcessingGroup = [
cfg.IntOpt('request_timeout',
default=10,
help='Timeout (in seconds) between status checks.'),
cfg.StrOpt('floating_ip_pool',
help='Name of IP pool.'),
cfg.StrOpt('private_network',
help='Name of the private network '
'that provides internal connectivity.'),
cfg.StrOpt('fake_image_id',
help='ID of an image which is used for cluster creation.'),
cfg.StrOpt('flavor_id',
help='ID of a flavor.'),
cfg.StrOpt('saharaclient_version',
default='1.1',
help='Version of python-saharaclient'),
cfg.StrOpt('sahara_url',
help='Sahara url as http://ip:port/api_version/tenant_id'),
cfg.StrOpt('ssh_username',
help='Username which is used to log into remote nodes via SSH.')
]
# Singleton (via class_wrapper): SaharaTestConfig() always yields the same
# instance, so the module-level SAHARA_TEST_CONF below is the one shared
# configuration object.
@class_wrapper
class SaharaTestConfig(object):
    """Load the standalone Sahara test options from sahara_tests.conf.

    Reads the optional config file from the ``etc`` directory next to this
    module and exposes the parsed ``data_processing`` option group as the
    ``data_processing`` attribute.
    """

    # Config file is looked up in <this module's directory>/etc.
    DEFAULT_CONFIG_DIR = os.path.join(
        os.path.abspath(os.path.dirname(__file__)), 'etc')
    DEFAULT_CONFIG_FILE = 'sahara_tests.conf'

    def __init__(self):
        config_files = []
        path = os.path.join(self.DEFAULT_CONFIG_DIR, self.DEFAULT_CONFIG_FILE)
        # The config file is optional: if absent, oslo.config falls back to
        # the registered option defaults.
        if os.path.isfile(path):
            config_files.append(path)
        # A private ConfigOpts instance is used (not the global cfg.CONF) so
        # these options never collide with tempest's own configuration.
        conf = cfg.ConfigOpts()
        conf([], project='Sahara-tests',
             default_config_files=config_files)
        conf.register_group(data_processing_group)
        conf.register_opts(DataProcessingGroup, data_processing_group)
        # Expose only the parsed group, not the whole ConfigOpts object.
        self.data_processing = conf.data_processing


# Shared configuration instance used by the client tests.
SAHARA_TEST_CONF = SaharaTestConfig()

View File

@ -0,0 +1,37 @@
# Copyright (c) 2015 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from tempest.test_discover import plugins
import sahara.tests.tempest.scenario.data_processing.config as sahara_config
class SaharaClientsScenarioPlugin(plugins.TempestPlugin):
    """Tempest plugin exposing the Sahara python-client scenario tests."""

    def load_tests(self):
        """Return (test_dir, top_level_dir) for tempest's test discovery."""
        # Locate this module on disk, then derive the repository root by
        # stripping the in-tree test path (and its leading separator).
        rel_path = 'sahara/tests/tempest/scenario/data_processing'
        here = os.path.dirname(os.path.abspath(__file__))
        repo_root = here[:here.find(rel_path) - 1]
        return here, repo_root

    def register_opts(self, conf):
        """Register the extra options of the data_processing section."""
        conf.register_opts(sahara_config.DataProcessingGroup,
                           'data_processing')

    def get_opt_lists(self):
        """Expose the option list for config sample generation."""
        return [('data_processing', sahara_config.DataProcessingGroup)]

View File

@ -61,6 +61,9 @@ sahara.run.mode =
oslo.config.opts =
sahara.config = sahara.config:list_opts
tempest.test_plugins =
sahara_clients_scenario_tests = sahara.tests.tempest.scenario.data_processing.plugin:SaharaClientsScenarioPlugin
[build_sphinx]
all_files = 1
build-dir = doc/build