APIv2 support for scenario tests

Add support for APIv2 clients to the scenario tests, while
keeping the code compatible with APIv1.1. Most of the changes
are due to the renaming of job/job_execution to job_template/job.

A new job (sahara-tests-scenario-v2) runs the APIv2 scenario tests.

Story: 2004511
Task: 28239
Depends-On: https://review.openstack.org/581774
Change-Id: I4c69c5f9d263dc69911117282f3b9daee8650d80
Luigi Toscano 2019-01-10 15:53:21 +01:00
parent 66df22936d
commit 99854290a0
8 changed files with 79 additions and 22 deletions
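
Most of the work lands in the scenario-test client wrapper, which now picks the v1.1 or the v2 manager of python-saharaclient at call time. Below is a minimal sketch of that dispatch pattern, using only the manager names visible in the diff (jobs, job_templates, job_executions) and omitting the rest of the wrapper; it is an illustration, not the full class.

    class SaharaClientSketch(object):

        def __init__(self, sahara_client, api_version='1.1'):
            # sahara_client is an already constructed python-saharaclient Client
            self.sahara_client = sahara_client
            self.api_version = api_version

        def create_job_template(self, *args, **kwargs):
            # APIv1.1 calls these "jobs"; APIv2 renames them to "job_templates"
            if self.api_version == '1.1':
                data = self.sahara_client.jobs.create(*args, **kwargs)
            else:
                data = self.sahara_client.job_templates.create(*args, **kwargs)
            return data.id

        def run_job(self, *args, **kwargs):
            # APIv1.1 "job_executions" become plain "jobs" in APIv2
            if self.api_version == '1.1':
                data = self.sahara_client.job_executions.create(*args, **kwargs)
            else:
                data = self.sahara_client.jobs.create(*args, **kwargs)
            return data.id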


@@ -12,6 +12,7 @@
- openstack-tox-pylint:
voting: false
- sahara-tests-scenario-runner-py3
- sahara-tests-scenario-v2
- sahara-tests-tempest
- sahara-tests-tempest-v2
- sahara-tests-scenario-rocky
@@ -168,6 +169,13 @@
vars:
sahara_scenario_tox_env: venv-py3
- job:
name: sahara-tests-scenario-v2
parent: sahara-tests-scenario
vars:
sahara_scenario_tox_env: venv-py3
sahara_scenario_use_api_v2: True
- job:
name: sahara-tests-scenario-rocky
parent: sahara-tests-scenario


@@ -0,0 +1,8 @@
---
prelude: >
Scenario tests now support APIv2.
features:
- |
Scenario tests can be executed against APIv2.
APIv2 usage is enabled through a new command-line argument
for sahara-scenario (--v2, -2).
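
For reference, the new switch is a plain argparse store_true flag; a minimal, self-contained sketch mirroring the parser change in the diff below (standard library only, the real sahara-scenario parser defines many more options):

    import argparse

    parser = argparse.ArgumentParser(prog='sahara-scenario')
    parser.add_argument('--v2', '-2', default=False, action='store_true',
                        help='Use APIv2')

    args = parser.parse_args(['--v2'])
    assert args.v2 is True  # the flag only toggles APIv2 mode for this run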


@@ -6,6 +6,9 @@
etc/scenario/gate/edp.yaml.mako \
etc/scenario/gate/{{ sahara_scenario_test_template }} \
--os-cloud {{ sahara_cloud_demo }} \
{% if sahara_scenario_use_api_v2|default(False) -%}
--v2 \
{% endif %}
| tee scenario.log
if grep -qE '(FAILED|ERROR:)' scenario.log; then
exit 1


@@ -87,6 +87,7 @@ class BaseTestCase(base.BaseTestCase):
cls.report = False
cls.results_dir = '.'
cls.default_templ_dir = '.'
cls.use_api_v2 = False
def setUp(self):
super(BaseTestCase, self).setUp()
@@ -107,9 +108,12 @@
with open(private_key_file_name, 'w+') as private_key_file:
private_key_file.write(self.private_key)
os.chmod(private_key_file_name, 0o600)
self.plugin_version_option = 'plugin_version'
if not self.use_api_v2:
self.plugin_version_option = 'hadoop_version'
self.plugin_opts = {
'plugin_name': self.testcase['plugin_name'],
'hadoop_version': self.testcase['plugin_version']
self.plugin_version_option: self.testcase['plugin_version']
}
self.cinder = True
self.proxy = False
@@ -129,9 +133,11 @@
self._get_file_with_defaults(
self.credentials.get('ssl_cert')))
api_version = '2' if self.use_api_v2 else '1.1'
self.sahara = clients.SaharaClient(session=session,
service_type=sahara_service_type,
sahara_url=sahara_url)
sahara_url=sahara_url,
api_version=api_version)
self.nova = clients.NovaClient(session=session)
self.neutron = clients.NeutronClient(session=session)
# swiftclient doesn't support keystone sessions
@@ -762,9 +768,9 @@
return id
def __create_job(self, *args, **kwargs):
id = self.sahara.create_job(*args, **kwargs)
id = self.sahara.create_job_template(*args, **kwargs)
if not self.testcase['retain_resources']:
self.addCleanup(self.sahara.delete_job, id)
self.addCleanup(self.sahara.delete_job_template, id)
return id
def __run_job(self, *args, **kwargs):
@@ -814,7 +820,7 @@
tbs.extend(check['traceback'])
tbs.append("")
print("Results of testing plugin", self.plugin_opts['plugin_name'],
self.plugin_opts['hadoop_version'])
self.plugin_opts[self.plugin_version_option])
print(table)
print("\n".join(tbs), file=sys.stderr)
@@ -827,9 +833,14 @@
filename = {"time": time.strftime('%Y%m%d%H%M%S',
time.localtime())}
filename.update(self.plugin_opts)
# let's normalize this variable so that we can use
# a stable name in the format string later.
if 'hadoop_version' in filename:
filename['plugin_version'] = filename['hadoop_version']
del filename['hadoop_version']
report_file_name = os.path.join(
self.results_dir,
'{plugin_name}_{hadoop_version}-{time}'.format(**filename))
'{plugin_name}_{plugin_version}-{time}'.format(**filename))
time.strftime('%Y%m%d%H%M%S', time.localtime())
with open(report_file_name, 'w+') as report_file:
report_file.write(str(self._results))


@@ -62,7 +62,12 @@ class Client(object):
class SaharaClient(Client):
def __init__(self, *args, **kwargs):
self.sahara_client = sahara_client.Client('1.1', *args, **kwargs)
self.api_version = '1.1'
if 'api_version' in kwargs:
self.api_version = kwargs['api_version']
del kwargs['api_version']
self.sahara_client = sahara_client.Client(self.api_version, *args,
**kwargs)
def create_node_group_template(self, *args, **kwargs):
data = self.sahara_client.node_group_templates.create(*args, **kwargs)
@@ -128,23 +133,33 @@
self.sahara_client.job_binaries.delete,
job_binary_id)
def create_job(self, *args, **kwargs):
data = self.sahara_client.jobs.create(*args, **kwargs)
def create_job_template(self, *args, **kwargs):
if self.api_version == '1.1':
data = self.sahara_client.jobs.create(*args, **kwargs)
else:
data = self.sahara_client.job_templates.create(*args, **kwargs)
return data.id
def delete_job(self, job_id):
return self.delete_resource(
self.sahara_client.jobs.delete,
job_id)
def delete_job_template(self, job_id):
if self.api_version == '1.1':
delete_function = self.sahara_client.jobs.delete
else:
delete_function = self.sahara_client.job_templates.delete
return self.delete_resource(delete_function, job_id)
def run_job(self, *args, **kwargs):
data = self.sahara_client.job_executions.create(*args, **kwargs)
if self.api_version == '1.1':
data = self.sahara_client.job_executions.create(*args, **kwargs)
else:
data = self.sahara_client.jobs.create(*args, **kwargs)
return data.id
def delete_job_execution(self, job_execution_id):
return self.delete_resource(
self.sahara_client.job_executions.delete,
job_execution_id)
if self.api_version == '1.1':
delete_function = self.sahara_client.job_executions.delete
else:
delete_function = self.sahara_client.jobs.delete
return self.delete_resource(delete_function, job_execution_id)
def get_cluster(self, cluster_id, show_progress=False):
return self.sahara_client.clusters.get(cluster_id, show_progress)
@@ -154,11 +169,17 @@ class SaharaClient(Client):
return str(data.status)
def get_job_status(self, exec_id):
data = self.sahara_client.job_executions.get(exec_id)
if self.api_version == '1.1':
data = self.sahara_client.job_executions.get(exec_id)
else:
data = self.sahara_client.jobs.get(exec_id)
return str(data.info['status'])
def get_job_info(self, exec_id):
job_execution = self.sahara_client.job_executions.get(exec_id)
if self.api_version == '1.1':
job_execution = self.sahara_client.job_executions.get(exec_id)
else:
job_execution = self.sahara_client.jobs.get(exec_id)
return self.sahara_client.jobs.get(job_execution.job_id)
def get_cluster_id(self, name):


@@ -128,6 +128,8 @@ def get_base_parser():
nargs='?', help='Set of features to enable')
parser.add_argument('--count', default=1, nargs='?', type=valid_count,
help='Specify count of runs current cases.')
parser.add_argument('--v2', '-2', default=False, action='store_true',
help='Use APIv2')
return parser
@@ -159,6 +161,7 @@ def main():
report = args.report
features = args.feature
count = args.count
use_api_v2 = args.v2
auth_values = utils.get_auth_values(cloud_config, args)
@@ -191,7 +194,7 @@
testcases.extend(config['clusters'])
test_dir_path = utils.create_testcase_file(testcases, credentials, network,
report)
report, use_api_v2=use_api_v2)
# run tests
concurrency = config.get('concurrency')


@@ -15,6 +15,7 @@ class ${testcase['class_name']}TestCase(base.BaseTestCase):
cls.report = ${report}
cls.results_dir = '${results_dir}'
cls.default_templ_dir = '${default_templ_dir}'
cls.use_api_v2 = ${use_api_v2}
def test_plugin(self):
self.create_cluster()


@@ -65,7 +65,8 @@ def run_tests(concurrency, test_dir_path):
return tester_runner.returncode
def create_testcase_file(testcases, credentials, network, report):
def create_testcase_file(testcases, credentials, network, report,
use_api_v2=False):
# current directory, where to write reports, key files, etc, if required
results_dir = os.getcwd()
default_templ_dir = os.path.abspath(TEST_TEMPLATE_DIR)
@@ -76,7 +77,8 @@ def create_testcase_file(testcases, credentials, network, report):
credentials=credentials,
network=network, report=report,
results_dir=results_dir,
default_templ_dir=default_templ_dir)
default_templ_dir=default_templ_dir,
use_api_v2=use_api_v2)
test_dir_path = tempfile.mkdtemp()
print("The generated test file located at: %s" % test_dir_path)