Merge "Tempest testcases for Keypair_sync"
This commit is contained in:
commit
5fec5dd49d
@ -25,3 +25,13 @@ DEFAULT_QUOTAS = {
|
|||||||
u'port': 50, u'security_groups': 10, u'network': 10
|
u'port': 50, u'security_groups': 10, u'network': 10
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# Resource type identifier accepted by the Kingbird sync API for keypairs.
KEYPAIR_RESOURCE_TYPE = "keypair"
# Job status values reported by the resource-sync job API.
JOB_SUCCESS = "SUCCESS"
JOB_PROGRESS = "IN_PROGRESS"
# URL action used to list only currently-active jobs.
JOB_ACTIVE = "active"
JOB_FAILURE = "FAILURE"
|
||||||
|
@ -0,0 +1,151 @@
|
|||||||
|
# Copyright (c) 2017 Ericsson AB.
|
||||||
|
# All Rights Reserved.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
import json
|
||||||
|
import requests
|
||||||
|
|
||||||
|
from keystoneclient.auth.identity import v3
|
||||||
|
from keystoneclient import session
|
||||||
|
from keystoneclient.v3 import client as ks_client
|
||||||
|
from novaclient import client as nv_client
|
||||||
|
from oslo_log import log as logging
|
||||||
|
from tempest import config
|
||||||
|
|
||||||
|
CONF = config.CONF
# Nova microversion used for every nova client built by these helpers.
NOVA_API_VERSION = "2.37"
# Names of the keypairs created in the source region and then synced.
KEYPAIRS = ["kb_test_keypair1", "kb_test_keypair2"]
# URL suffix of the Kingbird resource-sync endpoint.
resource_sync_url = "/os-sync/"

LOG = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def get_session():
    """Return a keystone session authenticated as the configured admin."""
    admin = CONF.auth
    return get_current_session(
        admin.admin_username, admin.admin_password, admin.admin_project_name)
|
||||||
|
|
||||||
|
|
||||||
|
def get_current_session(username, password, tenant_name):
    """Build a keystone v3 session for the given credentials.

    Both the user and project domains default to the configured admin
    domain name.
    """
    admin_domain = CONF.auth.admin_domain_name
    password_auth = v3.Password(
        auth_url=CONF.identity.uri_v3,
        username=username,
        password=password,
        project_name=tenant_name,
        user_domain_name=admin_domain,
        project_domain_name=admin_domain)
    return session.Session(auth=password_auth)
|
||||||
|
|
||||||
|
|
||||||
|
def get_openstack_drivers(keystone_client, region, project_name,
                          user_name, password):
    """Create a throw-away project/user and return clients bound to them.

    Creates a new project and user in the admin domain, grants the user
    the 'Member' role on the project, and builds keystone/nova clients in
    *region* authenticated as the new user.

    :param keystone_client: admin keystone client used to create resources.
    :param region: region in which the nova client is scoped.
    :param project_name: name for the new project.
    :param user_name: name for the new user.
    :param password: password for the new user.
    :returns: dict with the new user/project ids, session, token, nova
        version, keypair names and an 'os_drivers' keystone/nova mapping.
    """
    # Create Project, User and assign role to new user.
    # (The original initialized ``resources = dict()`` here and then
    # immediately rebound it below — the dead store is removed.)
    project = keystone_client.projects.create(project_name,
                                              CONF.auth.admin_domain_name)
    user = keystone_client.users.create(user_name, CONF.auth.admin_domain_name,
                                        project.id, password)
    sess = get_current_session(user_name, password, project_name)
    # Create new client to form session.
    new_key_client = ks_client.Client(session=sess)
    # Grant the 'Member' role so the new user can create keypairs and sync.
    # NOTE(review): raises IndexError if no role named 'Member' exists.
    mem_role = [current_role.id for current_role in
                keystone_client.roles.list()
                if current_role.name == 'Member'][0]
    keystone_client.roles.grant(mem_role, user=user, project=project)
    token = new_key_client.session.get_token()
    nova_client = nv_client.Client(NOVA_API_VERSION,
                                   session=sess,
                                   region_name=region)
    return {"user_id": user.id, "project_id": project.id,
            "session": sess, "token": token,
            "nova_version": NOVA_API_VERSION,
            "keypairs": KEYPAIRS,
            "os_drivers": {'keystone': keystone_client,
                           'nova': nova_client}}
|
||||||
|
|
||||||
|
|
||||||
|
def get_keystone_client(session):
    """Return a keystone v3 client bound to *session*."""
    # NOTE(review): the parameter name shadows the ``session`` module
    # imported from keystoneclient; only the argument is visible here.
    return ks_client.Client(session=session)
|
||||||
|
|
||||||
|
|
||||||
|
def get_resource_sync_url_and_headers(token, project_id, api_url):
    """Build the auth headers and full URL for a resource-sync request.

    :returns: (headers, url) tuple for use with ``requests``.
    """
    auth_headers = {
        'Content-Type': 'application/json',
        'X-Auth-Token': token,
    }
    sync_url = "%s%s/%s%s" % (CONF.kingbird.endpoint_url,
                              CONF.kingbird.api_version,
                              project_id, api_url)

    return auth_headers, sync_url
|
||||||
|
|
||||||
|
|
||||||
|
def sync_resource(token, project_id, post_body):
    """POST *post_body* to the resource-sync endpoint.

    :returns: the raw ``requests`` response object.
    """
    headers, url_string = get_resource_sync_url_and_headers(
        token, project_id, resource_sync_url)
    return requests.post(url_string, headers=headers,
                         data=json.dumps(post_body))
|
||||||
|
|
||||||
|
|
||||||
|
def get_sync_job_list(token, project_id, action=None):
    """GET the sync job list, optionally narrowed by *action*.

    *action* may be a job id or the 'active' filter; it is appended to
    the sync URL when given.
    """
    headers, url_string = get_resource_sync_url_and_headers(
        token, project_id, resource_sync_url)
    if action:
        url_string += action
    return requests.get(url_string, headers=headers)
|
||||||
|
|
||||||
|
|
||||||
|
def delete_db_entries(token, project_id, job_id):
    """DELETE the sync job *job_id* and return the HTTP status code."""
    headers, url_string = get_resource_sync_url_and_headers(
        token, project_id, resource_sync_url)
    response = requests.delete(url_string + job_id, headers=headers)
    return response.status_code
|
||||||
|
|
||||||
|
|
||||||
|
def get_regions(key_client):
    """Return the id of every region known to keystone."""
    region_ids = []
    for current_region in key_client.regions.list():
        region_ids.append(current_region.id)
    return region_ids
|
||||||
|
|
||||||
|
|
||||||
|
def cleanup_resources(openstack_drivers, resource_ids):
    """Delete the test project and user created for the sync tests."""
    keystone = openstack_drivers['keystone']
    keystone.projects.delete(resource_ids['project_id'])
    keystone.users.delete(resource_ids['user_id'])
|
||||||
|
|
||||||
|
|
||||||
|
def cleanup_keypairs(regions, resource_ids, current_sess):
    """Delete every test keypair for the test user in every region.

    :param regions: iterable of region names to clean up.
    :param resource_ids: dict holding the test 'user_id'.
    :param current_sess: keystone session used to build nova clients.
    """
    user_id = resource_ids['user_id']
    for current_region in regions:
        # One nova client per region is enough: the client does not
        # depend on which keypair is being deleted.  (The original built
        # a new client inside the inner keypair loop.)
        nova_client = nv_client.Client(NOVA_API_VERSION,
                                       session=current_sess,
                                       region_name=current_region)
        for keypair in KEYPAIRS:
            nova_client.keypairs.delete(keypair, user_id=user_id)
|
||||||
|
|
||||||
|
|
||||||
|
def create_keypairs(openstack_drivers):
    """Create the test keypairs in the source region.

    :returns: mapping of keypair name to the created keypair object.
    """
    nova_client = openstack_drivers['nova']
    return {keypair: nova_client.keypairs.create(keypair)
            for keypair in KEYPAIRS}
|
@ -0,0 +1,98 @@
|
|||||||
|
# Copyright 2017 Ericsson AB
|
||||||
|
# All Rights Reserved.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||||
|
# not use this file except in compliance with the License. You may obtain
|
||||||
|
# a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||||
|
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||||
|
# License for the specific language governing permissions and limitations
|
||||||
|
# under the License.
|
||||||
|
|
||||||
|
from tempest.lib.common import api_version_utils
|
||||||
|
from tempest.lib.common.utils import data_utils
|
||||||
|
import tempest.test
|
||||||
|
|
||||||
|
from kingbird.tests.tempest.scenario.resource_management \
|
||||||
|
import resource_sync_client
|
||||||
|
|
||||||
|
|
||||||
|
class BaseKingbirdTest(api_version_utils.BaseMicroversionTest,
                       tempest.test.BaseTestCase):
    """Base test case class for all Kingbird Resource sync API tests."""

    @classmethod
    def skip_checks(cls):
        super(BaseKingbirdTest, cls).skip_checks()

    def setUp(self):
        super(BaseKingbirdTest, self).setUp()

    @classmethod
    def setup_credentials(cls):
        """Create an admin session and discover the available regions."""
        super(BaseKingbirdTest, cls).setup_credentials()
        admin_session = resource_sync_client.get_session()
        cls.keystone_client = resource_sync_client.get_keystone_client(
            admin_session)
        cls.regions = resource_sync_client.get_regions(cls.keystone_client)

    @classmethod
    def setup_clients(cls):
        super(BaseKingbirdTest, cls).setup_clients()

    @classmethod
    def create_resources(cls):
        """Create a throw-away project and user for the sync tests."""
        # Random names so concurrent test runs do not collide.
        kb_project = data_utils.rand_name('kb-project')
        kb_user = data_utils.rand_name('kb-user')
        kb_password = data_utils.rand_name('kb-password')
        cls.openstack_details = resource_sync_client.get_openstack_drivers(
            cls.keystone_client,
            cls.regions[0],
            kb_project,
            kb_user,
            kb_password)
        cls.openstack_drivers = cls.openstack_details['os_drivers']
        cls.token = cls.openstack_details['token']
        cls.session = cls.openstack_details['session']

    @classmethod
    def resource_cleanup(cls):
        super(BaseKingbirdTest, cls).resource_cleanup()

    @classmethod
    def create_keypairs(cls):
        """Create the test keypairs and record their ids."""
        cls.resource_ids = resource_sync_client.create_keypairs(
            cls.openstack_drivers)
        cls.resource_ids.update(cls.openstack_details)

    @classmethod
    def delete_keypairs(cls):
        """Remove the test keypairs from every region."""
        resource_sync_client.cleanup_keypairs(
            cls.regions, cls.resource_ids, cls.session)

    @classmethod
    def delete_resources(cls):
        """Remove the test project and user."""
        resource_sync_client.cleanup_resources(
            cls.openstack_drivers, cls.resource_ids)

    @classmethod
    def sync_keypair(cls, project_id, post_body):
        """Trigger a keypair sync job for *project_id*."""
        return resource_sync_client.sync_resource(
            cls.token, project_id, post_body)

    @classmethod
    def get_sync_list(cls, project_id, action=None):
        """Return the sync-job listing response, optionally filtered."""
        return resource_sync_client.get_sync_job_list(
            cls.token, project_id, action)

    @classmethod
    def delete_db_entries(cls, project_id, job_id):
        """Delete DB entries for *job_id*; returns the HTTP status code."""
        return resource_sync_client.delete_db_entries(
            cls.token, project_id, job_id)
|
@ -0,0 +1,234 @@
|
|||||||
|
# Copyright 2017 Ericsson AB
|
||||||
|
# All Rights Reserved.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||||
|
# not use this file except in compliance with the License. You may obtain
|
||||||
|
# a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||||
|
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||||
|
# License for the specific language governing permissions and limitations
|
||||||
|
# under the License.
|
||||||
|
|
||||||
|
from kingbird.tests.tempest.scenario.resource_management \
|
||||||
|
import resource_sync_client
|
||||||
|
from kingbird.tests.tempest.scenario.resource_management. \
|
||||||
|
sync_tests import base
|
||||||
|
from kingbird.tests.tempest.scenario import consts
|
||||||
|
from kingbird.tests import utils
|
||||||
|
|
||||||
|
from novaclient import client as nv_client
|
||||||
|
|
||||||
|
# String booleans passed in the sync request body's "force" field.
FORCE = "True"
DEFAULT_FORCE = "False"
|
||||||
|
|
||||||
|
|
||||||
|
class KingbirdKPTest(base.BaseKingbirdTest):
    """Scenario tests for keypair synchronisation through Kingbird."""

    @classmethod
    def setup_clients(cls):
        # NOTE: the original spelled the classmethod argument ``self``;
        # renamed to the conventional ``cls``.
        super(KingbirdKPTest, cls).setup_clients()

    def tearDown(self):
        super(KingbirdKPTest, self).tearDown()

    @classmethod
    def resource_cleanup(cls):
        super(KingbirdKPTest, cls).resource_cleanup()
        cls.delete_keypairs()
        cls.delete_resources()

    @classmethod
    def resource_setup(cls):
        super(KingbirdKPTest, cls).resource_setup()
        cls.create_resources()
        cls.create_keypairs()

    @classmethod
    def setup_credentials(cls):
        super(KingbirdKPTest, cls).setup_credentials()
        cls.session = resource_sync_client.get_session()
        cls.key_client = resource_sync_client.\
            get_keystone_client(cls.session)
        cls.regions = resource_sync_client.get_regions(cls.key_client)

    def _check_job_status(self):
        """Return True once the first job is no longer IN_PROGRESS."""
        job_list_resp = self.get_sync_list(self.resource_ids["project_id"])
        status = job_list_resp.json().get('job_set')[0].get('sync_status')
        return status != consts.JOB_PROGRESS

    def _wait_for_job(self, job_id):
        """Block until *job_id* leaves the IN_PROGRESS state or time out."""
        utils.wait_until_true(
            lambda: self._check_job_status(),
            exception=RuntimeError("Timed out waiting for job %s " % job_id))

    def _cleanup_job(self, job_id):
        """Delete *job_id*'s database entries and assert the delete worked."""
        delete_response = self._delete_entries_in_db(
            self.resource_ids["project_id"], job_id)
        self.assertEqual(delete_response, 200)

    def _sync_job_create(self, force):
        """POST a keypair sync job from the first region to all others."""
        body = {"resource_set": {"resource_type": consts.KEYPAIR_RESOURCE_TYPE,
                                 "resources": self.resource_ids["keypairs"],
                                 "source": self.regions[0], "force": force,
                                 "target": self.regions[1:]}}
        return self.sync_keypair(self.resource_ids["project_id"], body)

    def _delete_entries_in_db(self, project, job):
        """Delete *job*'s DB entries in *project*; return the status code."""
        # FIX: use the *project* argument — the original ignored it and
        # always re-read self.resource_ids["project_id"] (every caller
        # passes exactly that value, so behavior is unchanged).
        return self.delete_db_entries(project, job)

    def test_keypair_sync(self):
        create_response = self._sync_job_create(force=FORCE)
        job_id = create_response.json().get('job_status').get('id')
        self.assertEqual(create_response.status_code, 200)
        self._wait_for_job(job_id)
        # Every target region must now hold each synced keypair.
        for region in self.regions[1:]:
            # One nova client per region (the original rebuilt it for
            # every keypair inside the inner loop).
            nova_client = nv_client.Client(
                self.resource_ids["nova_version"], session=self.session,
                region_name=region)
            for keypair in self.resource_ids["keypairs"]:
                source_keypair = nova_client.keypairs.get(
                    keypair, self.resource_ids["user_id"])
                self.assertEqual(source_keypair.name, keypair)
        # Clean_up the database entries
        self._cleanup_job(job_id)

    def test_get_sync_list(self):
        create_response = self._sync_job_create(force=FORCE)
        self.assertEqual(create_response.status_code, 200)
        job_id = create_response.json().get('job_status').get('id')
        self._wait_for_job(job_id)
        job_list_resp = self.get_sync_list(self.resource_ids["project_id"])
        self.assertEqual(job_list_resp.status_code, 200)
        # Clean_up the database entries
        self._cleanup_job(job_id)

    def test_get_sync_job_details(self):
        create_response = self._sync_job_create(force=FORCE)
        self.assertEqual(create_response.status_code, 200)
        job_id = create_response.json().get('job_status').get('id')
        self._wait_for_job(job_id)
        job_list_resp = self.get_sync_list(self.resource_ids["project_id"],
                                           job_id)
        self.assertEqual(job_list_resp.status_code, 200)
        job_set = job_list_resp.json().get('job_set')
        self.assertEqual(job_set[0].get('resource'),
                         self.resource_ids["keypairs"][0])
        self.assertEqual(job_set[1].get('resource'),
                         self.resource_ids["keypairs"][1])
        self.assertEqual(job_set[0].get('resource_type'),
                         consts.KEYPAIR_RESOURCE_TYPE)
        # Clean_up the database entries
        self._cleanup_job(job_id)

    def test_get_active_jobs(self):
        create_response = self._sync_job_create(force=FORCE)
        self.assertEqual(create_response.status_code, 200)
        job_id = create_response.json().get('job_status').get('id')
        active_job = self.get_sync_list(self.resource_ids["project_id"],
                                        consts.JOB_ACTIVE)
        status = active_job.json().get('job_set')[0].get('sync_status')
        self.assertEqual(active_job.status_code, 200)
        self.assertEqual(status, consts.JOB_PROGRESS)
        self._wait_for_job(job_id)
        # Clean_up the database entries
        self._cleanup_job(job_id)

    def test_delete_active_jobs(self):
        create_response = self._sync_job_create(force=FORCE)
        self.assertEqual(create_response.status_code, 200)
        job_id = create_response.json().get('job_status').get('id')
        response = self._delete_entries_in_db(self.resource_ids["project_id"],
                                              job_id)
        # Deleting a job that is still active must be rejected.
        self.assertEqual(response, 406)
        self._wait_for_job(job_id)
        # Clean_up the database entries
        self._cleanup_job(job_id)

    def test_delete_already_deleted_job(self):
        create_response = self._sync_job_create(force=FORCE)
        self.assertEqual(create_response.status_code, 200)
        job_id = create_response.json().get('job_status').get('id')
        self._wait_for_job(job_id)
        # Clean_up the database entries
        self._cleanup_job(job_id)
        # A second delete of the same job must report "not found".
        delete_response2 = self._delete_entries_in_db(
            self.resource_ids["project_id"], job_id)
        self.assertEqual(delete_response2, 404)

    def test_keypair_sync_with_force_true(self):
        create_response_1 = self._sync_job_create(force=FORCE)
        self.assertEqual(create_response_1.status_code, 200)
        job_id_1 = create_response_1.json().get('job_status').get('id')
        self._wait_for_job(job_id_1)
        self._cleanup_job(job_id_1)
        # A second forced sync of the same keypairs must also succeed.
        create_response_2 = self._sync_job_create(force=FORCE)
        self.assertEqual(create_response_2.status_code, 200)
        job_id_2 = create_response_2.json().get('job_status').get('id')
        self._wait_for_job(job_id_2)
        # Clean_up the database entries
        self._cleanup_job(job_id_2)

    def test_keypair_sync_with_force_false(self):
        create_response_1 = self._sync_job_create(force=DEFAULT_FORCE)
        self.assertEqual(create_response_1.status_code, 200)
        job_id_1 = create_response_1.json().get('job_status').get('id')
        self._wait_for_job(job_id_1)
        self._cleanup_job(job_id_1)
        create_response_2 = self._sync_job_create(force=DEFAULT_FORCE)
        self.assertEqual(create_response_2.status_code, 200)
        job_id_2 = create_response_2.json().get('job_status').get('id')
        self._wait_for_job(job_id_2)
        job_list_resp = self.get_sync_list(self.resource_ids["project_id"],
                                           job_id_2)
        self.assertEqual(job_list_resp.status_code, 200)
        # This job fails because the resource is already created.
        # We can use force to recreate that resource.
        self.assertEqual(
            job_list_resp.json().get('job_set')[0].get('sync_status'),
            consts.JOB_FAILURE)
        self.assertEqual(
            job_list_resp.json().get('job_set')[1].get('sync_status'),
            consts.JOB_FAILURE)
        # Clean_up the database entries
        self._cleanup_job(job_id_2)
|
@ -13,6 +13,7 @@
|
|||||||
# License for the specific language governing permissions and limitations
|
# License for the specific language governing permissions and limitations
|
||||||
# under the License.
|
# under the License.
|
||||||
|
|
||||||
|
import eventlet
|
||||||
import random
|
import random
|
||||||
import sqlalchemy
|
import sqlalchemy
|
||||||
import string
|
import string
|
||||||
@ -89,3 +90,9 @@ def dummy_context(user='test_username', tenant='test_project_id',
|
|||||||
'is_admin': True,
|
'is_admin': True,
|
||||||
'region_name': region_name
|
'region_name': region_name
|
||||||
})
|
})
|
||||||
|
|
||||||
|
|
||||||
|
def wait_until_true(predicate, timeout=60, sleep=1, exception=None):
    """Poll *predicate* until it is truthy or *timeout* seconds elapse.

    Raises *exception* (or eventlet's Timeout when None) on expiry.
    """
    with eventlet.timeout.Timeout(timeout, exception):
        while True:
            if predicate():
                return
            eventlet.sleep(sleep)
|
||||||
|
Loading…
Reference in New Issue
Block a user