Enhancing the "Sync show" and "Sync list" features
"Sync show": the existing "sync show" feature displays the details of only one job-identifier, but when the "Sync template" feature comes into the picture, there will be multiple job-identifiers present for a single job request. This commit displays the details of multiple job-identifiers. "Sync list": the interface of the "Sync list" feature is changed for better understanding. Added test cases for the same. Depends-On: <Iaa8e9568f97581f74e688d15a11c7e7fd832e019> Change-Id: Id85a51f5c13ba1b3c90b75c6820da5d1e443c398
This commit is contained in:
parent
a364c4b11c
commit
a6117883a6
|
@ -91,12 +91,16 @@ class ResourceSyncController(object):
|
|||
result['job_set'] = db_api.sync_job_list(context, action)
|
||||
elif uuidutils.is_uuid_like(action):
|
||||
try:
|
||||
result['job_set'] = db_api.resource_sync_list_by_job(
|
||||
result['job_set'] = db_api.resource_sync_list_by_job_id(
|
||||
context, action)
|
||||
except exceptions.JobNotFound:
|
||||
pecan.abort(404, _('Job not found'))
|
||||
else:
|
||||
pecan.abort(400, _('Invalid request URL'))
|
||||
try:
|
||||
result['job_set'] = db_api.resource_sync_list_by_job_name(
|
||||
context, action)
|
||||
except exceptions.JobNotFound:
|
||||
pecan.abort(404, _('Job not found'))
|
||||
return result
|
||||
|
||||
@index.when(method='POST', template='json')
|
||||
|
@ -117,6 +121,7 @@ class ResourceSyncController(object):
|
|||
job_name = None
|
||||
if 'name' in request_data.keys():
|
||||
job_name = request_data.get('name')
|
||||
db_api.validate_job_name(context, job_name)
|
||||
for iteration in range(len(request_data['Sync'])):
|
||||
payload = request_data['Sync'][iteration]
|
||||
response = self._get_post_data(payload,
|
||||
|
|
|
@ -124,3 +124,7 @@ class InvalidInputError(KingbirdException):
|
|||
|
||||
class ResourceNotFound(NotFound):
    """Raised when a requested resource does not exist."""
    message = _("Resource not available")
|
||||
|
||||
|
||||
class DuplicateJobEntry(KingbirdException):
    """Raised when creating a sync job whose name is already in use."""
    message = _("Job name is already present")
|
||||
|
|
|
@ -410,6 +410,7 @@ def sync_job_list(context, action=None):
|
|||
for row in rows:
|
||||
result = dict()
|
||||
result['id'] = row.id
|
||||
result['name'] = row.name
|
||||
result['sync_status'] = row.sync_status
|
||||
result['created_at'] = row.created_at
|
||||
if row.updated_at:
|
||||
|
@ -505,7 +506,30 @@ def resource_sync_status(context, job_id):
|
|||
|
||||
|
||||
@require_context
def resource_sync_list_by_job_name(context, job_name):
    """Return resource-sync details for every job matching *job_name*.

    A single job name may map to several SyncJob rows (one per
    job-identifier created for the request), so the per-job resource
    lists are concatenated into one response.

    :param context: request context carrying ``user`` and ``project`` ids.
    :param job_name: name shared by the job identifiers to look up.
    :returns: list of per-resource status dicts for all matching jobs.
    :raises exception.JobNotFound: if no job with that name exists for
        this user/project.
    """
    parent_rows = model_query(context, models.SyncJob).\
        filter_by(name=job_name, user_id=context.user,
                  project_id=context.project).all()
    if not parent_rows:
        raise exception.JobNotFound()
    final_response = list()
    # Iterate the rows directly (no range(len(...)) indexing) and
    # accumulate with extend() instead of repeated list concatenation,
    # which copies the whole accumulator on every iteration.
    for job in parent_rows:
        rows = model_query(context, models.ResourceSync).\
            filter_by(job_id=job.id).all()
        final_response.extend(sync_individual_resource(rows))
    return final_response
|
||||
|
||||
|
||||
def validate_job_name(context, job_name):
    """Ensure no sync job named *job_name* exists for this user/project.

    :param context: request context carrying ``user`` and ``project`` ids.
    :param job_name: candidate job name supplied by the caller.
    :raises exception.DuplicateJobEntry: if a job with the same name
        already exists for this user/project.
    """
    # .first() is enough for an existence check; .all() would fetch
    # every matching row only to test the list's truthiness.
    parent_row = model_query(context, models.SyncJob).\
        filter_by(name=job_name, user_id=context.user,
                  project_id=context.project).first()
    if parent_row:
        raise exception.DuplicateJobEntry()
|
||||
|
||||
|
||||
@require_context
|
||||
def resource_sync_list_by_job_id(context, job_id):
|
||||
parent_row = model_query(context, models.SyncJob).\
|
||||
filter_by(id=job_id, user_id=context.user,
|
||||
project_id=context.project).first()
|
||||
|
@ -513,11 +537,16 @@ def resource_sync_list_by_job(context, job_id):
|
|||
raise exception.JobNotFound()
|
||||
rows = model_query(context, models.ResourceSync).\
|
||||
filter_by(job_id=parent_row.id).all()
|
||||
return sync_individual_resource(rows)
|
||||
|
||||
|
||||
def sync_individual_resource(rows):
|
||||
output = list()
|
||||
if not rows:
|
||||
raise exception.JobNotFound()
|
||||
for row in rows:
|
||||
result = dict()
|
||||
result['id'] = row.job_id
|
||||
result['target_region'] = row.target_region
|
||||
result['source_region'] = row.source_region
|
||||
result['resource'] = row.resource
|
||||
|
|
|
@ -101,6 +101,10 @@ class TestResourceManager(testroot.KBApiTest):
|
|||
"resource_type": "keypair",
|
||||
"source": FAKE_SOURCE_REGION,
|
||||
"target": [FAKE_TARGET_REGION]}
|
||||
mock_db_api.validate_job_name(self.ctx, JOB_NAME)
|
||||
self.assertEqual(1,
|
||||
mock_db_api.validate_job_name
|
||||
.call_count)
|
||||
result = sync_manager.ResourceSyncController().\
|
||||
_get_post_data(payload, self.ctx, JOB_NAME)
|
||||
self.assertEqual(result['job_status'].get('status'),
|
||||
|
@ -365,13 +369,6 @@ class TestResourceManager(testroot.KBApiTest):
|
|||
self.app.get, get_url,
|
||||
headers=FAKE_HEADERS)
|
||||
|
||||
    @mock.patch.object(rpc_client, 'EngineClient')
    @mock.patch.object(sync_manager, 'db_api')
    def test_get_wrong_action(self, mock_db_api, mock_rpc_client):
        """A bogus action segment in the URL yields an HTTP error response."""
        # 'fake' is neither a UUID nor a known job name, so the request
        # is expected to fail with a 4xx, surfaced by webtest as AppError.
        get_url = FAKE_URL + '/fake'
        self.assertRaises(webtest.app.AppError, self.app.get, get_url,
                          headers=FAKE_HEADERS)
|
||||
|
||||
@mock.patch.object(rpc_client, 'EngineClient')
|
||||
@mock.patch.object(sync_manager, 'db_api')
|
||||
def test_get_active_job(self, mock_db_api, mock_rpc_client):
|
||||
|
@ -381,10 +378,21 @@ class TestResourceManager(testroot.KBApiTest):
|
|||
|
||||
@mock.patch.object(rpc_client, 'EngineClient')
|
||||
@mock.patch.object(sync_manager, 'db_api')
|
||||
def test_get_detail_job(self, mock_db_api, mock_rpc_client):
|
||||
def test_get_detail_job_by_id(self, mock_db_api, mock_rpc_client):
|
||||
get_url = FAKE_URL + '/' + FAKE_JOB
|
||||
self.app.get(get_url, headers=FAKE_HEADERS)
|
||||
self.assertEqual(1, mock_db_api.resource_sync_list_by_job.call_count)
|
||||
self.assertEqual(1,
|
||||
mock_db_api.resource_sync_list_by_job_id
|
||||
.call_count)
|
||||
|
||||
@mock.patch.object(rpc_client, 'EngineClient')
|
||||
@mock.patch.object(sync_manager, 'db_api')
|
||||
def test_get_detail_job_by_name(self, mock_db_api, mock_rpc_client):
|
||||
get_url = FAKE_URL + '/' + JOB_NAME
|
||||
self.app.get(get_url, headers=FAKE_HEADERS)
|
||||
self.assertEqual(1,
|
||||
mock_db_api.resource_sync_list_by_job_name
|
||||
.call_count)
|
||||
|
||||
@mock.patch.object(rpc_client, 'EngineClient')
|
||||
@mock.patch.object(sync_manager, 'db_api')
|
||||
|
|
|
@ -24,6 +24,7 @@ from kingbird.common import consts
|
|||
from kingbird.common import exceptions
|
||||
from kingbird.db import api as api
|
||||
from kingbird.db.sqlalchemy import api as db_api
|
||||
from kingbird.db.sqlalchemy import models as db_models
|
||||
from kingbird.tests import base
|
||||
from kingbird.tests import utils
|
||||
|
||||
|
@ -145,7 +146,16 @@ class DBAPIResourceSyncTest(base.KingbirdTestCase):
|
|||
self.assertEqual(consts.JOB_PROGRESS, resource_sync_create.sync_status)
|
||||
db_api.resource_sync_update(
|
||||
self.ctx, job.id, 'Fake_region', 'fake_key', consts.JOB_SUCCESS)
|
||||
updated_job = db_api.resource_sync_list_by_job(self.ctx, job.id)
|
||||
rows = db_api.model_query(self.ctx, db_models.ResourceSync).\
|
||||
filter_by(job_id=UUID1).all()
|
||||
individual_result = db_api.sync_individual_resource(rows)
|
||||
self.assertEqual(consts.JOB_SUCCESS, individual_result[0].
|
||||
get('sync_status'))
|
||||
updated_job = db_api.resource_sync_list_by_job_id(self.ctx, job.id)
|
||||
self.assertEqual(consts.JOB_SUCCESS, updated_job[0].get('sync_status'))
|
||||
updated_job = db_api.\
|
||||
resource_sync_list_by_job_name(self.ctx,
|
||||
'fake_job_name')
|
||||
self.assertEqual(consts.JOB_SUCCESS, updated_job[0].get('sync_status'))
|
||||
|
||||
def test_foreign_key(self):
|
||||
|
|
Loading…
Reference in New Issue