Fix bugs in tricircle Admin API

1. What is the problem
When we use the admin role of the demo project to retrieve jobs/routings
through the Admin API, the returned results also include the admin
project's jobs/routings. The results should be related only to the demo
project.

2. What is the solution to the problem
Extract the project ID from the request context and use it as the filter.
Any project ID filter in the URL query string will be ignored, and only
the project ID in which the user is authorized will be used as the filter.

3. What features need to be implemented in the Tricircle to
realize the solution
Make project ID a mandatory filter for the jobs/routings list
operations, as sketched below.
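
A minimal sketch of the idea behind sections 2 and 3 (illustrative only;
the handler is simplified, and the helper names follow the controller code
in the diff below rather than being an exact copy):

    # Simplified list handler: whatever project_id the client passes in the
    # query string is overwritten by the project ID taken from the request
    # context, so results are always scoped to the caller's own project.
    def get_all(self, **kwargs):
        context = t_context.extract_context_from_environ()
        is_valid_filter, filters = self._get_filters(kwargs)
        if not is_valid_filter:
            return utils.format_api_error(400, 'Unsupported filter type')
        # project ID from client should be equal to the one from context
        filters['project_id'] = context.project_id
        filters = [{'key': key, 'comparator': 'eq', 'value': value}
                   for key, value in filters.items()]
        return {'jobs': db_api.list_jobs(context, filters)}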

Change-Id: I86778e0525c1fecf45bc473bd42e62909534778b
Closes-Bug: #1711001
Closes-Bug: #1711003
Author: Dongfeng Huang
Date:   2017-08-16 10:26:24 +08:00
Commit: 9939540c07 (parent 3e13f36102)
7 changed files with 255 additions and 168 deletions
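
For reference, the client-visible effect is that a project_id passed in the
query string no longer changes the result set. The snippet below is a
hypothetical usage example, not part of this commit; the endpoint URL, token
and project ID are placeholders:

    import requests

    ADMIN_API = 'http://127.0.0.1:19999/v1.0'  # placeholder endpoint
    TOKEN = 'ADMIN_TOKEN'                      # placeholder token
    MY_PROJECT_ID = 'demo-project-id'          # the caller's own project

    # Explicitly ask for another project's jobs ...
    resp = requests.get(ADMIN_API + '/jobs',
                        params={'project_id': 'another-project-id'},
                        headers={'X-Auth-Token': TOKEN})
    # ... and still receive only jobs scoped to the caller's project, because
    # the server replaces the project_id filter with the one from the context.
    assert all(job['project_id'] == MY_PROJECT_ID
               for job in resp.json()['jobs'])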


@@ -326,7 +326,8 @@ entries. Accordingly the filtering condition(s) will be added to the tail of
 the service url separated by question mark. For example, the default service
 url is ``GET /routings``, when filtering is applied, the service url becomes
 ``GET /routings?attribute=attribute_value``. One or multiple conditions are
-supported.
+supported. What's more, project ID filter in URL query string will be ignored,
+and only the project ID in which the user is authorized will be used as the filter.
 
 All items returned are sorted in descending order by ID. Because the ID is a
 big integer, ID with greater value means they are newly added to the resource
@@ -866,8 +867,10 @@ be raised.
 
 By default, this fetches all of the jobs including active jobs like NEW, FAIL
 and RUNNING jobs as well as SUCCESS jobs from job log. We can filter them by
-project ID, job type and job status to only get the specific kind of job
-entries. Accordingly the filtering condition will be added to the tail
+job type and job status to only get the specific kind of job entries, project ID
+filter in URL query string will be ignored, and only the project ID in which
+the user is authorized will be used as the filter.
+Accordingly the filtering condition will be added to the tail
 of the service url separated by question mark. For example, the default
 service url is ``GET /jobs``. Using a filter the service url becomes
 ``GET /jobs?filter_name=value``. One or multiple filtering conditions are


@@ -216,20 +216,21 @@ class AsyncJobController(rest.RestController):
         marker = kwargs.pop('marker', None)
         sorts = [('timestamp', 'desc'), ('id', 'desc')]
 
-        if kwargs:
-            is_valid_filter, filters = self._get_filters(kwargs)
+        is_valid_filter, filters = self._get_filters(kwargs)
 
-            if not is_valid_filter:
-                msg = (_('Unsupported filter type: %(filters)s') % {
-                    'filters': ', '.join(
-                        [filter_name for filter_name in filters])
-                })
-                return utils.format_api_error(400, msg)
+        if not is_valid_filter:
+            msg = (_('Unsupported filter type: %(filters)s') % {
+                'filters': ', '.join(
+                    [filter_name for filter_name in filters])
+            })
+            return utils.format_api_error(400, msg)
 
-            filters = [{'key': key, 'comparator': 'eq', 'value': value}
-                       for key, value in six.iteritems(filters)]
-        else:
-            filters = None
+        # project ID from client should be equal to the one from
+        # context, since only the project ID in which the user
+        # is authorized will be used as the filter.
+        filters['project_id'] = context.project_id
+        filters = [{'key': key, 'comparator': 'eq', 'value': value}
+                   for key, value in six.iteritems(filters)]
 
         try:
             if marker is not None:


@@ -146,33 +146,33 @@ class RoutingController(rest.RestController):
                          "for int() rather than '%s'") % marker)
                 return utils.format_api_error(400, msg)
 
-        if kwargs:
-            is_valid_filter, filters = self._get_filters(kwargs)
+        is_valid_filter, filters = self._get_filters(kwargs)
 
-            if not is_valid_filter:
-                msg = (_('Unsupported filter type: %(filters)s') % {
-                    'filters': ', '.join(
-                        [filter_name for filter_name in filters])
-                })
-                return utils.format_api_error(400, msg)
+        if not is_valid_filter:
+            msg = (_('Unsupported filter type: %(filters)s') % {
+                'filters': ', '.join(
+                    [filter_name for filter_name in filters])
+            })
+            return utils.format_api_error(400, msg)
 
-            if 'id' in filters:
-                try:
-                    # resource routing id is an integer.
-                    filters['id'] = int(filters['id'])
-                except ValueError as e:
-                    LOG.exception('Failed to convert routing id to an integer:'
-                                  ' %(exception)s ', {'exception': e})
-                    msg = (_("Id should be an integer or a valid literal "
-                             "for int() rather than '%s'") % filters['id'])
-                    return utils.format_api_error(400, msg)
+        if 'id' in filters:
+            try:
+                # resource routing id is an integer.
+                filters['id'] = int(filters['id'])
+            except ValueError as e:
+                LOG.exception('Failed to convert routing id to an integer:'
+                              ' %(exception)s ', {'exception': e})
+                msg = (_("Id should be an integer or a valid literal "
+                         "for int() rather than '%s'") % filters['id'])
+                return utils.format_api_error(400, msg)
 
-            expand_filters = [{'key': filter_name, 'comparator': 'eq',
-                               'value': filters[filter_name]}
-                              for filter_name in filters]
-        else:
-            expand_filters = None
+        # project ID from client should be equal to the one from
+        # context, since only the project ID in which the user
+        # is authorized will be used as the filter.
+        filters['project_id'] = context.project_id
+        expand_filters = [{'key': filter_name, 'comparator': 'eq',
+                           'value': filters[filter_name]}
+                          for filter_name in filters]
         try:
             routings = db_api.list_resource_routings(context, expand_filters,
                                                      limit, marker,


@@ -38,6 +38,10 @@ CONF = cfg.CONF
 LOG = logging.getLogger(__name__)
 
 
+def db_test_stub(*args):
+    pass
+
+
 def create_pod(context, pod_dict):
     with context.session.begin():
         return core.create_resource(context, models.Pod, pod_dict)
@@ -508,32 +512,45 @@ def get_running_job(context, _type, resource_id):
 def finish_job(context, job_id, successful, timestamp):
     status = constants.JS_Success if successful else constants.JS_Fail
-    with context.session.begin():
-        job_dict = {'status': status,
-                    'timestamp': timestamp,
-                    'extra_id': uuidutils.generate_uuid()}
-        job = core.update_resource(context, models.AsyncJob, job_id, job_dict)
-        if status == constants.JS_Success:
-            log_dict = {'id': uuidutils.generate_uuid(),
-                        'type': job['type'],
-                        'project_id': job['project_id'],
-                        'timestamp': timestamp,
-                        'resource_id': job['resource_id']}
-            context.session.query(models.AsyncJob).filter(
-                sql.and_(models.AsyncJob.type == job['type'],
-                         models.AsyncJob.resource_id == job['resource_id'],
-                         models.AsyncJob.timestamp <= timestamp)).delete(
-                synchronize_session=False)
-            core.create_resource(context, models.AsyncJobLog, log_dict)
-        else:
-            # sqlite has problem handling "<" operator on timestamp, so we
-            # slide the timestamp a bit and use "<="
-            timestamp = timestamp - datetime.timedelta(microseconds=1)
-            context.session.query(models.AsyncJob).filter(
-                sql.and_(models.AsyncJob.type == job['type'],
-                         models.AsyncJob.resource_id == job['resource_id'],
-                         models.AsyncJob.timestamp <= timestamp)).delete(
-                synchronize_session=False)
+    retries = 5
+    for i in range(retries + 1):
+        try:
+            with context.session.begin():
+                db_test_stub(i)
+                job_dict = {'status': status,
+                            'timestamp': timestamp,
+                            'extra_id': uuidutils.generate_uuid()}
+                job = core.update_resource(context, models.AsyncJob, job_id,
+                                           job_dict)
+                if status == constants.JS_Success:
+                    log_dict = {'id': uuidutils.generate_uuid(),
+                                'type': job['type'],
+                                'project_id': job['project_id'],
+                                'timestamp': timestamp,
+                                'resource_id': job['resource_id']}
+                    context.session.query(models.AsyncJob).filter(
+                        sql.and_(
+                            models.AsyncJob.type == job['type'],
+                            models.AsyncJob.resource_id == job['resource_id'],
+                            models.AsyncJob.timestamp <= timestamp)).delete(
+                        synchronize_session=False)
+                    core.create_resource(context, models.AsyncJobLog, log_dict)
+                else:
+                    # sqlite has problem handling "<" operator on timestamp,
+                    # so we slide the timestamp a bit and use "<="
+                    timestamp = timestamp - datetime.timedelta(microseconds=1)
+                    context.session.query(models.AsyncJob).filter(
+                        sql.and_(
+                            models.AsyncJob.type == job['type'],
+                            models.AsyncJob.resource_id == job['resource_id'],
+                            models.AsyncJob.timestamp <= timestamp)).delete(
+                        synchronize_session=False)
+        except db_exc.DBDeadlock:
+            if i == retries:
+                raise
+            time.sleep(1)
+            continue
+        return
 
 
 def ensure_agent_exists(context, pod_id, host, _type, tunnel_ip):


@@ -251,17 +251,24 @@ class TestAsyncJobController(API_FunctionalTest):
self._test_and_check(jobs)
@patch.object(context, 'extract_context_from_environ',
new=fake_admin_context)
@patch.object(xrpcapi.XJobAPI, 'invoke_method',
new=fake_invoke_method)
def test_get_one_and_get_all(self):
@patch.object(context, 'extract_context_from_environ')
def test_get_one_and_get_all(self, mock_context):
self.context.project_id = "fake_project_id"
mock_context.return_value = self.context
all_job_ids = {}
all_job_project_ids = {}
index = 0
for job_type in self.all_job_types:
job = self._prepare_job_element(job_type)
if index == 0:
# the first job has a project ID that differs from
# context.project_id
job = self._prepare_job_element(job_type)
else:
job = self._prepare_job_element(job_type,
self.context.project_id)
job = {"job": job, "expected_error": 200}
@@ -279,7 +286,8 @@ class TestAsyncJobController(API_FunctionalTest):
'service_uri': service_uri})
return_jobs_1 = response_1.json
self.assertEqual(amount_of_all_jobs, len(return_jobs_1['jobs']))
self.assertEqual(amount_of_all_jobs - 1,
len(return_jobs_1['jobs']))
self.assertIn('status', response_1)
self.assertIn('resource', response_1)
self.assertIn('project_id', response_1)
@@ -294,7 +302,7 @@ class TestAsyncJobController(API_FunctionalTest):
response_2 = self.app.get('/v1.0/jobs?status=new')
return_jobs_2 = response_2.json
self.assertEqual(amount_of_all_jobs, len(return_jobs_2['jobs']))
self.assertEqual(amount_of_all_jobs - 1, len(return_jobs_2['jobs']))
response = self.app.get('/v1.0/jobs?status=fail')
return_jobs_3 = response.json
@@ -318,7 +326,7 @@ class TestAsyncJobController(API_FunctionalTest):
'service_uri': service_uri})
return_jobs = response.json
self.assertEqual(amount_of_fail_jobs, len(return_jobs['jobs']))
self.assertEqual(amount_of_fail_jobs - 1, len(return_jobs['jobs']))
response = self.app.get('/v1.0/%(service_uri)s?status=success'
'' % {'service_uri': service_uri})
@@ -326,36 +334,45 @@ class TestAsyncJobController(API_FunctionalTest):
self.assertEqual(amount_of_succ_jobs, len(return_jobs['jobs']))
# use job type filter or project id filter
# project ID filter in URL query string will be ignored, and
# only the project ID in which the user is authorized will
# be used as filter.
response = self.app.get(
'/v1.0/%(service_uri)s' % {'service_uri': service_uri})
return_job = response.json
response1 = self.app.get(
'/v1.0/%(service_uri)s?project_id=%(project_id)s' % {
'service_uri': service_uri,
'project_id': uuidutils.generate_uuid()})
return_job1 = response1.json
response2 = self.app.get(
'/v1.0/%(service_uri)s?project_id=%(project_id)s' % {
'service_uri': service_uri,
'project_id': 'fake_project_id'})
return_job2 = response2.json
self.assertEqual(len(return_job2['jobs']),
len(return_job1['jobs']))
self.assertEqual(len(return_job['jobs']),
len(return_job2['jobs']))
# use job type filter
count = 1
for job_type in self.all_job_types:
response = self.app.get('/v1.0/%(service_uri)s?type=%(type)s'
'' % {'service_uri': service_uri,
'type': job_type})
return_job = response.json
if count == 1:
self.assertEqual(0, len(return_job['jobs']))
else:
self.assertEqual(1, len(return_job['jobs']))
count += 1
self.assertEqual(1, len(return_job['jobs']))
response = self.app.get(
'/v1.0/%(service_uri)s?project_id=%(project_id)s' % {
'service_uri': service_uri,
'project_id': all_job_project_ids[job_type]})
return_job = response.json
self.assertEqual(1, len(return_job['jobs']))
# combine job type filter and project id filter
response = self.app.get(
'/v1.0/%(service_uri)s?project_id=%(project_id)s&'
'type=%(type)s' % {
'service_uri': service_uri,
'project_id': all_job_project_ids[job_type],
'type': job_type})
return_job = response.json
self.assertEqual(1, len(return_job['jobs']))
# combine job type filter, project id filter and job status filter
for i in xrange(amount_of_all_jobs):
# combine job type and job status filter
for i in xrange(1, amount_of_all_jobs):
if i < amount_of_fail_jobs:
# this aims to test service "/v1.0/jobs/{id}"
response_1 = self.app.get('/v1.0/jobs/%(id)s' % {
@@ -364,11 +381,9 @@ class TestAsyncJobController(API_FunctionalTest):
response_2 = self.app.get(
'/v1.0/%(service_uri)s?'
'project_id=%(project_id)s&'
'type=%(type)s&'
'status=%(status)s' % {
'service_uri': service_uri,
'project_id': return_job_1['job']['project_id'],
'type': return_job_1['job']['type'],
'status': 'fail'})
@@ -382,10 +397,9 @@ class TestAsyncJobController(API_FunctionalTest):
# job log. their job ids are not stored in all_job_ids
job_type = self.all_job_types[i]
response = self.app.get(
'/v1.0/%(service_uri)s?project_id=%(project_id)s&'
'/v1.0/%(service_uri)s?'
'type=%(type)s&status=%(status)s' % {
'service_uri': service_uri,
'project_id': all_job_project_ids[job_type],
'type': job_type,
'status': 'success'})
@@ -408,10 +422,9 @@ class TestAsyncJobController(API_FunctionalTest):
return_job_1 = response_1.json
response_2 = self.app.get(
'/v1.0/%(service_uri)s?project_id=%(project_id)s&'
'/v1.0/%(service_uri)s?'
'type=%(type)s&status=%(status)s' % {
'service_uri': service_uri,
'project_id': return_job_1['job']['project_id'],
'type': return_job_1['job']['type'],
'status': 'new'})
@@ -728,40 +741,43 @@ class TestAsyncJobController(API_FunctionalTest):
back_job = response.json
return back_job['job']['id']
def _prepare_job_element(self, job_type):
def _prepare_job_element(self, job_type, project_id=None):
# in order to create a job, we need three elements: job type,
# job resource and project id.
# job resource and project id. If project_id parameter is not
# None then we create resource and job for that project,
# or else we create resource and job for an entirely new project.
if project_id is None:
project_id = uuidutils.generate_uuid()
job = {}
job['resource'] = {}
job['type'] = job_type
for resource_type, resource_id in self.job_resource_map[job_type]:
job['resource'][resource_id] = uuidutils.generate_uuid()
# these two jobs need no resource routings. We only need to ensure
# that job['resource']['project_id'] equals to job['project_id'], which
# keeps consistent with job_primary_resource_map in common/constant.py
if job_type in (constants.JT_SEG_RULE_SETUP,
constants.JT_RESOURCE_RECYCLE):
job['resource']['project_id'] = project_id
else:
for resource_type, resource_id in self.job_resource_map[job_type]:
job['resource'][resource_id] = uuidutils.generate_uuid()
job['project_id'] = self._prepare_project_id_for_job(job)
self._create_resource_for_project(job, project_id)
job['project_id'] = project_id
return job
def _prepare_project_id_for_job(self, job):
# prepare the project id for job creation, currently job parameter
# contains job type and job resource information.
job_type = job['type']
if job_type in (constants.JT_SEG_RULE_SETUP,
constants.JT_RESOURCE_RECYCLE):
project_id = job['resource']['project_id']
else:
project_id = uuidutils.generate_uuid()
pod_id = uuidutils.generate_uuid()
def _create_resource_for_project(self, job, project_id):
# create resource for project ${project_id}
pod_id = uuidutils.generate_uuid()
resource_type, resource_id = (
constants.job_primary_resource_map[job_type])
routing = db_api.create_resource_mapping(
self.context, job['resource'][resource_id],
job['resource'][resource_id], pod_id, project_id,
resource_type)
self.assertIsNotNone(routing)
return project_id
resource_type, resource_id = (
constants.job_primary_resource_map[job['type']])
routing = db_api.create_resource_mapping(
self.context, job['resource'][resource_id],
job['resource'][resource_id], pod_id, project_id,
resource_type)
self.assertIsNotNone(routing)
def _validate_error_code(self, res, code):
self.assertEqual(res[list(res.keys())[0]]['code'], code)


@@ -14,6 +14,7 @@ import copy
import mock
from mock import patch
from oslo_config import cfg
import oslo_db.exception as db_exc
from oslo_utils import timeutils
from oslo_utils import uuidutils
import re
@@ -51,6 +52,11 @@ class FakeResponse(object):
return super(FakeResponse, cls).__new__(cls)
def mock_db_test_stub(i):
if i == 0:
raise db_exc.DBDeadlock
class AsyncJobControllerTest(base.TestCase):
def setUp(self):
super(AsyncJobControllerTest, self).setUp()
@@ -64,6 +70,7 @@ class AsyncJobControllerTest(base.TestCase):
self.job_resource_map = constants.job_resource_map
policy.populate_default_rules()
@patch.object(db_api, 'db_test_stub', new=mock_db_test_stub)
@patch.object(pecan, 'response', new=FakeResponse)
@patch.object(context, 'extract_context_from_environ')
def test_post(self, mock_context):
@@ -186,6 +193,7 @@ class AsyncJobControllerTest(base.TestCase):
@patch.object(pecan, 'response', new=FakeResponse)
@patch.object(context, 'extract_context_from_environ')
def test_get_one_and_get_all(self, mock_context):
self.context.project_id = uuidutils.generate_uuid()
mock_context.return_value = self.context
# failure case, only admin can list the job's info
@@ -227,6 +235,8 @@ class AsyncJobControllerTest(base.TestCase):
index = 0
for job_type in self.job_resource_map.keys():
job = self._prepare_job_element(job_type)
# for test convenience, all jobs have same project ID
job['project_id'] = self.context.project_id
resource_id = '#'.join([job['resource'][resource_id]
for resource_type, resource_id
@@ -280,6 +290,7 @@ class AsyncJobControllerTest(base.TestCase):
@patch.object(pecan, 'response', new=FakeResponse)
@patch.object(context, 'extract_context_from_environ')
def test_get_all_jobs_with_pagination(self, mock_context):
self.context.project_id = uuidutils.generate_uuid()
mock_context.return_value = self.context
# map job type to project id for later project id filter validation.
@@ -291,6 +302,10 @@ class AsyncJobControllerTest(base.TestCase):
# cover all job types.
for job_type in self.job_resource_map.keys():
job = self._prepare_job_element(job_type)
if count > 1:
# for test convenience, the first job has a project ID
# that is different from the context.project_id
job['project_id'] = self.context.project_id
job_project_id_map[job_type] = job['project_id']
@@ -321,9 +336,6 @@ class AsyncJobControllerTest(base.TestCase):
unsupported_filter = {'fake_filter': "fake_filter"}
count = 1
for job_type in self.job_resource_map.keys():
project_id_filter_1 = {'project_id': job_project_id_map[job_type]}
project_id_filter_2 = {'project_id': uuidutils.generate_uuid()}
job_type_filter_1 = {'type': job_type}
job_type_filter_2 = {'type': job_type + '_1'}
@@ -334,41 +346,53 @@ class AsyncJobControllerTest(base.TestCase):
self.context.is_admin = True
# successful case, filter by project id
jobs_project_id_filter_1 = self.controller.get_all(
**project_id_filter_1)
self.assertEqual(1, len(jobs_project_id_filter_1['jobs']))
# test when specify project ID filter from client, if this
# project ID is different from the one from context, then
# it will be ignored, project ID from context will be
# used instead.
filter1 = {'project_id': uuidutils.generate_uuid()}
res1 = self.controller.get_all(**filter1)
jobs_project_id_filter_2 = self.controller.get_all(
**project_id_filter_2)
self.assertEqual(0, len(jobs_project_id_filter_2['jobs']))
filter2 = {'project_id': self.context.project_id}
res2 = self.controller.get_all(**filter2)
self.assertEqual(len(res2['jobs']), len(res1['jobs']))
res3 = self.controller.get_all()
# there is one job whose project ID is different from
# context.project_id. As the list operation only retrieves the
# jobs whose project ID equals to context.project_id, so this
# special job entry won't be retrieved.
self.assertEqual(len(res3['jobs']), len(res2['jobs']))
# successful case, filter by job type
jobs_job_type_filter_1 = self.controller.get_all(
**job_type_filter_1)
self.assertEqual(1, len(jobs_job_type_filter_1['jobs']))
if count == 1:
self.assertEqual(0, len(jobs_job_type_filter_1['jobs']))
else:
self.assertEqual(1, len(jobs_job_type_filter_1['jobs']))
jobs_job_type_filter_2 = self.controller.get_all(
**job_type_filter_2)
self.assertEqual(0, len(jobs_job_type_filter_2['jobs']))
# successful case, filter by project id, job status and job type
# successful case, filter by job status and job type
if count <= amount_of_running_jobs:
all_filters = dict(list(project_id_filter_1.items()) +
list(job_status_filter_3.items()) +
all_filters = dict(list(job_status_filter_3.items()) +
list(job_type_filter_1.items()))
jobs_all_filters = self.controller.get_all(**all_filters)
self.assertEqual(1, len(jobs_all_filters['jobs']))
if count == 1:
self.assertEqual(0, len(jobs_all_filters['jobs']))
else:
self.assertEqual(1, len(jobs_all_filters['jobs']))
else:
all_filters = dict(list(project_id_filter_1.items()) +
list(job_status_filter_1.items()) +
all_filters = dict(list(job_status_filter_1.items()) +
list(job_type_filter_1.items()))
jobs_all_filters = self.controller.get_all(**all_filters)
self.assertEqual(1, len(jobs_all_filters['jobs']))
# successful case, contradictory filter
contradict_filters = dict(list(project_id_filter_1.items()) +
list(job_status_filter_2.items()) +
contradict_filters = dict(list(job_status_filter_2.items()) +
list((job_type_filter_2.items())))
jobs_contradict_filters = self.controller.get_all(
**contradict_filters)
@@ -385,7 +409,8 @@ class AsyncJobControllerTest(base.TestCase):
# successful case, list jobs without filters
jobs_empty_filters = self.controller.get_all()
self.assertEqual(amount_of_all_jobs, len(jobs_empty_filters['jobs']))
self.assertEqual(amount_of_all_jobs - 1,
len(jobs_empty_filters['jobs']))
# successful case, filter by job status
jobs_job_status_filter_1 = self.controller.get_all(
@@ -399,12 +424,12 @@ class AsyncJobControllerTest(base.TestCase):
jobs_job_status_filter_3 = self.controller.get_all(
**job_status_filter_3)
self.assertEqual(amount_of_running_jobs,
self.assertEqual(amount_of_running_jobs - 1,
len(jobs_job_status_filter_3['jobs']))
# test for paginate query
job_paginate_no_filter_1 = self.controller.get_all()
self.assertEqual(amount_of_all_jobs,
self.assertEqual(amount_of_all_jobs - 1,
len(job_paginate_no_filter_1['jobs']))
# no limit no marker
@@ -434,7 +459,7 @@ class AsyncJobControllerTest(base.TestCase):
marker = res1['jobs'][0]['id']
job_paginate_filter_7 = {'status': 'running', 'marker': marker}
res2 = self.controller.get_all(**job_paginate_filter_7)
self.assertEqual(amount_of_running_jobs, len(res2['jobs']))
self.assertEqual(amount_of_running_jobs - 1, len(res2['jobs']))
job_paginate_filter_8 = {'status': 'new', 'limit': 3}
res = self.controller.get_all(**job_paginate_filter_8)
@@ -445,7 +470,7 @@ class AsyncJobControllerTest(base.TestCase):
# unsupported marker type
res = self.controller.get_all(marker=None)
self.assertEqual(amount_of_all_jobs, len(res['jobs']))
self.assertEqual(amount_of_all_jobs - 1, len(res['jobs']))
res = self.controller.get_all(marker='-123')
self._validate_error_code(res, 400)
@@ -470,17 +495,17 @@ class AsyncJobControllerTest(base.TestCase):
job_paginate_filter_10 = {'status': 'running'}
res = self.controller.get_all(**job_paginate_filter_10)
self.assertEqual(amount_of_running_jobs, len(res['jobs']))
self.assertEqual(amount_of_running_jobs - 1, len(res['jobs']))
# add some rows to job log table
for i in xrange(amount_of_running_jobs):
for i in xrange(amount_of_running_jobs - 1):
db_api.finish_job(self.context, res['jobs'][i]['id'], True,
timeutils.utcnow())
time.sleep(1)
res_success_log = db_api.list_jobs_from_log(self.context, None)
self.assertEqual(amount_of_running_jobs, len(res_success_log))
self.assertEqual(amount_of_running_jobs - 1, len(res_success_log))
res_in_job = db_api.list_jobs(self.context, None)
self.assertEqual(amount_of_all_jobs - amount_of_running_jobs,
self.assertEqual(amount_of_all_jobs - (amount_of_running_jobs - 1),
len(res_in_job))
job_paginate_filter_11 = {'limit': 2}


@@ -141,21 +141,30 @@ class RoutingControllerTest(base.TestCase):
@patch.object(pecan, 'response', new=FakeResponse)
@patch.object(context, 'extract_context_from_environ')
def test_get_routings_with_pagination(self, mock_context):
self.context.project_id = uuidutils.generate_uuid()
mock_context.return_value = self.context
# test when no pagination and filters are applied to the list
# operation, then all of the routings will be retrieved.
count = 1
total_routings = 4
for resource_type in ('subnet', 'router', 'security_group', 'network'):
kw_routing = self._prepare_routing_element(resource_type)
# for test convenience, the first routing has a different
# project ID from later ones.
if count > 1:
kw_routing['routing']['project_id'] = self.context.project_id
self.controller.post(**kw_routing)
count += 1
routings = self.controller.get_all()
ids = [routing['id']
for key, values in six.iteritems(routings)
for routing in values]
self.assertEqual([4, 3, 2, 1], ids)
self.assertEqual([4, 3, 2], ids)
for filter_name in ('subnet', 'router', 'security_group', 'network'):
for filter_name in ('router', 'security_group', 'network'):
filters = {'resource_type': filter_name}
routings = self.controller.get_all(**filters)
items = [routing['resource_type']
@@ -163,8 +172,8 @@ class RoutingControllerTest(base.TestCase):
for routing in values]
self.assertEqual(1, len(items))
# test when pagination limit varies in range [1, 5)
for i in xrange(1, 5):
# test when pagination limit varies in range [1, total_routings+1)
for i in xrange(1, total_routings+1):
routings = []
total_pages = 0
@@ -183,16 +192,19 @@ class RoutingControllerTest(base.TestCase):
total_pages += 1
routings.extend(routing['routings'])
# assert that total pages will decrease as the limit increase.
pages = int(4 / i)
if 4 % i:
# because the first routing has a different project ID and can't
# be retrieved by current admin role of project, so the number
# of actual total routings we can get is total_routings-1.
pages = int((total_routings - 1) / i)
if (total_routings - 1) % i:
pages += 1
self.assertEqual(pages, total_pages)
self.assertEqual(4, len(routings))
self.assertEqual(total_routings - 1, len(routings))
for i in xrange(4):
self.assertEqual(4-i, routings[i]['id'])
for i in xrange(total_routings-1):
self.assertEqual(total_routings - i, routings[i]['id'])
set1 = set(['subnet', 'router', 'security_group', 'network'])
set1 = set(['router', 'security_group', 'network'])
set2 = set([routing1['resource_type'] for routing1 in routings])
self.assertEqual(set1, set2)
@@ -201,7 +213,7 @@ class RoutingControllerTest(base.TestCase):
self.assertEqual(1, len(routings['routings']))
routings = self.controller.get_all(resource_type='subnet', limit=2)
self.assertEqual(1, len(routings['routings']))
self.assertEqual(0, len(routings['routings']))
# apply a filter and if it doesn't match with any of the retrieved
# routings, then all of them will be discarded and the method returns
@@ -211,10 +223,10 @@ class RoutingControllerTest(base.TestCase):
# test cases when limit from client is abnormal
routings = self.controller.get_all(limit=0)
self.assertEqual(4, len(routings['routings']))
self.assertEqual(total_routings - 1, len(routings['routings']))
routings = self.controller.get_all(limit=-1)
self.assertEqual(4, len(routings['routings']))
self.assertEqual(total_routings - 1, len(routings['routings']))
res = self.controller.get_all(limit='20x')
self._validate_error_code(res, 400)
@@ -257,6 +269,19 @@ class RoutingControllerTest(base.TestCase):
res = self.controller.get_all(**kw_filter5)
self._validate_error_code(res, 400)
# test when specify project ID filter from client, if this
# project ID is different from the one from context, then
# it will be ignored, project ID from context will be
# used instead.
res = self.controller.get_all()
kw_filter6 = {'project_id': uuidutils.generate_uuid()}
res1 = self.controller.get_all(**kw_filter6)
kw_filter7 = {'project_id': self.context.project_id}
res2 = self.controller.get_all(**kw_filter7)
self.assertEqual(len(res2['routings']), len(res1['routings']))
self.assertEqual(len(res['routings']), len(res2['routings']))
@patch.object(pecan, 'response', new=FakeResponse)
@patch.object(context, 'extract_context_from_environ')
def test_get_all_non_admin(self, mock_context):