Avoid leaking admin-ness into threshold-oriented alarms
Fixes bug 1237567 Previously when an admin created a threshold-oriented alarm on behalf of a non-admin identity, this had the effect of leaking visibility onto statistics for resources that would not normally be visible to the non-admin tenant. Now we ensure that an additional implicit threshold rule query constraint is added on the project ID of the non-admin identity that will ultimately own the alarm. This is achieved by splitting the query validation from the construction of the kwargs from the query. The addition of the implicit query constraint to the threshold rule can then be delayed to a later point in the dispatch path where the full context of the alarm is known (so that we can check for the case where the alarm is created by an admin on behalf of another tenant). Change-Id: I1adae8c899112e7c3eb4e94f3f68262c84a98574
This commit is contained in:
@@ -24,6 +24,7 @@
|
||||
"""
|
||||
import ast
|
||||
import base64
|
||||
import copy
|
||||
import datetime
|
||||
import inspect
|
||||
import json
|
||||
@@ -276,20 +277,29 @@ class ProjectNotAuthorized(Exception):
|
||||
_("Not Authorized to access project %s") % id)
|
||||
|
||||
|
||||
def _sanitize_query(q, valid_keys, headers=None):
|
||||
def _sanitize_query(query, db_func, on_behalf_of=None):
|
||||
'''Check the query to see if:
|
||||
1) the request is coming from admin - then allow full visibility
|
||||
2) non-admin - make sure that the query includes the requester's
|
||||
project.
|
||||
'''
|
||||
auth_project = acl.get_limited_to_project(headers or
|
||||
pecan.request.headers)
|
||||
if auth_project:
|
||||
proj_q = [i for i in q if i.field == 'project_id']
|
||||
for i in proj_q:
|
||||
if auth_project != i.value or i.op != 'eq':
|
||||
raise ProjectNotAuthorized(i.value)
|
||||
q = copy.copy(query)
|
||||
auth_project = acl.get_limited_to_project(pecan.request.headers)
|
||||
created_by = pecan.request.headers.get('X-Project-Id')
|
||||
|
||||
# when an alarm is created by an admin on behalf of another tenant
|
||||
# we must ensure that an implicit query constraint on project_id is
|
||||
# added so that admin-level visibility on statistics is not leaked,
|
||||
# hence for null auth_project (indicating admin-ness) we check if
|
||||
# the creating tenant differs from the tenant on whose behalf the
|
||||
# alarm is being created
|
||||
auth_project = (auth_project or
|
||||
(on_behalf_of if on_behalf_of != created_by else None))
|
||||
if auth_project:
|
||||
_verify_query_segregation(q, auth_project)
|
||||
|
||||
proj_q = [i for i in q if i.field == 'project_id']
|
||||
valid_keys = inspect.getargspec(db_func)[0]
|
||||
if not proj_q and 'on_behalf_of' not in valid_keys:
|
||||
# The user is restricted, but they didn't specify a project
|
||||
# so add it for them.
|
||||
@@ -299,11 +309,66 @@ def _sanitize_query(q, valid_keys, headers=None):
|
||||
return q
|
||||
|
||||
|
||||
def _query_to_kwargs(query, db_func, internal_keys=[], headers=None):
|
||||
def _verify_query_segregation(query, auth_project=None):
|
||||
'''Ensure non-admin queries are not constrained to another project.'''
|
||||
auth_project = (auth_project or
|
||||
acl.get_limited_to_project(pecan.request.headers))
|
||||
if auth_project:
|
||||
for q in query:
|
||||
if q.field == 'project_id' and (auth_project != q.value or
|
||||
q.op != 'eq'):
|
||||
raise ProjectNotAuthorized(q.value)
|
||||
|
||||
|
||||
def _validate_query(query, db_func, internal_keys=[]):
|
||||
_verify_query_segregation(query)
|
||||
|
||||
valid_keys = inspect.getargspec(db_func)[0]
|
||||
query = _sanitize_query(query, valid_keys, headers=headers)
|
||||
internal_keys.append('self')
|
||||
valid_keys = set(valid_keys) - set(internal_keys)
|
||||
translation = {'user_id': 'user',
|
||||
'project_id': 'project',
|
||||
'resource_id': 'resource'}
|
||||
has_timestamp = False
|
||||
for i in query:
|
||||
if i.field == 'timestamp':
|
||||
has_timestamp = True
|
||||
if i.op not in ('lt', 'le', 'gt', 'ge'):
|
||||
raise wsme.exc.InvalidInput('op', i.op,
|
||||
'unimplemented operator for %s' %
|
||||
i.field)
|
||||
else:
|
||||
if i.op == 'eq':
|
||||
if i.field == 'search_offset':
|
||||
has_timestamp = True
|
||||
elif i.field == 'enabled':
|
||||
i._get_value_as_type('boolean')
|
||||
elif i.field.startswith('metadata.'):
|
||||
i._get_value_as_type()
|
||||
elif i.field.startswith('resource_metadata.'):
|
||||
i._get_value_as_type()
|
||||
else:
|
||||
key = translation.get(i.field, i.field)
|
||||
if key not in valid_keys:
|
||||
msg = ("unrecognized field in query: %s, "
|
||||
"valid keys: %s") % (query, valid_keys)
|
||||
raise wsme.exc.UnknownArgument(key, msg)
|
||||
else:
|
||||
raise wsme.exc.InvalidInput('op', i.op,
|
||||
'unimplemented operator for %s' %
|
||||
i.field)
|
||||
|
||||
if has_timestamp and not ('start' in valid_keys or
|
||||
'start_timestamp' in valid_keys):
|
||||
raise wsme.exc.UnknownArgument('timestamp',
|
||||
"not valid for this resource")
|
||||
|
||||
|
||||
def _query_to_kwargs(query, db_func, internal_keys=[]):
|
||||
_validate_query(query, db_func, internal_keys=internal_keys)
|
||||
query = _sanitize_query(query, db_func)
|
||||
internal_keys.append('self')
|
||||
valid_keys = set(inspect.getargspec(db_func)[0]) - set(internal_keys)
|
||||
translation = {'user_id': 'user',
|
||||
'project_id': 'project',
|
||||
'resource_id': 'resource'}
|
||||
@@ -318,10 +383,6 @@ def _query_to_kwargs(query, db_func, internal_keys=[], headers=None):
|
||||
elif i.op in ('gt', 'ge'):
|
||||
stamp['start_timestamp'] = i.value
|
||||
stamp['start_timestamp_op'] = i.op
|
||||
else:
|
||||
raise wsme.exc.InvalidInput('op', i.op,
|
||||
'unimplemented operator for %s' %
|
||||
i.field)
|
||||
else:
|
||||
if i.op == 'eq':
|
||||
if i.field == 'search_offset':
|
||||
@@ -334,15 +395,7 @@ def _query_to_kwargs(query, db_func, internal_keys=[], headers=None):
|
||||
metaquery[i.field[9:]] = i._get_value_as_type()
|
||||
else:
|
||||
key = translation.get(i.field, i.field)
|
||||
if key not in valid_keys:
|
||||
msg = ("unrecognized field in query: %s, "
|
||||
"valid keys: %s") % (query, valid_keys)
|
||||
raise wsme.exc.UnknownArgument(key, msg)
|
||||
kwargs[key] = i.value
|
||||
else:
|
||||
raise wsme.exc.InvalidInput('op', i.op,
|
||||
'unimplemented operator for %s' %
|
||||
i.field)
|
||||
|
||||
if metaquery and 'metaquery' in valid_keys:
|
||||
kwargs['metaquery'] = metaquery
|
||||
@@ -354,9 +407,6 @@ def _query_to_kwargs(query, db_func, internal_keys=[], headers=None):
|
||||
elif 'start_timestamp' in valid_keys:
|
||||
kwargs['start_timestamp'] = q_ts['query_start']
|
||||
kwargs['end_timestamp'] = q_ts['query_end']
|
||||
else:
|
||||
raise wsme.exc.UnknownArgument('timestamp',
|
||||
"not valid for this resource")
|
||||
if 'start_timestamp_op' in stamp:
|
||||
kwargs['start_timestamp_op'] = stamp['start_timestamp_op']
|
||||
if 'end_timestamp_op' in stamp:
|
||||
@@ -963,11 +1013,10 @@ class AlarmThresholdRule(_Base):
|
||||
if not threshold_rule.query:
|
||||
threshold_rule.query = []
|
||||
|
||||
#note(sileht): _query_to_kwargs implicitly call _sanitize_query
|
||||
#that add project_id in query
|
||||
_query_to_kwargs(threshold_rule.query, storage.SampleFilter.__init__,
|
||||
internal_keys=['timestamp', 'start', 'start_timestamp'
|
||||
'end', 'end_timestamp'])
|
||||
timestamp_keys = ['timestamp', 'start', 'start_timestamp' 'end',
|
||||
'end_timestamp']
|
||||
_validate_query(threshold_rule.query, storage.SampleFilter.__init__,
|
||||
internal_keys=timestamp_keys)
|
||||
return threshold_rule
|
||||
|
||||
@property
|
||||
@@ -1129,6 +1178,14 @@ class Alarm(_Base):
|
||||
error = _("%s is mandatory") % field
|
||||
pecan.response.translatable_error = error
|
||||
raise wsme.exc.ClientSideError(unicode(error))
|
||||
if alarm.threshold_rule:
|
||||
# ensure an implicit constraint on project_id is added to
|
||||
# the query if not already present
|
||||
alarm.threshold_rule.query = _sanitize_query(
|
||||
alarm.threshold_rule.query,
|
||||
storage.SampleFilter.__init__,
|
||||
on_behalf_of=alarm.project_id
|
||||
)
|
||||
|
||||
if alarm.threshold_rule and alarm.combination_rule:
|
||||
error = _("threshold_rule and combination_rule "
|
||||
|
||||
@@ -456,7 +456,7 @@ class TestAlarms(FunctionalTest,
|
||||
else:
|
||||
self.fail("Alarm not found")
|
||||
|
||||
def test_post_alarm_as_admin(self):
|
||||
def _do_test_post_alarm_as_admin(self, explicit_project_constraint):
|
||||
"""Test the creation of an alarm as admin for another project."""
|
||||
json = {
|
||||
'enabled': False,
|
||||
@@ -470,9 +470,7 @@ class TestAlarms(FunctionalTest,
|
||||
'query': [{'field': 'metadata.field',
|
||||
'op': 'eq',
|
||||
'value': '5',
|
||||
'type': 'string'},
|
||||
{'field': 'project_id', 'op': 'eq',
|
||||
'value': 'aprojectidthatisnotmine'}],
|
||||
'type': 'string'}],
|
||||
'comparison_operator': 'le',
|
||||
'statistic': 'count',
|
||||
'threshold': 50,
|
||||
@@ -480,6 +478,10 @@ class TestAlarms(FunctionalTest,
|
||||
'period': 180,
|
||||
}
|
||||
}
|
||||
if explicit_project_constraint:
|
||||
project_constraint = {'field': 'project_id', 'op': 'eq',
|
||||
'value': 'aprojectidthatisnotmine'}
|
||||
json['threshold_rule']['query'].append(project_constraint)
|
||||
headers = {}
|
||||
headers.update(self.auth_headers)
|
||||
headers['X-Roles'] = 'admin'
|
||||
@@ -493,13 +495,35 @@ class TestAlarms(FunctionalTest,
|
||||
for key in json:
|
||||
if key.endswith('_rule'):
|
||||
storage_key = 'rule'
|
||||
if explicit_project_constraint:
|
||||
self.assertEqual(getattr(alarms[0], storage_key),
|
||||
json[key])
|
||||
else:
|
||||
query = getattr(alarms[0], storage_key).get('query')
|
||||
self.assertEqual(len(query), 2)
|
||||
implicit_constraint = {
|
||||
u'field': u'project_id',
|
||||
u'value': u'aprojectidthatisnotmine',
|
||||
u'op': u'eq'
|
||||
}
|
||||
self.assertEqual(query[1], implicit_constraint)
|
||||
else:
|
||||
storage_key = key
|
||||
self.assertEqual(getattr(alarms[0], storage_key),
|
||||
json[key])
|
||||
self.assertEqual(getattr(alarms[0], key), json[key])
|
||||
else:
|
||||
self.fail("Alarm not found")
|
||||
|
||||
def test_post_alarm_as_admin_explicit_project_constraint(self):
|
||||
"""Test the creation of an alarm as admin for another project,
|
||||
with an explicit query constraint on the owner's project ID.
|
||||
"""
|
||||
self._do_test_post_alarm_as_admin(True)
|
||||
|
||||
def test_post_alarm_as_admin_implicit_project_constraint(self):
|
||||
"""Test the creation of an alarm as admin for another project,
|
||||
without an explicit query constraint on the owner's project ID.
|
||||
"""
|
||||
self._do_test_post_alarm_as_admin(False)
|
||||
|
||||
def test_post_alarm_as_admin_no_user(self):
|
||||
"""Test the creation of an alarm as admin for another project but
|
||||
forgetting to set the values.
|
||||
|
||||
@@ -15,6 +15,7 @@
|
||||
# under the License.
|
||||
"""Test the methods related to query."""
|
||||
import datetime
|
||||
import mock
|
||||
|
||||
import wsme
|
||||
|
||||
@@ -169,6 +170,7 @@ class TestQueryToKwArgs(tests_base.TestCase):
|
||||
def setUp(self):
|
||||
super(TestQueryToKwArgs, self).setUp()
|
||||
self.stubs.Set(api, '_sanitize_query', lambda x, y, **z: x)
|
||||
self.stubs.Set(api, '_verify_query_segregation', lambda x, **z: x)
|
||||
|
||||
def test_sample_filter_single(self):
|
||||
q = [Query(field='user_id',
|
||||
@@ -241,12 +243,13 @@ class TestQueryToKwArgs(tests_base.TestCase):
|
||||
op='le',
|
||||
value='ramdisk',
|
||||
type='string')]
|
||||
self.assertRaises(
|
||||
wsme.exc.InvalidInput,
|
||||
api._query_to_kwargs,
|
||||
queries,
|
||||
storage.SampleFilter.__init__,
|
||||
headers={'X-ProjectId': 'foobar'})
|
||||
with mock.patch('pecan.request') as request:
|
||||
request.headers.return_value = {'X-ProjectId': 'foobar'}
|
||||
self.assertRaises(
|
||||
wsme.exc.InvalidInput,
|
||||
api._query_to_kwargs,
|
||||
queries,
|
||||
storage.SampleFilter.__init__)
|
||||
|
||||
def test_sample_filter_invalid_field(self):
|
||||
q = [Query(field='invalid',
|
||||
@@ -278,21 +281,23 @@ class TestQueryToKwArgs(tests_base.TestCase):
|
||||
op='eq',
|
||||
value='fake',
|
||||
type='string') for f in ['y', 'on_behalf_of', 'x']]
|
||||
self.assertRaises(wsme.exc.ClientSideError,
|
||||
api._query_to_kwargs,
|
||||
queries,
|
||||
storage.SampleFilter.__init__,
|
||||
headers={'X-ProjectId': 'foobar'},
|
||||
internal_keys=['on_behalf_of'])
|
||||
with mock.patch('pecan.request') as request:
|
||||
request.headers.return_value = {'X-ProjectId': 'foobar'}
|
||||
self.assertRaises(wsme.exc.ClientSideError,
|
||||
api._query_to_kwargs,
|
||||
queries,
|
||||
storage.SampleFilter.__init__,
|
||||
internal_keys=['on_behalf_of'])
|
||||
|
||||
def test_sample_filter_self_always_excluded(self):
|
||||
queries = [Query(field='user_id',
|
||||
op='eq',
|
||||
value='20')]
|
||||
kwargs = api._query_to_kwargs(queries,
|
||||
storage.SampleFilter.__init__,
|
||||
headers={'X-ProjectId': 'foobar'})
|
||||
self.assertFalse('self' in kwargs)
|
||||
with mock.patch('pecan.request') as request:
|
||||
request.headers.return_value = {'X-ProjectId': 'foobar'}
|
||||
kwargs = api._query_to_kwargs(queries,
|
||||
storage.SampleFilter.__init__)
|
||||
self.assertFalse('self' in kwargs)
|
||||
|
||||
def test_sample_filter_translation(self):
|
||||
queries = [Query(field=f,
|
||||
@@ -301,8 +306,9 @@ class TestQueryToKwArgs(tests_base.TestCase):
|
||||
type='string') for f in ['user_id',
|
||||
'project_id',
|
||||
'resource_id']]
|
||||
kwargs = api._query_to_kwargs(queries,
|
||||
storage.SampleFilter.__init__,
|
||||
headers={'X-ProjectId': 'foobar'})
|
||||
for o in ['user', 'project', 'resource']:
|
||||
self.assertEqual(kwargs.get(o), 'fake_%s_id' % o)
|
||||
with mock.patch('pecan.request') as request:
|
||||
request.headers.return_value = {'X-ProjectId': 'foobar'}
|
||||
kwargs = api._query_to_kwargs(queries,
|
||||
storage.SampleFilter.__init__)
|
||||
for o in ['user', 'project', 'resource']:
|
||||
self.assertEqual(kwargs.get(o), 'fake_%s_id' % o)
|
||||
|
||||
Reference in New Issue
Block a user