Auto-populate lessee for deployments
Adds a configuration option and capability to automatically record the lessee for a deployment based upon the original auth_token information provided in the request context. Additional token information is now shared through the context, which is extended in the same fashion as in most other projects that save request token information to their RequestContext, instead of triggering extra background API calls to Keystone to work out the requestor's information.

Change-Id: I42a2ceb9d2e7dfdc575eb37ed773a1bc682cec23
parent 2e94aa4241
commit c3f397149a
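At a high level, the change threads the Keystone token validation payload from the API request into Ironic's RequestContext and, when ``[conductor]automatic_lessee`` is enabled, records the requesting project as the node's lessee. A minimal sketch of that flow, using a hypothetical token payload rather than anything taken from the change itself:

# Minimal sketch of the intended flow; the payload below is a hypothetical
# example of what keystonemiddleware places in the WSGI environ under
# 'keystone.token_info'.
token_info = {'token': {'project': {'id': 'user-project'}}}

# The API hook copies this into the RequestContext (hooks.py hunk below),
# and the conductor later pulls the project id back out of it:
project_id = token_info.get('token', {}).get('project', {}).get('id')

# With [conductor]automatic_lessee enabled and no lessee already set,
# start_deploy() records roughly:
#   node.lessee = project_id
#   node.driver_internal_info['automatic_lessee'] = True
assert project_id == 'user-project'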
@@ -88,14 +88,16 @@ class ContextHook(hooks.PecanHook):
    def before(self, state):
        is_public_api = state.request.environ.get('is_public_api', False)
        auth_token_info = state.request.environ.get('keystone.token_info')
        # set the global_request_id if we have an inbound request id
        gr_id = state.request.headers.get(INBOUND_HEADER, "")
        if re.match(ID_FORMAT, gr_id):
            state.request.environ[GLOBAL_REQ_ID] = gr_id

        ctx = context.RequestContext.from_environ(state.request.environ,
                                                  is_public_api=is_public_api)
        ctx = context.RequestContext.from_environ(
            state.request.environ,
            is_public_api=is_public_api,
            auth_token_info=auth_token_info)
        # Do not pass any token with context for noauth mode
        if cfg.CONF.auth_strategy != 'keystone':
            ctx.auth_token = None
@@ -18,15 +18,25 @@ from oslo_context import context
class RequestContext(context.RequestContext):
    """Extends security contexts from the oslo.context library."""

    def __init__(self, is_public_api=False, **kwargs):
    # NOTE(TheJulia): This is a flag used by oslo.context which allows us
    # to pass in a list of keys to preserve when calling from_dict() on
    # the RequestContext class.
    FROM_DICT_EXTRA_KEYS = ['auth_token_info']

    def __init__(self, is_public_api=False, auth_token_info=None, **kwargs):
        """Initialize the RequestContext

        :param is_public_api: Specifies whether the request should be
            processed without authentication.
        :param auth_token_info: Parameter to house auth token validation
            response data such as the user auth token's project id as
            opposed to the bearer token used. This allows for easy access
            to attributes for the end user when actions are taken on
            behalf of a user.
        :param kwargs: additional arguments passed to oslo.context.
        """
        super(RequestContext, self).__init__(**kwargs)
        self.is_public_api = is_public_api
        self.auth_token_info = auth_token_info

    def to_policy_values(self):
        policy_values = super(RequestContext, self).to_policy_values()
@@ -48,6 +58,32 @@ class RequestContext(context.RequestContext):
            return
        self.update_store()

    @classmethod
    def from_environ(cls, environ, **kwargs):
        """Load a context object from a request environment.

        If keyword arguments are provided then they override the values in
        the request environment, injecting the kwarg arguments used by
        ironic, as unknown values are filtered out from the final context
        object in the base oslo.context library.

        :param environ: The environment dictionary associated with a
            request.
        :type environ: dict
        """
        context = super().from_environ(environ)
        context.is_public_api = environ.get('is_public_api', False)
        context.auth_token_info = environ.get('keystone.token_info')
        return context

    def to_dict(self):
        """Return a dictionary of context attributes."""
        # The parent class in oslo.context provides the core standard
        # fields, but does not go beyond that. This preserves
        # auth_token_info for serialization and ultimately things like
        # RPC transport.
        cdict = super().to_dict()
        cdict['auth_token_info'] = self.auth_token_info
        return cdict


def get_admin_context():
    """Create an administrator context."""
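For background, ``FROM_DICT_EXTRA_KEYS`` is the hook oslo.context provides so subclass-specific fields survive the ``to_dict()``/``from_dict()`` round trip used for RPC serialization. A standalone, illustrative sketch of that mechanism (assuming oslo.context is installed; this is not Ironic's actual class):

# Illustrative sketch of the oslo.context round trip; not part of the change.
from oslo_context import context


class DemoContext(context.RequestContext):
    # Keys listed here are restored by from_dict() even though the base
    # class knows nothing about them.
    FROM_DICT_EXTRA_KEYS = ['auth_token_info']

    def __init__(self, auth_token_info=None, **kwargs):
        super().__init__(**kwargs)
        self.auth_token_info = auth_token_info

    def to_dict(self):
        cdict = super().to_dict()
        cdict['auth_token_info'] = self.auth_token_info
        return cdict


ctx = DemoContext(auth_token_info={'token': {'project': {'id': 'abc'}}})
restored = DemoContext.from_dict(ctx.to_dict())
assert restored.auth_token_info['token']['project']['id'] == 'abc'

This is why the change only needs to list the extra key and extend to_dict(); the base class handles the rest when the context is rebuilt on the conductor side.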
@@ -83,6 +83,26 @@ def start_deploy(task, manager, configdrive=None, event='deploy',
            instance_info.pop('kernel', None)
            instance_info.pop('ramdisk', None)
            node.instance_info = instance_info
    elif CONF.conductor.automatic_lessee:
        # This should only be on deploy...
        project = utils.get_token_project_from_request(task.context)
        if (project and node.lessee is None):
            LOG.debug('Adding lessee %(project)s to node %(uuid)s.',
                      {'project': project,
                       'uuid': node.uuid})
            node.set_driver_internal_info('automatic_lessee', True)
            node.lessee = project
        elif project and node.lessee is not None:
            # Since the model is a bit of a matrix and we're largely
            # just empowering operators, let's at least log a warning
            # since they may need to remedy something here. Or maybe
            # not.
            LOG.warning('Could not automatically save lessee '
                        '%(project)s to node %(uuid)s. Node already '
                        'has a defined lessee of %(lessee)s.',
                        {'project': project,
                         'uuid': node.uuid,
                         'lessee': node.lessee})

    # Infer the image type to make sure the deploy driver
    # validates only the necessary variables for different
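Condensed for readability, the lessee branch above boils down to the following decision; ``maybe_record_lessee`` is a hypothetical name used only for this sketch, the change keeps the logic inline in ``start_deploy()``:

import logging

LOG = logging.getLogger(__name__)


def maybe_record_lessee(node, project, automatic_lessee_enabled):
    # Sketch only: restatement of the branching in start_deploy() above.
    if not automatic_lessee_enabled or not project:
        return
    if node.lessee is None:
        # Record the requesting project and remember that we set it, so
        # tear down can undo it later.
        node.set_driver_internal_info('automatic_lessee', True)
        node.lessee = project
    else:
        # An operator already assigned a lessee; leave it alone and warn.
        LOG.warning('Could not automatically save lessee %(project)s to '
                    'node %(uuid)s; it already has lessee %(lessee)s.',
                    {'project': project, 'uuid': node.uuid,
                     'lessee': node.lessee})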
@@ -1097,6 +1097,10 @@ class ConductorManager(base_manager.BaseConductorManager):
        node.del_driver_internal_info('root_uuid_or_disk_id')
        node.del_driver_internal_info('is_whole_disk_image')
        node.del_driver_internal_info('deploy_boot_mode')
        if node.driver_internal_info.get('automatic_lessee'):
            # Remove lessee, as it was automatically added
            node.lessee = None
            node.del_driver_internal_info('automatic_lessee')
        network.remove_vifs_from_node(task)
        node.save()
        if node.allocation_id:
@@ -547,7 +547,6 @@ class ConductorAPI(object):
        if deploy_steps:
            version = '1.52'
            new_kws['deploy_steps'] = deploy_steps

        cctxt = self._prepare_call(topic=topic, version=version)
        return cctxt.call(context, 'do_node_deploy', node_id=node_id,
                          rebuild=rebuild, configdrive=configdrive, **new_kws)
@@ -1692,3 +1692,27 @@ def exclude_current_conductor(current_conductor, offline_conductors):
                    current_conductor)

    return [x for x in offline_conductors if x != current_conductor]


def get_token_project_from_request(ctx):
    """Identifies the request originator project via keystone token details.

    This method evaluates the ``auth_token_info`` field, which is used to
    pass along the information keystone returns when validating a token.
    That information is based upon the ``auth_token`` originally provided
    by the requestor.

    When a service such as Nova proxies a request, the auth token provided
    with the request is intended to be that of the original user.

    :returns: The project ID value, or None if it cannot be determined.
    """
    try:
        if ctx.auth_token_info:
            project = ctx.auth_token_info.get('token', {}).get('project', {})
            if project:
                return project.get('id')
    except AttributeError:
        LOG.warning('Attempted to identify requestor project ID value, '
                    'however we were unable to do so. Possible older API?')
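For reference, the helper walks the nested structure keystonemiddleware exposes for a validated token. A small self-contained sketch with a hypothetical, trimmed payload (real payloads carry many more fields):

# Hypothetical, trimmed token_info payload; real Keystone payloads also
# carry user, roles, catalog, and more.
class FakeContext(object):
    auth_token_info = {'token': {'project': {'id': 'user-project'}}}


ctx = FakeContext()
project = ctx.auth_token_info.get('token', {}).get('project', {})
assert project.get('id') == 'user-project'
# get_token_project_from_request(ctx) performs the same lookup; it returns
# None when auth_token_info is unset, and if the context predates the
# attribute entirely (an older API service) the AttributeError is caught
# and a warning is logged instead.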
@@ -348,6 +348,16 @@ opts = [
                         'be specified multiple times to define priorities '
                         'for multiple steps. If set to 0, this specific step '
                         'will not run during verification. ')),
    cfg.BoolOpt('automatic_lessee',
                default=False,
                mutable=True,
                help=_('If the conductor should record the Project ID '
                       'indicated by Keystone for a requested deployment. '
                       'Allows rights to be granted to directly access the '
                       'deployed node as a lessee within the RBAC security '
                       'model. The conductor does *not* record this value '
                       'otherwise, and this information is not backfilled '
                       'for prior instances which have been deployed.')),
]
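The option defaults to off. A sketch of toggling it with oslo.config, mirroring how the unit tests below enable it; in a deployment the equivalent lives in the ``[conductor]`` section of ironic.conf:

# Sketch only. In ironic.conf this is:
#   [conductor]
#   automatic_lessee = True
from oslo_config import cfg

CONF = cfg.CONF
# Ironic registers this option itself; registering a stand-in here just
# keeps the sketch self-contained.
CONF.register_opts([cfg.BoolOpt('automatic_lessee', default=False,
                                mutable=True)],
                   group='conductor')
CONF.set_override('automatic_lessee', True, group='conductor')
assert CONF.conductor.automatic_lessee is True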
@@ -208,10 +208,13 @@ class TestContextHook(base.BaseApiTest):
    @mock.patch.object(policy, 'check', autospec=True)
    def _test_context_hook(self, mock_policy, mock_ctx, is_admin=False,
                           is_public_api=False, auth_strategy='keystone',
                           request_id=None):
                           request_id=None, auth_token_info=None):
        cfg.CONF.set_override('auth_strategy', auth_strategy)
        headers = fake_headers(admin=is_admin)
        environ = headers_to_environ(headers, is_public_api=is_public_api)
        environ = headers_to_environ(
            headers,
            **{'is_public_api': is_public_api,
               'keystone.token_info': auth_token_info})
        reqstate = FakeRequestState(headers=headers, environ=environ)
        context_hook = hooks.ContextHook(None)
        ctx = mock.Mock()
@@ -223,10 +226,14 @@ class TestContextHook(base.BaseApiTest):
        mock_policy.return_value = False
        context_hook.before(reqstate)
        creds_dict = {'is_public_api': is_public_api}
        mock_ctx.from_environ.assert_called_once_with(environ, **creds_dict)
        if auth_token_info:
            creds_dict['auth_token_info'] = auth_token_info
        mock_ctx.from_environ.assert_called_once_with(
            environ, **creds_dict)
        mock_policy.assert_not_called()
        if auth_strategy == 'noauth':
            self.assertIsNone(ctx.auth_token)
            mock_policy.assert_not_called()
        return context_hook, reqstate

    def test_context_hook_not_admin(self):
@@ -250,6 +257,9 @@ class TestContextHook(base.BaseApiTest):
        self.assertNotIn('Openstack-Request-Id',
                         response.headers)

    def test_context_hook_auth_token_info(self):
        self._test_context_hook(auth_token_info='data-dict')


class TestPolicyDeprecation(tests_base.TestCase):
@@ -37,6 +37,9 @@ class RequestContextTestCase(tests_base.TestCase):
            "roles": None,
            "overwrite": True
        }
        self.environ = {
            'keystone.token_info': {'key': 'value'},
        }

    @mock.patch.object(oslo_context.RequestContext, "__init__", autospec=True)
    def test_create_context(self, context_mock):
@@ -63,3 +66,26 @@ class RequestContextTestCase(tests_base.TestCase):
        context_get_mock.return_value = self.context
        self.context.ensure_thread_contain_context()
        self.assertFalse(self.context.update_store.called)

    def test_create_context_with_environ(self):
        test_context = context.RequestContext().from_environ(self.environ)
        self.assertEqual({'key': 'value'}, test_context.auth_token_info)

    def test_to_dict_get_auth_token_info(self):
        test_context = context.RequestContext().from_environ(self.environ)
        self.assertEqual({'key': 'value'}, test_context.auth_token_info)
        result = test_context.to_dict()
        self.assertIn('auth_token_info', result)
        expected = {'key': 'value'}
        self.assertDictEqual(expected, result['auth_token_info'])

    def test_from_dict(self):
        cdict = {'auth_token_info': 'magicdict'}
        ctxt = context.RequestContext().from_dict(cdict)
        self.assertEqual('magicdict', ctxt.auth_token_info)

    def test_from_dict_older_api_server(self):
        # Validates auth_token_info is None if not included.
        cdict = {}
        ctxt = context.RequestContext().from_dict(cdict)
        self.assertIsNone(ctxt.auth_token_info)
@@ -337,10 +337,16 @@ class DoNodeDeployTestCase(mgr_utils.ServiceSetUpMixin, db_base.DbTestCase):
    @mock.patch.object(conductor_utils, 'validate_instance_info_traits',
                       autospec=True)
    @mock.patch.object(images, 'is_whole_disk_image', autospec=True)
    def test_start_deploy(self, mock_iwdi, mock_validate_traits,
    def _test_start_deploy(self, mock_iwdi, mock_validate_traits,
                           mock_validate_deploy_user_steps_and_templates,
                           mock_deploy_validate,
                           mock_power_validate, mock_process_event):
                           mock_power_validate, mock_process_event,
                           automatic_lessee=False):
        self.context.auth_token_info = {
            'token': {'project': {'id': 'user-project'}}
        }
        if automatic_lessee:
            self.config(automatic_lessee=True, group='conductor')
        self._start_service()
        mock_iwdi.return_value = False
        deploy_steps = [{"interface": "bios", "step": "factory_reset",
@@ -366,6 +372,18 @@ class DoNodeDeployTestCase(mgr_utils.ServiceSetUpMixin, db_base.DbTestCase):
                mock.ANY, 'deploy', call_args=(
                    deployments.do_node_deploy, task, 1, None, deploy_steps),
                callback=mock.ANY, err_handler=mock.ANY)
            if automatic_lessee:
                self.assertEqual('user-project', node.lessee)
                self.assertIn('automatic_lessee', node.driver_internal_info)
            else:
                self.assertIsNone(node.lessee)
                self.assertNotIn('automatic_lessee',
                                 node.driver_internal_info)

    def test_start_deploy(self):
        self._test_start_deploy(automatic_lessee=False)

    def test_start_deploy_records_lessee(self):
        self._test_start_deploy(automatic_lessee=True)


    @mgr_utils.mock_record_keepalive
@@ -2476,7 +2476,9 @@ class DoNodeTearDownTestCase(mgr_utils.ServiceSetUpMixin, db_base.DbTestCase):
            driver_internal_info={'is_whole_disk_image': False,
                                  'deploy_steps': {},
                                  'root_uuid_or_disk_id': 'foo',
                                  'instance': {'ephemeral_gb': 10}})
                                  'instance': {'ephemeral_gb': 10},
                                  'automatic_lessee': True},
            lessee='fooproject')
        port = obj_utils.create_test_port(
            self.context, node_id=node.id,
            internal_info={'tenant_vif_port_id': 'foo'})
@@ -2498,11 +2500,13 @@ class DoNodeTearDownTestCase(mgr_utils.ServiceSetUpMixin, db_base.DbTestCase):
        self.assertIsNone(node.last_error)
        self.assertIsNone(node.instance_uuid)
        self.assertIsNone(node.allocation_id)
        self.assertIsNone(node.lessee)
        self.assertEqual({}, node.instance_info)
        self.assertNotIn('instance', node.driver_internal_info)
        self.assertIsNone(node.driver_internal_info['deploy_steps'])
        self.assertNotIn('root_uuid_or_disk_id', node.driver_internal_info)
        self.assertNotIn('is_whole_disk_image', node.driver_internal_info)
        self.assertNotIn('automatic_lessee', node.driver_internal_info)
        mock_tear_down.assert_called_once_with(task.driver.deploy, task)
        mock_clean.assert_called_once_with(task)
        self.assertEqual({}, port.internal_info)
@@ -2667,3 +2667,20 @@ class NodeHistoryRecordTestCase(db_base.DbTestCase):
        mock_history.create = mock_create
        mock_history.assert_not_called()
        mock_create.assert_not_called()


class GetTokenProjectFromRequestTestCase(db_base.DbTestCase):

    def setUp(self):
        super(GetTokenProjectFromRequestTestCase, self).setUp()
        self.auth_token_info = {'token': {'project': {'id': 'user-project'}}}

    def test_no_token_info(self):
        self.assertIsNone(self.context.auth_token_info)
        res = conductor_utils.get_token_project_from_request(self.context)
        self.assertIsNone(res)

    def test_returns_project_id_if_present(self):
        self.context.auth_token_info = self.auth_token_info
        res = conductor_utils.get_token_project_from_request(self.context)
        self.assertEqual('user-project', res)
@@ -0,0 +1,11 @@
---
features:
  - |
    Adds a new feature to permit Ironic to automatically grant an instance
    requestor's project ``lessee`` rights to the Bare Metal machine under
    the Role Based Access Control model implemented in Ironic. It does this
    by automatically saving the requestor's project ID to the node
    ``lessee`` field, and removing those rights when the machine is
    undeployed. This feature is disabled by default, but can be enabled
    using the ``[conductor]automatic_lessee`` configuration option. This
    option will not work in a mixed-version upgrade with older API services.