Fixes use of dict methods for Python3
In Python 3, the dict.keys(), dict.values(), and dict.items() methods return view objects instead of lists. This breaks code that expects a list, so such call sites are wrapped in list() (or switched to six.iteritems) below.

bp python3

Change-Id: Id0d55ea4b992666848af1b1a055bc7841548cc6a
parent 2874082ceb
commit c6e2beaa69
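For context, a minimal sketch (not part of the patch) of the behavior change driving every hunk below; the dict d is illustrative:

    d = {'a': 1, 'b': 2}
    keys = d.keys()   # Python 2: a plain list; Python 3: a dict_keys view
    keys[0]           # fine on Python 2; TypeError on Python 3 (views are not indexable)
    list(keys)[0]     # works on both: materialize the view first

Views are also not concatenable with lists and never compare equal to lists, which is why the recurring fix is to wrap the call in list() wherever list semantics are assumed.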
@@ -88,7 +88,7 @@ class Endpoint(controller.V2Controller):
             # add the legacy endpoint with an interface url
             legacy_ep['%surl' % endpoint['interface']] = endpoint['url']
-        return {'endpoints': legacy_endpoints.values()}
+        return {'endpoints': list(legacy_endpoints.values())}

     @controller.v2_deprecated
     def create_endpoint(self, context, endpoint):
keystone/common/cache/backends/mongo.py (6 changes, vendored)
@@ -451,7 +451,7 @@ class MongoApi(object):
         doc_date = self._get_doc_date()
         insert_refs = []
         update_refs = []
-        existing_docs = self._get_results_as_dict(mapping.keys())
+        existing_docs = self._get_results_as_dict(list(mapping.keys()))
         for key, value in mapping.items():
             ref = self._get_cache_entry(key, value.payload, value.metadata,
                                         doc_date)
@@ -536,7 +536,7 @@ class BaseTransform(AbstractManipulator):

     def transform_incoming(self, son, collection):
         """Used while saving data to MongoDB."""
-        for (key, value) in son.items():
+        for (key, value) in list(son.items()):
             if isinstance(value, api.CachedValue):
                 son[key] = value.payload  # key is 'value' field here
                 son['meta'] = value.metadata
@@ -553,7 +553,7 @@ class BaseTransform(AbstractManipulator):
                 ('_id', 'value', 'meta', 'doc_date')):
             payload = son.pop('value', None)
             metadata = son.pop('meta', None)
-        for (key, value) in son.items():
+        for (key, value) in list(son.items()):
             if isinstance(value, dict):
                 son[key] = self.transform_outgoing(value, collection)
         if metadata is not None:
@@ -1152,4 +1152,4 @@ def list_opts():

     :returns: a list of (group_name, opts) tuples
     """
-    return FILE_OPTIONS.items()
+    return list(FILE_OPTIONS.items())
@@ -617,7 +617,7 @@ def _common_ldap_initialization(url, use_tls=False, tls_cacertfile=None,
                                 "or is not a directory") %
                               tls_cacertdir)
         ldap.set_option(ldap.OPT_X_TLS_CACERTDIR, tls_cacertdir)
-    if tls_req_cert in LDAP_TLS_CERTS.values():
+    if tls_req_cert in list(LDAP_TLS_CERTS.values()):
         ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, tls_req_cert)
     else:
         LOG.debug("LDAP TLS: invalid TLS_REQUIRE_CERT Option=%s",
@@ -1440,8 +1440,8 @@ class BaseLdap(object):
         with self.get_connection() as conn:
             try:
                 attrs = list(set(([self.id_attr] +
-                                  self.attribute_mapping.values() +
-                                  self.extra_attr_mapping.keys())))
+                                  list(self.attribute_mapping.values()) +
+                                  list(self.extra_attr_mapping.keys()))))
                 res = conn.search_s(self.tree_dn,
                                     self.LDAP_SCOPE,
                                     query,
@@ -1460,8 +1460,8 @@ class BaseLdap(object):
         with self.get_connection() as conn:
             try:
                 attrs = list(set(([self.id_attr] +
-                                  self.attribute_mapping.values() +
-                                  self.extra_attr_mapping.keys())))
+                                  list(self.attribute_mapping.values()) +
+                                  list(self.extra_attr_mapping.keys()))))
                 return conn.search_s(self.tree_dn,
                                      self.LDAP_SCOPE,
                                      query,
@@ -27,7 +27,8 @@ def upgrade(migrate_engine):
     # names, depending on version of MySQL used. We shoud make this naming
     # consistent, by reverting index name to a consistent condition.
     if any(i for i in endpoint.indexes if
-           i.columns.keys() == ['service_id'] and i.name != 'service_id'):
+           list(i.columns.keys()) == ['service_id']
+           and i.name != 'service_id'):
         # NOTE(i159): by this action will be made re-creation of an index
         # with the new name. This can be considered as renaming under the
         # MySQL rules.
@@ -37,5 +38,6 @@ def upgrade(migrate_engine):
                                           meta, autoload=True)

     if any(i for i in user_group_membership.indexes if
-           i.columns.keys() == ['group_id'] and i.name != 'group_id'):
+           list(i.columns.keys()) == ['group_id']
+           and i.name != 'group_id'):
         sa.Index('group_id', user_group_membership.c.group_id).create()
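The two migration hunks above fix more than style: on Python 3 a keys view never compares equal to a list, so the old condition was silently always False. A sketch with a throwaway dict:

    d = {'service_id': 1}
    d.keys() == ['service_id']        # True on Python 2, always False on Python 3
    list(d.keys()) == ['service_id']  # True on both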
@@ -51,7 +51,7 @@ def flatten_dict(d, parent_key=''):
     for k, v in d.items():
         new_key = parent_key + '.' + k if parent_key else k
         if isinstance(v, collections.MutableMapping):
-            items.extend(flatten_dict(v, new_key).items())
+            items.extend(list(flatten_dict(v, new_key).items()))
         else:
             items.append((new_key, v))
     return dict(items)
@@ -81,7 +81,7 @@ class SmarterEncoder(jsonutils.json.JSONEncoder):
     """Help for JSON encoding dict-like objects."""
     def default(self, obj):
         if not isinstance(obj, dict) and hasattr(obj, 'iteritems'):
-            return dict(obj.iteritems())
+            return dict(six.iteritems(obj))
         return super(SmarterEncoder, self).default(obj)

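six.iteritems gives one spelling for both interpreters, dispatching to iteritems() on Python 2 and items() on Python 3. A small sketch with an ordinary dict (illustrative only):

    import six

    d = {'a': 1}
    dict(six.iteritems(d))  # {'a': 1} on both Python 2 and Python 3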
@@ -356,7 +356,7 @@ def transform_to_group_ids(group_names, mapping_id,
 def get_assertion_params_from_env(context):
     LOG.debug('Environment variables: %s', context['environment'])
     prefix = CONF.federation.assertion_prefix
-    for k, v in context['environment'].items():
+    for k, v in list(context['environment'].items()):
         if k.startswith(prefix):
             yield (k, v)
@@ -26,7 +26,8 @@ def upgrade(migrate_engine):
     # indexes create automatically. That those indexes will have different
     # names, depending on version of MySQL used. We shoud make this naming
     # consistent, by reverting index name to a consistent condition.
-    if any(i for i in table.indexes if i.columns.keys() == ['consumer_id']
+    if any(i for i in table.indexes if
+           list(i.columns.keys()) == ['consumer_id']
            and i.name != 'consumer_id'):
         # NOTE(i159): by this action will be made re-creation of an index
         # with the new name. This can be considered as renaming under the
@@ -63,7 +63,7 @@ class Extensions(wsgi.Application):
         return None

     def get_extensions_info(self, context):
-        return {'extensions': {'values': self.extensions.values()}}
+        return {'extensions': {'values': list(self.extensions.values())}}

     def get_extension_info(self, context, extension_alias):
         try:
@@ -177,7 +177,7 @@ class Version(wsgi.Application):
         versions = self._get_versions_list(context)
         return wsgi.render_response(status=(300, 'Multiple Choices'), body={
             'versions': {
-                'values': versions.values()
+                'values': list(versions.values())
             }
         })

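The wrapping in the Version controller matters for serialization, not just indexing: the standard json encoder cannot serialize a dict_values object on Python 3. A sketch with illustrative version data:

    import json

    versions = {'v3': {'status': 'stable'}}
    json.dumps({'values': versions.values()})        # TypeError on Python 3
    json.dumps({'values': list(versions.values())})  # works on both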
@@ -1093,14 +1093,14 @@ class DomainConfigManager(manager.Manager):
                         'provided contains group %(group_other)s '
                         'instead') % {
                             'group': group,
-                            'group_other': config.keys()[0]}
+                            'group_other': list(config.keys())[0]}
                 raise exception.InvalidDomainConfig(reason=msg)
             if option and option not in config[group]:
                 msg = _('Trying to update option %(option)s in group '
                         '%(group)s, but config provided contains option '
                         '%(option_other)s instead') % {
                             'group': group, 'option': option,
-                            'option_other': config[group].keys()[0]}
+                            'option_other': list(config[group].keys())[0]}
                 raise exception.InvalidDomainConfig(reason=msg)

         # Finally, we need to check if the group/option specified
@@ -21,7 +21,7 @@ from keystone.tests import unit as tests


 # List of 2-tuples, (pem_type, pem_header)
-headers = pemutils.PEM_TYPE_TO_HEADER.items()
+headers = list(pemutils.PEM_TYPE_TO_HEADER.items())


 def make_data(size, offset=0):
@@ -412,7 +412,7 @@ class TestCase(BaseTestCase):

         for manager_name, manager in six.iteritems(drivers):
             setattr(self, manager_name, manager)
-        self.addCleanup(self.cleanup_instance(*drivers.keys()))
+        self.addCleanup(self.cleanup_instance(*list(drivers.keys())))

     def load_extra_backends(self):
         """Override to load managers that aren't loaded by default.
@@ -254,7 +254,7 @@ class FakeLdap(core.LDAPHandler):
             ldap.set_option(ldap.OPT_X_TLS_CACERTFILE, tls_cacertfile)
         elif tls_cacertdir:
             ldap.set_option(ldap.OPT_X_TLS_CACERTDIR, tls_cacertdir)
-        if tls_req_cert in core.LDAP_TLS_CERTS.values():
+        if tls_req_cert in list(core.LDAP_TLS_CERTS.values()):
             ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, tls_req_cert)
         else:
             raise ValueError("invalid TLS_REQUIRE_CERT tls_req_cert=%s",
@@ -160,7 +160,7 @@ class MockCollection(object):
             return new
         if isinstance(obj, dict):
             new = container()
-            for key, value in obj.items():
+            for key, value in list(obj.items()):
                 new[key] = self._copy_doc(value, container)
             return new
         else:
@@ -488,6 +488,8 @@ class KVSTest(tests.TestCase):
                           memcached_expire_time=memcache_expire_time,
                           some_other_arg=uuid.uuid4().hex,
                           no_expiry_keys=[self.key_bar])
+        kvs_driver = kvs._region.backend.driver
+
         # Ensure the set_arguments are correct
         self.assertDictEqual(
             kvs._region.backend._get_set_arguments_driver_attr(),
@@ -499,8 +501,8 @@ class KVSTest(tests.TestCase):
         self.assertDictEqual(
             kvs._region.backend.driver.client.set_arguments_passed,
             expected_set_args)
-        self.assertEqual(expected_foo_keys,
-                         kvs._region.backend.driver.client.keys_values.keys())
+        observed_foo_keys = list(kvs_driver.client.keys_values.keys())
+        self.assertEqual(expected_foo_keys, observed_foo_keys)
         self.assertEqual(
             self.value_foo,
             kvs._region.backend.driver.client.keys_values[self.key_foo][0])
@@ -511,8 +513,8 @@ class KVSTest(tests.TestCase):
         self.assertDictEqual(
             kvs._region.backend.driver.client.set_arguments_passed,
             expected_no_expiry_args)
-        self.assertEqual(expected_bar_keys,
-                         kvs._region.backend.driver.client.keys_values.keys())
+        observed_bar_keys = list(kvs_driver.client.keys_values.keys())
+        self.assertEqual(expected_bar_keys, observed_bar_keys)
         self.assertEqual(
             self.value_bar,
             kvs._region.backend.driver.client.keys_values[self.key_bar][0])
@@ -523,8 +525,8 @@ class KVSTest(tests.TestCase):
         self.assertDictEqual(
             kvs._region.backend.driver.client.set_arguments_passed,
             expected_set_args)
-        self.assertEqual(expected_foo_keys,
-                         kvs._region.backend.driver.client.keys_values.keys())
+        observed_foo_keys = list(kvs_driver.client.keys_values.keys())
+        self.assertEqual(expected_foo_keys, observed_foo_keys)
         self.assertEqual(
             self.value_foo,
             kvs._region.backend.driver.client.keys_values[self.key_foo][0])
@@ -535,8 +537,8 @@ class KVSTest(tests.TestCase):
         self.assertDictEqual(
             kvs._region.backend.driver.client.set_arguments_passed,
             expected_no_expiry_args)
-        self.assertEqual(expected_bar_keys,
-                         kvs._region.backend.driver.client.keys_values.keys())
+        observed_bar_keys = list(kvs_driver.client.keys_values.keys())
+        self.assertEqual(expected_bar_keys, observed_bar_keys)
         self.assertEqual(
             self.value_bar,
             kvs._region.backend.driver.client.keys_values[self.key_bar][0])
@@ -405,14 +405,16 @@ class SqlUpgradeTests(SqlMigrateBase):
         self.upgrade(53)
         self.upgrade(54)
         table = sqlalchemy.Table('assignment', self.metadata, autoload=True)
-        index_data = [(idx.name, idx.columns.keys()) for idx in table.indexes]
+        index_data = [(idx.name, list(idx.columns.keys()))
+                      for idx in table.indexes]
         self.assertIn(('ix_actor_id', ['actor_id']), index_data)

     def test_token_user_id_and_trust_id_index_upgrade(self):
         self.upgrade(54)
         self.upgrade(55)
         table = sqlalchemy.Table('token', self.metadata, autoload=True)
-        index_data = [(idx.name, idx.columns.keys()) for idx in table.indexes]
+        index_data = [(idx.name, list(idx.columns.keys()))
+                      for idx in table.indexes]
         self.assertIn(('ix_token_user_id', ['user_id']), index_data)
         self.assertIn(('ix_token_trust_id', ['trust_id']), index_data)
@@ -651,7 +651,7 @@ class RestfulTestCase(tests.SQLDriverOverrides, rest.RestfulTestCase,
         of those in expected.

         """
-        for k, v in expected.iteritems():
+        for k, v in six.iteritems(expected):
             self.assertIn(k, actual)
             if isinstance(v, dict):
                 self.assertDictContainsSubset(v, actual[k])
@@ -803,7 +803,7 @@ class RestfulTestCase(tests.SQLDriverOverrides, rest.RestfulTestCase,
         self.assertValidCatalog(resp.json['catalog'])
         self.assertIn('links', resp.json)
         self.assertIsInstance(resp.json['links'], dict)
-        self.assertEqual(['self'], resp.json['links'].keys())
+        self.assertEqual(['self'], list(resp.json['links'].keys()))
         self.assertEqual(
             'http://localhost/v3/auth/catalog',
             resp.json['links']['self'])
@@ -109,14 +109,14 @@ class FederatedSetupMixin(object):
         self.assertEqual(token_projects, projects_ref)

     def _check_scoped_token_attributes(self, token):
-        def xor_project_domain(iterable):
-            return sum(('project' in iterable, 'domain' in iterable)) % 2
+        def xor_project_domain(token_keys):
+            return sum(('project' in token_keys, 'domain' in token_keys)) % 2

         for obj in ('user', 'catalog', 'expires_at', 'issued_at',
                     'methods', 'roles'):
             self.assertIn(obj, token)
         # Check for either project or domain
-        if not xor_project_domain(token.keys()):
+        if not xor_project_domain(list(token.keys())):
             raise AssertionError("You must specify either"
                                  "project or domain.")
@@ -195,7 +195,7 @@ class V2TokenDataHelper(object):
             new_service_ref['endpoints'] = endpoints_ref
             services[service] = new_service_ref

-        return services.values()
+        return list(services.values())


 @dependency.requires('assignment_api', 'catalog_api', 'federation_api',
@@ -178,7 +178,7 @@ def rotate_keys(keystone_user_id=None, keystone_group_id=None):

     LOG.info(_LI('Starting key rotation with %(count)s key files: %(list)s'), {
         'count': len(key_files),
-        'list': key_files.values()})
+        'list': list(key_files.values())})

     # determine the number of the new primary key
     current_primary_key = max(key_files.keys())
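Note that not every call site needs wrapping: views are still iterable, so purely iterating constructs such as max(key_files.keys()) in the context above work unchanged on Python 3. A sketch with made-up key numbers and paths:

    key_files = {0: '/path/to/key/0', 1: '/path/to/key/1'}  # illustrative values
    max(key_files.keys())   # 1 -- max() accepts any iterable, views included
    0 in key_files.keys()   # True -- membership tests also work on views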