Use dict comprehensions instead of dict constructor
PEP-0274 introduced dict comprehensions [1]; here are the benefits: The dictionary constructor approach has two distinct disadvantages from the proposed syntax though. First, it isn't as legible as a dict comprehension. Second, it forces the programmer to create an in-core list object first, which could be expensive. Keystone dropped python 2.6 support in Kilo, we can leverage this now. There is a deep dive about PEP-0274[2] and basic tests about performance[3]. Note: This commit doesn't handle dict constructor with kwargs. This commit also adds a hacking rule. [1]http://legacy.python.org/dev/peps/pep-0274/ [2]http://doughellmann.com/2012/11/12/the-performance-impact-of-using-dict-instead-of-in-cpython-2-7-2.html [3]http://paste.openstack.org/show/154798 Change-Id: Ie74719d0c969fa7819c243d5b162df6656c1e136
This commit is contained in:
parent
cf99025245
commit
d988d14801
|
@ -560,7 +560,7 @@ class Auth(controller.V3Controller):
|
||||||
# it's most likely that only one of these will be filled so avoid
|
# it's most likely that only one of these will be filled so avoid
|
||||||
# the combination if possible.
|
# the combination if possible.
|
||||||
if a and b:
|
if a and b:
|
||||||
return dict((x['id'], x) for x in a + b).values()
|
return {x['id']: x for x in a + b}.values()
|
||||||
else:
|
else:
|
||||||
return a or b
|
return a or b
|
||||||
|
|
||||||
|
|
|
@ -418,14 +418,13 @@ class MongoApi(object):
|
||||||
|
|
||||||
def get_multi(self, keys):
|
def get_multi(self, keys):
|
||||||
db_results = self._get_results_as_dict(keys)
|
db_results = self._get_results_as_dict(keys)
|
||||||
return dict((doc['_id'], doc['value']) for doc in
|
return {doc['_id']: doc['value'] for doc in six.itervalues(db_results)}
|
||||||
six.itervalues(db_results))
|
|
||||||
|
|
||||||
def _get_results_as_dict(self, keys):
|
def _get_results_as_dict(self, keys):
|
||||||
critieria = {'_id': {'$in': keys}}
|
critieria = {'_id': {'$in': keys}}
|
||||||
db_results = self.get_cache_collection().find(spec=critieria,
|
db_results = self.get_cache_collection().find(spec=critieria,
|
||||||
**self.meth_kwargs)
|
**self.meth_kwargs)
|
||||||
return dict((doc['_id'], doc) for doc in db_results)
|
return {doc['_id']: doc for doc in db_results}
|
||||||
|
|
||||||
def set(self, key, value):
|
def set(self, key, value):
|
||||||
doc_date = self._get_doc_date()
|
doc_date = self._get_doc_date()
|
||||||
|
|
|
@ -142,22 +142,21 @@ class MemcachedBackend(manager.Manager):
|
||||||
# all ``set`` and ``set_multi`` calls by the driver, by calling
|
# all ``set`` and ``set_multi`` calls by the driver, by calling
|
||||||
# the client directly it is possible to exclude the ``time``
|
# the client directly it is possible to exclude the ``time``
|
||||||
# argument to the memcached server.
|
# argument to the memcached server.
|
||||||
new_mapping = dict((k, mapping[k]) for k in no_expiry_keys)
|
new_mapping = {k: mapping[k] for k in no_expiry_keys}
|
||||||
set_arguments = self._get_set_arguments_driver_attr(
|
set_arguments = self._get_set_arguments_driver_attr(
|
||||||
exclude_expiry=True)
|
exclude_expiry=True)
|
||||||
self.driver.client.set_multi(new_mapping, **set_arguments)
|
self.driver.client.set_multi(new_mapping, **set_arguments)
|
||||||
|
|
||||||
if has_expiry_keys:
|
if has_expiry_keys:
|
||||||
new_mapping = dict((k, mapping[k]) for k in has_expiry_keys)
|
new_mapping = {k: mapping[k] for k in has_expiry_keys}
|
||||||
self.driver.set_multi(new_mapping)
|
self.driver.set_multi(new_mapping)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def from_config_dict(cls, config_dict, prefix):
|
def from_config_dict(cls, config_dict, prefix):
|
||||||
prefix_len = len(prefix)
|
prefix_len = len(prefix)
|
||||||
return cls(
|
return cls(
|
||||||
dict((key[prefix_len:], config_dict[key])
|
{key[prefix_len:]: config_dict[key] for key in config_dict
|
||||||
for key in config_dict
|
if key.startswith(prefix)})
|
||||||
if key.startswith(prefix)))
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def key_mangler(self):
|
def key_mangler(self):
|
||||||
|
|
|
@ -1319,7 +1319,7 @@ class BaseLdap(object):
|
||||||
# in a case-insensitive way. We use the case specified in the
|
# in a case-insensitive way. We use the case specified in the
|
||||||
# mapping for the model to ensure we have a predictable way of
|
# mapping for the model to ensure we have a predictable way of
|
||||||
# retrieving values later.
|
# retrieving values later.
|
||||||
lower_res = dict((k.lower(), v) for k, v in six.iteritems(res[1]))
|
lower_res = {k.lower(): v for k, v in six.iteritems(res[1])}
|
||||||
|
|
||||||
id_attrs = lower_res.get(self.id_attr.lower())
|
id_attrs = lower_res.get(self.id_attr.lower())
|
||||||
if not id_attrs:
|
if not id_attrs:
|
||||||
|
|
|
@ -125,8 +125,8 @@ class DictBase(models.ModelBase):
|
||||||
def from_dict(cls, d):
|
def from_dict(cls, d):
|
||||||
new_d = d.copy()
|
new_d = d.copy()
|
||||||
|
|
||||||
new_d['extra'] = dict((k, new_d.pop(k)) for k in six.iterkeys(d)
|
new_d['extra'] = {k: new_d.pop(k) for k in six.iterkeys(d)
|
||||||
if k not in cls.attributes and k != 'extra')
|
if k not in cls.attributes and k != 'extra'}
|
||||||
|
|
||||||
return cls(**new_d)
|
return cls(**new_d)
|
||||||
|
|
||||||
|
@ -163,7 +163,7 @@ class ModelDictMixin(object):
|
||||||
def to_dict(self):
|
def to_dict(self):
|
||||||
"""Returns the model's attributes as a dictionary."""
|
"""Returns the model's attributes as a dictionary."""
|
||||||
names = (column.name for column in self.__table__.columns)
|
names = (column.name for column in self.__table__.columns)
|
||||||
return dict((name, getattr(self, name)) for name in names)
|
return {name: getattr(self, name) for name in names}
|
||||||
|
|
||||||
|
|
||||||
_engine_facade = None
|
_engine_facade = None
|
||||||
|
|
|
@ -283,8 +283,7 @@ class Application(BaseApplication):
|
||||||
return arg.replace(':', '_').replace('-', '_')
|
return arg.replace(':', '_').replace('-', '_')
|
||||||
|
|
||||||
def _normalize_dict(self, d):
|
def _normalize_dict(self, d):
|
||||||
return dict([(self._normalize_arg(k), v)
|
return {self._normalize_arg(k): v for (k, v) in six.iteritems(d)}
|
||||||
for (k, v) in six.iteritems(d)])
|
|
||||||
|
|
||||||
def assert_admin(self, context):
|
def assert_admin(self, context):
|
||||||
if not context['is_admin']:
|
if not context['is_admin']:
|
||||||
|
|
|
@ -104,9 +104,9 @@ class EndpointFilterV3Controller(_ControllerBase):
|
||||||
"""List all endpoints currently associated with a given project."""
|
"""List all endpoints currently associated with a given project."""
|
||||||
self.resource_api.get_project(project_id)
|
self.resource_api.get_project(project_id)
|
||||||
refs = self.endpoint_filter_api.list_endpoints_for_project(project_id)
|
refs = self.endpoint_filter_api.list_endpoints_for_project(project_id)
|
||||||
filtered_endpoints = dict(
|
filtered_endpoints = {ref['endpoint_id']:
|
||||||
(ref['endpoint_id'], self.catalog_api.get_endpoint(
|
self.catalog_api.get_endpoint(ref['endpoint_id'])
|
||||||
ref['endpoint_id'])) for ref in refs)
|
for ref in refs}
|
||||||
|
|
||||||
# need to recover endpoint_groups associated with project
|
# need to recover endpoint_groups associated with project
|
||||||
# then for each endpoint group return the endpoints.
|
# then for each endpoint group return the endpoints.
|
||||||
|
|
|
@ -360,8 +360,8 @@ class RuleProcessor(object):
|
||||||
# This will create a new dictionary where the values are arrays, and
|
# This will create a new dictionary where the values are arrays, and
|
||||||
# any multiple values are stored in the arrays.
|
# any multiple values are stored in the arrays.
|
||||||
LOG.debug('assertion data: %s', assertion_data)
|
LOG.debug('assertion data: %s', assertion_data)
|
||||||
assertion = dict((n, v.split(';')) for n, v in assertion_data.items()
|
assertion = {n: v.split(';') for n, v in assertion_data.items()
|
||||||
if isinstance(v, six.string_types))
|
if isinstance(v, six.string_types)}
|
||||||
LOG.debug('assertion: %s', assertion)
|
LOG.debug('assertion: %s', assertion)
|
||||||
identity_values = []
|
identity_values = []
|
||||||
|
|
||||||
|
|
|
@ -140,7 +140,7 @@ def get_oauth_headers(headers):
|
||||||
|
|
||||||
def extract_non_oauth_params(query_string):
|
def extract_non_oauth_params(query_string):
|
||||||
params = oauthlib.common.extract_params(query_string)
|
params = oauthlib.common.extract_params(query_string)
|
||||||
return dict([(k, v) for k, v in params if not k.startswith('oauth_')])
|
return {k: v for k, v in params if not k.startswith('oauth_')}
|
||||||
|
|
||||||
|
|
||||||
@dependency.provider('oauth_api')
|
@dependency.provider('oauth_api')
|
||||||
|
|
|
@ -91,8 +91,8 @@ class RevokeEvent(object):
|
||||||
'audit_id',
|
'audit_id',
|
||||||
'audit_chain_id',
|
'audit_chain_id',
|
||||||
]
|
]
|
||||||
event = dict((key, self.__dict__[key]) for key in keys
|
event = {key: self.__dict__[key] for key in keys
|
||||||
if self.__dict__[key] is not None)
|
if self.__dict__[key] is not None}
|
||||||
if self.trust_id is not None:
|
if self.trust_id is not None:
|
||||||
event['OS-TRUST:trust_id'] = self.trust_id
|
event['OS-TRUST:trust_id'] = self.trust_id
|
||||||
if self.consumer_id is not None:
|
if self.consumer_id is not None:
|
||||||
|
|
|
@ -62,8 +62,8 @@ class Error(Exception):
|
||||||
message = self.message_format % kwargs
|
message = self.message_format % kwargs
|
||||||
except UnicodeDecodeError:
|
except UnicodeDecodeError:
|
||||||
try:
|
try:
|
||||||
kwargs = dict([(k, encodeutils.safe_decode(v)) for k, v in
|
kwargs = {k: encodeutils.safe_decode(v)
|
||||||
six.iteritems(kwargs)])
|
for k, v in six.iteritems(kwargs)}
|
||||||
except UnicodeDecodeError:
|
except UnicodeDecodeError:
|
||||||
# NOTE(jamielennox): This is the complete failure case
|
# NOTE(jamielennox): This is the complete failure case
|
||||||
# at least by showing the template we have some idea
|
# at least by showing the template we have some idea
|
||||||
|
|
|
@ -414,9 +414,33 @@ def check_oslo_namespace_imports(logical_line, blank_before, filename):
|
||||||
yield(0, msg)
|
yield(0, msg)
|
||||||
|
|
||||||
|
|
||||||
|
def dict_constructor_with_sequence_copy(logical_line):
|
||||||
|
"""Should use a dict comprehension instead of a dict constructor.
|
||||||
|
|
||||||
|
PEP-0274 introduced dict comprehension with performance enhancement
|
||||||
|
and it also makes code more readable.
|
||||||
|
|
||||||
|
Okay: lower_res = {k.lower(): v for k, v in six.iteritems(res[1])}
|
||||||
|
Okay: fool = dict(a='a', b='b')
|
||||||
|
K008: lower_res = dict((k.lower(), v) for k, v in six.iteritems(res[1]))
|
||||||
|
K008: attrs = dict([(k, _from_json(v))
|
||||||
|
K008: dict([[i,i] for i in range(3)])
|
||||||
|
|
||||||
|
"""
|
||||||
|
MESSAGE = ("K008 Must use a dict comprehension instead of a dict"
|
||||||
|
" constructor with a sequence of key-value pairs.")
|
||||||
|
|
||||||
|
dict_constructor_with_sequence_re = (
|
||||||
|
re.compile(r".*\bdict\((\[)?(\(|\[)(?!\{)"))
|
||||||
|
|
||||||
|
if dict_constructor_with_sequence_re.match(logical_line):
|
||||||
|
yield (0, MESSAGE)
|
||||||
|
|
||||||
|
|
||||||
def factory(register):
|
def factory(register):
|
||||||
register(CheckForMutableDefaultArgs)
|
register(CheckForMutableDefaultArgs)
|
||||||
register(block_comments_begin_with_a_space)
|
register(block_comments_begin_with_a_space)
|
||||||
register(CheckForAssertingNoneEquality)
|
register(CheckForAssertingNoneEquality)
|
||||||
register(CheckForLoggingIssues)
|
register(CheckForLoggingIssues)
|
||||||
register(check_oslo_namespace_imports)
|
register(check_oslo_namespace_imports)
|
||||||
|
register(dict_constructor_with_sequence_copy)
|
||||||
|
|
|
@ -58,13 +58,13 @@ class RoleTests(object):
|
||||||
role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
|
role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
|
||||||
self.role_api.create_role(role['id'], role)
|
self.role_api.create_role(role['id'], role)
|
||||||
role_ref = self.role_api.get_role(role['id'])
|
role_ref = self.role_api.get_role(role['id'])
|
||||||
role_ref_dict = dict((x, role_ref[x]) for x in role_ref)
|
role_ref_dict = {x: role_ref[x] for x in role_ref}
|
||||||
self.assertDictEqual(role_ref_dict, role)
|
self.assertDictEqual(role_ref_dict, role)
|
||||||
|
|
||||||
role['name'] = uuid.uuid4().hex
|
role['name'] = uuid.uuid4().hex
|
||||||
updated_role_ref = self.role_api.update_role(role['id'], role)
|
updated_role_ref = self.role_api.update_role(role['id'], role)
|
||||||
role_ref = self.role_api.get_role(role['id'])
|
role_ref = self.role_api.get_role(role['id'])
|
||||||
role_ref_dict = dict((x, role_ref[x]) for x in role_ref)
|
role_ref_dict = {x: role_ref[x] for x in role_ref}
|
||||||
self.assertDictEqual(role_ref_dict, role)
|
self.assertDictEqual(role_ref_dict, role)
|
||||||
self.assertDictEqual(role_ref_dict, updated_role_ref)
|
self.assertDictEqual(role_ref_dict, updated_role_ref)
|
||||||
|
|
||||||
|
|
|
@ -348,8 +348,7 @@ class FakeLdap(core.LDAPHandler):
|
||||||
core.utf8_decode(dn))
|
core.utf8_decode(dn))
|
||||||
raise ldap.ALREADY_EXISTS(dn)
|
raise ldap.ALREADY_EXISTS(dn)
|
||||||
|
|
||||||
self.db[key] = dict([(k, _internal_attr(k, v))
|
self.db[key] = {k: _internal_attr(k, v) for k, v in modlist}
|
||||||
for k, v in modlist])
|
|
||||||
self.db.sync()
|
self.db.sync()
|
||||||
|
|
||||||
def delete_s(self, dn):
|
def delete_s(self, dn):
|
||||||
|
@ -512,8 +511,8 @@ class FakeLdap(core.LDAPHandler):
|
||||||
match_attrs[id_attr] = [id_val]
|
match_attrs[id_attr] = [id_val]
|
||||||
if not filterstr or _match_query(filterstr, match_attrs):
|
if not filterstr or _match_query(filterstr, match_attrs):
|
||||||
# filter the attributes by attrlist
|
# filter the attributes by attrlist
|
||||||
attrs = dict([(k, v) for k, v in six.iteritems(attrs)
|
attrs = {k: v for k, v in six.iteritems(attrs)
|
||||||
if not attrlist or k in attrlist])
|
if not attrlist or k in attrlist}
|
||||||
objects.append((dn, attrs))
|
objects.append((dn, attrs))
|
||||||
|
|
||||||
return objects
|
return objects
|
||||||
|
|
|
@ -257,6 +257,21 @@ class HackingCode(fixtures.Fixture):
|
||||||
],
|
],
|
||||||
}
|
}
|
||||||
|
|
||||||
|
dict_constructor = {
|
||||||
|
'code': """
|
||||||
|
lower_res = {k.lower(): v for k, v in six.iteritems(res[1])}
|
||||||
|
fool = dict(a='a', b='b')
|
||||||
|
lower_res = dict((k.lower(), v) for k, v in six.iteritems(res[1]))
|
||||||
|
attrs = dict([(k, _from_json(v))])
|
||||||
|
dict([[i,i] for i in range(3)])
|
||||||
|
dict(({1:2}))
|
||||||
|
""",
|
||||||
|
'expected_errors': [
|
||||||
|
(3, 0, 'K008'),
|
||||||
|
(4, 0, 'K008'),
|
||||||
|
(5, 0, 'K008'),
|
||||||
|
]}
|
||||||
|
|
||||||
|
|
||||||
class HackingLogging(fixtures.Fixture):
|
class HackingLogging(fixtures.Fixture):
|
||||||
|
|
||||||
|
|
|
@ -78,8 +78,7 @@ class RestfulTestCase(tests.TestCase):
|
||||||
def request(self, app, path, body=None, headers=None, token=None,
|
def request(self, app, path, body=None, headers=None, token=None,
|
||||||
expected_status=None, **kwargs):
|
expected_status=None, **kwargs):
|
||||||
if headers:
|
if headers:
|
||||||
headers = dict([(str(k), str(v)) for k, v
|
headers = {str(k): str(v) for k, v in six.iteritems(headers)}
|
||||||
in six.iteritems(headers)])
|
|
||||||
else:
|
else:
|
||||||
headers = {}
|
headers = {}
|
||||||
|
|
||||||
|
|
|
@ -63,15 +63,14 @@ class TestAuthPlugin(tests.SQLDriverOverrides, tests.TestCase):
|
||||||
|
|
||||||
def config_overrides(self):
|
def config_overrides(self):
|
||||||
super(TestAuthPlugin, self).config_overrides()
|
super(TestAuthPlugin, self).config_overrides()
|
||||||
method_opts = dict(
|
method_opts = {
|
||||||
[
|
'external': 'keystone.auth.plugins.external.DefaultDomain',
|
||||||
('external', 'keystone.auth.plugins.external.DefaultDomain'),
|
'password': 'keystone.auth.plugins.password.Password',
|
||||||
('password', 'keystone.auth.plugins.password.Password'),
|
'token': 'keystone.auth.plugins.token.Token',
|
||||||
('token', 'keystone.auth.plugins.token.Token'),
|
METHOD_NAME:
|
||||||
(METHOD_NAME,
|
'keystone.tests.unit.test_auth_plugin.SimpleChallengeResponse',
|
||||||
'keystone.tests.unit.test_auth_plugin.'
|
}
|
||||||
'SimpleChallengeResponse'),
|
|
||||||
])
|
|
||||||
self.auth_plugin_config_override(
|
self.auth_plugin_config_override(
|
||||||
methods=['external', 'password', 'token', METHOD_NAME],
|
methods=['external', 'password', 'token', METHOD_NAME],
|
||||||
**method_opts)
|
**method_opts)
|
||||||
|
|
|
@ -2962,14 +2962,14 @@ class IdentityTests(object):
|
||||||
user = self.identity_api.create_user(user_dict)
|
user = self.identity_api.create_user(user_dict)
|
||||||
user_ref = self.identity_api.get_user(user['id'])
|
user_ref = self.identity_api.get_user(user['id'])
|
||||||
del user_dict['password']
|
del user_dict['password']
|
||||||
user_ref_dict = dict((x, user_ref[x]) for x in user_ref)
|
user_ref_dict = {x: user_ref[x] for x in user_ref}
|
||||||
self.assertDictContainsSubset(user_dict, user_ref_dict)
|
self.assertDictContainsSubset(user_dict, user_ref_dict)
|
||||||
|
|
||||||
user_dict['password'] = uuid.uuid4().hex
|
user_dict['password'] = uuid.uuid4().hex
|
||||||
self.identity_api.update_user(user['id'], user_dict)
|
self.identity_api.update_user(user['id'], user_dict)
|
||||||
user_ref = self.identity_api.get_user(user['id'])
|
user_ref = self.identity_api.get_user(user['id'])
|
||||||
del user_dict['password']
|
del user_dict['password']
|
||||||
user_ref_dict = dict((x, user_ref[x]) for x in user_ref)
|
user_ref_dict = {x: user_ref[x] for x in user_ref}
|
||||||
self.assertDictContainsSubset(user_dict, user_ref_dict)
|
self.assertDictContainsSubset(user_dict, user_ref_dict)
|
||||||
|
|
||||||
self.identity_api.delete_user(user['id'])
|
self.identity_api.delete_user(user['id'])
|
||||||
|
|
|
@ -2062,14 +2062,14 @@ class LDAPIdentityEnabledEmulation(LDAPIdentity):
|
||||||
user_dict['enabled'] = True
|
user_dict['enabled'] = True
|
||||||
user_ref = self.identity_api.get_user(user['id'])
|
user_ref = self.identity_api.get_user(user['id'])
|
||||||
del user_dict['password']
|
del user_dict['password']
|
||||||
user_ref_dict = dict((x, user_ref[x]) for x in user_ref)
|
user_ref_dict = {x: user_ref[x] for x in user_ref}
|
||||||
self.assertDictContainsSubset(user_dict, user_ref_dict)
|
self.assertDictContainsSubset(user_dict, user_ref_dict)
|
||||||
|
|
||||||
user_dict['password'] = uuid.uuid4().hex
|
user_dict['password'] = uuid.uuid4().hex
|
||||||
self.identity_api.update_user(user['id'], user)
|
self.identity_api.update_user(user['id'], user)
|
||||||
user_ref = self.identity_api.get_user(user['id'])
|
user_ref = self.identity_api.get_user(user['id'])
|
||||||
del user_dict['password']
|
del user_dict['password']
|
||||||
user_ref_dict = dict((x, user_ref[x]) for x in user_ref)
|
user_ref_dict = {x: user_ref[x] for x in user_ref}
|
||||||
self.assertDictContainsSubset(user_dict, user_ref_dict)
|
self.assertDictContainsSubset(user_dict, user_ref_dict)
|
||||||
|
|
||||||
self.identity_api.delete_user(user['id'])
|
self.identity_api.delete_user(user['id'])
|
||||||
|
|
|
@ -198,7 +198,7 @@ class MockCollection(object):
|
||||||
existing_doc = self._documents[self._insert(document)]
|
existing_doc = self._documents[self._insert(document)]
|
||||||
|
|
||||||
def _internalize_dict(self, d):
|
def _internalize_dict(self, d):
|
||||||
return dict((k, copy.deepcopy(v)) for k, v in six.iteritems(d))
|
return {k: copy.deepcopy(v) for k, v in six.iteritems(d)}
|
||||||
|
|
||||||
def remove(self, spec_or_id=None, search_filter=None):
|
def remove(self, spec_or_id=None, search_filter=None):
|
||||||
"""Remove objects matching spec_or_id from the collection."""
|
"""Remove objects matching spec_or_id from the collection."""
|
||||||
|
|
|
@ -130,3 +130,14 @@ class TestCheckOsloNamespaceImports(BaseStyleCheck):
|
||||||
code = self.code_ex.oslo_namespace_imports['code']
|
code = self.code_ex.oslo_namespace_imports['code']
|
||||||
errors = self.code_ex.oslo_namespace_imports['expected_errors']
|
errors = self.code_ex.oslo_namespace_imports['expected_errors']
|
||||||
self.assert_has_errors(code, expected_errors=errors)
|
self.assert_has_errors(code, expected_errors=errors)
|
||||||
|
|
||||||
|
|
||||||
|
class TestDictConstructorWithSequenceCopy(BaseStyleCheck):
|
||||||
|
|
||||||
|
def get_checker(self):
|
||||||
|
return checks.dict_constructor_with_sequence_copy
|
||||||
|
|
||||||
|
def test(self):
|
||||||
|
code = self.code_ex.dict_constructor['code']
|
||||||
|
errors = self.code_ex.dict_constructor['expected_errors']
|
||||||
|
self.assert_has_errors(code, expected_errors=errors)
|
||||||
|
|
|
@ -479,8 +479,8 @@ class KVSTest(tests.TestCase):
|
||||||
expected_foo_keys = [self.key_foo]
|
expected_foo_keys = [self.key_foo]
|
||||||
expected_bar_keys = [self.key_bar]
|
expected_bar_keys = [self.key_bar]
|
||||||
|
|
||||||
mapping_foo = dict([(self.key_foo, self.value_foo)])
|
mapping_foo = {self.key_foo: self.value_foo}
|
||||||
mapping_bar = dict([(self.key_bar, self.value_bar)])
|
mapping_bar = {self.key_bar: self.value_bar}
|
||||||
|
|
||||||
kvs.configure(backing_store='openstack.kvs.Memcached',
|
kvs.configure(backing_store='openstack.kvs.Memcached',
|
||||||
memcached_backend='TestDriver',
|
memcached_backend='TestDriver',
|
||||||
|
|
|
@ -94,8 +94,7 @@ class PolicyTestCase(tests.TestCase):
|
||||||
|
|
||||||
def _set_rules(self):
|
def _set_rules(self):
|
||||||
these_rules = common_policy.Rules(
|
these_rules = common_policy.Rules(
|
||||||
dict((k, common_policy.parse_rule(v))
|
{k: common_policy.parse_rule(v)for k, v in self.rules.items()})
|
||||||
for k, v in self.rules.items()))
|
|
||||||
rules._ENFORCER.set_rules(these_rules)
|
rules._ENFORCER.set_rules(these_rules)
|
||||||
|
|
||||||
def test_enforce_nonexistent_action_throws(self):
|
def test_enforce_nonexistent_action_throws(self):
|
||||||
|
@ -189,8 +188,8 @@ class DefaultPolicyTestCase(tests.TestCase):
|
||||||
|
|
||||||
def _set_rules(self, default_rule):
|
def _set_rules(self, default_rule):
|
||||||
these_rules = common_policy.Rules(
|
these_rules = common_policy.Rules(
|
||||||
dict((k, common_policy.parse_rule(v))
|
{k: common_policy.parse_rule(v) for k, v in self.rules.items()},
|
||||||
for k, v in self.rules.items()), default_rule)
|
default_rule)
|
||||||
rules._ENFORCER.set_rules(these_rules)
|
rules._ENFORCER.set_rules(these_rules)
|
||||||
|
|
||||||
def test_policy_called(self):
|
def test_policy_called(self):
|
||||||
|
|
|
@ -42,7 +42,7 @@ class V3ControllerTestCase(tests.TestCase):
|
||||||
|
|
||||||
def test_check_immutable_params_fail(self):
|
def test_check_immutable_params_fail(self):
|
||||||
"""Pass invalid parameter to the method and expect failure."""
|
"""Pass invalid parameter to the method and expect failure."""
|
||||||
ref = dict([(uuid.uuid4().hex, uuid.uuid4().hex) for _ in range(3)])
|
ref = {uuid.uuid4().hex: uuid.uuid4().hex for _ in range(3)}
|
||||||
|
|
||||||
ex = self.assertRaises(exception.ImmutableAttributeError,
|
ex = self.assertRaises(exception.ImmutableAttributeError,
|
||||||
self.api.check_immutable_params, ref)
|
self.api.check_immutable_params, ref)
|
||||||
|
|
|
@ -106,7 +106,7 @@ class TrustV3(controller.V3Controller):
|
||||||
|
|
||||||
def _normalize_role_list(self, trust, all_roles):
|
def _normalize_role_list(self, trust, all_roles):
|
||||||
trust_roles = []
|
trust_roles = []
|
||||||
all_role_names = dict((r['name'], r) for r in all_roles)
|
all_role_names = {r['name']: r for r in all_roles}
|
||||||
for role in trust.get('roles', []):
|
for role in trust.get('roles', []):
|
||||||
if 'id' in role:
|
if 'id' in role:
|
||||||
trust_roles.append({'id': role['id']})
|
trust_roles.append({'id': role['id']})
|
||||||
|
|
Loading…
Reference in New Issue