Adds hacking check for debug logging translations

bp more-code-style-automation

Change-Id: Id54f322f00b04a165bb4a7b1e24f95bb72b7f068
This commit is contained in:
David Stanek 2014-06-20 16:40:22 +00:00
parent 95bc069519
commit 76baf5b691
20 changed files with 270 additions and 79 deletions

View File

@ -140,8 +140,8 @@ class Tenant(controller.V2Controller):
user_ref = self.identity_api.get_user(user_id)
except exception.UserNotFound:
# Log that user is missing and continue on.
message = _("User %(user_id)s in project %(project_id)s "
"doesn't exist.")
message = ("User %(user_id)s in project %(project_id)s "
"doesn't exist.")
LOG.debug(message,
{'user_id': user_id, 'project_id': tenant_id})
else:

View File

@ -272,8 +272,7 @@ class Manager(manager.Manager):
project_id=tenant_id)
except exception.RoleNotFound:
LOG.debug(_("Removing role %s failed because it does not "
"exist."),
LOG.debug("Removing role %s failed because it does not exist.",
role_id)
# TODO(henry-nash): We might want to consider list limiting this at some
@ -387,9 +386,9 @@ class Manager(manager.Manager):
try:
self.delete_project(project['id'])
except exception.ProjectNotFound:
LOG.debug(_('Project %(projectid)s not found when '
'deleting domain contents for %(domainid)s, '
'continuing with cleanup.'),
LOG.debug(('Project %(projectid)s not found when '
'deleting domain contents for %(domainid)s, '
'continuing with cleanup.'),
{'projectid': project['id'],
'domainid': domain_id})
@ -400,9 +399,9 @@ class Manager(manager.Manager):
self.identity_api.delete_group(group['id'],
domain_scope=domain_id)
except exception.GroupNotFound:
LOG.debug(_('Group %(groupid)s not found when deleting '
'domain contents for %(domainid)s, continuing '
'with cleanup.'),
LOG.debug(('Group %(groupid)s not found when deleting '
'domain contents for %(domainid)s, continuing '
'with cleanup.'),
{'groupid': group['id'], 'domainid': domain_id})
# And finally, delete the users themselves
@ -412,9 +411,9 @@ class Manager(manager.Manager):
self.identity_api.delete_user(user['id'],
domain_scope=domain_id)
except exception.UserNotFound:
LOG.debug(_('User %(userid)s not found when '
'deleting domain contents for %(domainid)s, '
'continuing with cleanup.'),
LOG.debug(('User %(userid)s not found when '
'deleting domain contents for %(domainid)s, '
'continuing with cleanup.'),
{'userid': user['id'],
'domainid': domain_id})
@ -524,7 +523,7 @@ class Manager(manager.Manager):
user_id=user['id'], role_id=role_id,
domain_id=domain_id, project_id=project_id)
except exception.GroupNotFound:
LOG.debug(_('Group %s not found, no tokens to invalidate.'),
LOG.debug('Group %s not found, no tokens to invalidate.',
group_id)
self.driver.delete_grant(role_id, user_id, group_id, domain_id,
@ -567,8 +566,8 @@ class Manager(manager.Manager):
target = _('Domain (%s)') % assignment['domain_id']
else:
target = _('Unknown Target')
msg = _('Group (%(group)s), referenced in assignment '
'for %(target)s, not found - ignoring.')
msg = ('Group (%(group)s), referenced in assignment '
'for %(target)s, not found - ignoring.')
LOG.debug(msg, {'group': assignment['group_id'],
'target': target})
continue

View File

@ -58,7 +58,7 @@ def v3_token_to_auth_context(token):
if 'project' in token_data:
creds['project_id'] = token_data['project']['id']
else:
LOG.debug(_('RBAC: Proceeding without project'))
LOG.debug('RBAC: Proceeding without project')
if 'domain' in token_data:
creds['domain_id'] = token_data['domain']['id']
if 'roles' in token_data:
@ -102,7 +102,7 @@ def v2_token_to_auth_context(token):
if 'tenant' in token_data['token']:
creds['project_id'] = token_data['token']['tenant']['id']
else:
LOG.debug(_('RBAC: Proceeding without tenant'))
LOG.debug('RBAC: Proceeding without tenant')
if 'roles' in token_data['user']:
creds['roles'] = [role['name'] for
role in token_data['user']['roles']]

View File

@ -51,31 +51,31 @@ class DebugProxy(proxy.ProxyBackend):
def get(self, key):
value = self.proxied.get(key)
LOG.debug(_('CACHE_GET: Key: "%(key)r" Value: "%(value)r"'),
LOG.debug('CACHE_GET: Key: "%(key)r" Value: "%(value)r"',
{'key': key, 'value': value})
return value
def get_multi(self, keys):
values = self.proxied.get_multi(keys)
LOG.debug(_('CACHE_GET_MULTI: "%(keys)r" Values: "%(values)r"'),
LOG.debug('CACHE_GET_MULTI: "%(keys)r" Values: "%(values)r"',
{'keys': keys, 'values': values})
return values
def set(self, key, value):
LOG.debug(_('CACHE_SET: Key: "%(key)r" Value: "%(value)r"'),
LOG.debug('CACHE_SET: Key: "%(key)r" Value: "%(value)r"',
{'key': key, 'value': value})
return self.proxied.set(key, value)
def set_multi(self, keys):
LOG.debug(_('CACHE_SET_MULTI: "%r"'), keys)
LOG.debug('CACHE_SET_MULTI: "%r"', keys)
self.proxied.set_multi(keys)
def delete(self, key):
self.proxied.delete(key)
LOG.debug(_('CACHE_DELETE: "%r"'), key)
LOG.debug('CACHE_DELETE: "%r"', key)
def delete_multi(self, keys):
LOG.debug(_('CACHE_DELETE_MULTI: "%r"'), keys)
LOG.debug('CACHE_DELETE_MULTI: "%r"', keys)
self.proxied.delete_multi(keys)
@ -101,7 +101,7 @@ def build_cache_config():
arg_key = '.'.join([prefix, 'arguments', argname])
conf_dict[arg_key] = argvalue
LOG.debug(_('Keystone Cache Config: %s'), conf_dict)
LOG.debug('Keystone Cache Config: %s', conf_dict)
return conf_dict
@ -149,7 +149,7 @@ def configure_cache_region(region):
# ProxyBackends work, see the dogpile.cache documents on
# "changing-backend-behavior"
cls = importutils.import_class(class_path)
LOG.debug(_("Adding cache-proxy '%s' to backend."), class_path)
LOG.debug("Adding cache-proxy '%s' to backend.", class_path)
region.wrap(cls)
return region

View File

@ -49,20 +49,19 @@ def v2_deprecated(f):
def _build_policy_check_credentials(self, action, context, kwargs):
LOG.debug(_('RBAC: Authorizing %(action)s(%(kwargs)s)'), {
LOG.debug('RBAC: Authorizing %(action)s(%(kwargs)s)', {
'action': action,
'kwargs': ', '.join(['%s=%s' % (k, kwargs[k]) for k in kwargs])})
# see if auth context has already been created. If so use it.
if ('environment' in context and
authorization.AUTH_CONTEXT_ENV in context['environment']):
LOG.debug(_('RBAC: using auth context from the request environment'))
LOG.debug('RBAC: using auth context from the request environment')
return context['environment'].get(authorization.AUTH_CONTEXT_ENV)
# now build the auth context from the incoming auth token
try:
LOG.debug(_('RBAC: building auth context from the incoming '
'auth token'))
LOG.debug('RBAC: building auth context from the incoming auth token')
# TODO(ayoung): These two functions return the token in different
# formats. However, the call
# to get_token hits the caching layer, and does not validate the
@ -148,7 +147,7 @@ def protected(callback=None):
self.policy_api.enforce(creds,
action,
utils.flatten_dict(policy_dict))
LOG.debug(_('RBAC: Authorization granted'))
LOG.debug('RBAC: Authorization granted')
return f(self, context, *args, **kwargs)
return inner
return wrapper
@ -178,7 +177,7 @@ def filterprotected(*filters):
if item in context['query_string']:
target[item] = context['query_string'][item]
LOG.debug(_('RBAC: Adding query filter params (%s)'), (
LOG.debug('RBAC: Adding query filter params (%s)', (
', '.join(['%s=%s' % (item, target[item])
for item in target])))
@ -190,7 +189,7 @@ def filterprotected(*filters):
action,
utils.flatten_dict(target))
LOG.debug(_('RBAC: Authorization granted'))
LOG.debug('RBAC: Authorization granted')
else:
LOG.warning(_('RBAC: Bypassing authorization'))
return f(self, context, filters, **kwargs)
@ -612,7 +611,7 @@ class V3Controller(wsgi.Application):
self.policy_api.enforce(creds,
action,
utils.flatten_dict(policy_dict))
LOG.debug(_('RBAC: Authorization granted'))
LOG.debug('RBAC: Authorization granted')
@classmethod
def check_immutable_params(cls, ref):

View File

@ -16,7 +16,6 @@ import functools
import os
from keystone.common import config
from keystone.openstack.common.gettextutils import _
from keystone.openstack.common import log
CONF = config.CONF
@ -49,7 +48,7 @@ def configure_once(name):
raise SystemError("Environment has already been "
"configured as %s" % _configured)
LOG.debug(_("Environment configured as: %s"), name)
LOG.debug("Environment configured as: %s", name)
_configured = name
return func(*args, **kwargs)

View File

@ -60,8 +60,8 @@ def _register_backends():
for backend in CONF.kvs.backends:
module, cls = backend.rsplit('.', 1)
backend_name = prefix % cls
LOG.debug(_('Registering Dogpile Backend %(backend_path)s as '
'%(backend_name)s'),
LOG.debug(('Registering Dogpile Backend %(backend_path)s as '
'%(backend_name)s'),
{'backend_path': backend, 'backend_name': backend_name})
region.register_backend(backend_name, module, cls)
BACKENDS_REGISTERED = True
@ -119,7 +119,7 @@ class KeyValueStore(object):
for item in proxy_list:
if isinstance(item, str):
LOG.debug(_('Importing class %s as KVS proxy.'), item)
LOG.debug('Importing class %s as KVS proxy.', item)
pxy = importutils.import_class(item)
else:
pxy = item
@ -155,9 +155,9 @@ class KeyValueStore(object):
# Support of the feature is implied by the existence of the
# 'raw_no_expiry_keys' attribute.
if not hasattr(self._region.backend, 'raw_no_expiry_keys'):
LOG.debug(_('Non-expiring keys not supported/required by '
'%(region)s backend; unable to set '
'key_mangler for backend: %(err)s'),
LOG.debug(('Non-expiring keys not supported/required by '
'%(region)s backend; unable to set '
'key_mangler for backend: %(err)s'),
{'region': self._region.name, 'err': e})
else:
raise
@ -232,7 +232,7 @@ class KeyValueStore(object):
arg_key = '.'.join([prefix, 'arguments', argument])
conf_dict[arg_key] = value
LOG.debug(_('KVS region configuration for %(name)s: %(config)r'),
LOG.debug('KVS region configuration for %(name)s: %(config)r',
{'name': self._region.name, 'config': conf_dict})
self._region.configure_from_config(conf_dict, '%s.' % prefix)
@ -373,7 +373,7 @@ class KeyValueStoreLock(object):
def acquire(self):
if self.enabled:
self.mutex.acquire()
LOG.debug(_('KVS lock acquired for: %s'), self.key)
LOG.debug('KVS lock acquired for: %s', self.key)
self.active = True
self.acquire_time = time.time()
return self
@ -392,7 +392,7 @@ class KeyValueStoreLock(object):
if self.enabled:
self.mutex.release()
if not self.expired:
LOG.debug(_('KVS lock released for: %s'), self.key)
LOG.debug('KVS lock released for: %s', self.key)
else:
LOG.warning(_('KVS lock released (timeout reached) for: %s'),
self.key)

View File

@ -150,8 +150,8 @@ def convert_ldap_result(ldap_result):
dict((kind, [ldap2py(x) for x in values])
for kind, values in six.iteritems(attrs))))
if at_least_one_referral:
LOG.debug(_('Referrals were returned and ignored. Enable referral '
'chasing in keystone.conf via [ldap] chase_referrals'))
LOG.debug(('Referrals were returned and ignored. Enable referral '
'chasing in keystone.conf via [ldap] chase_referrals'))
return py_result
@ -518,7 +518,7 @@ class PythonLDAPHandler(LDAPHandler):
if tls_req_cert in LDAP_TLS_CERTS.values():
ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, tls_req_cert)
else:
LOG.debug(_("LDAP TLS: invalid TLS_REQUIRE_CERT Option=%s"),
LOG.debug("LDAP TLS: invalid TLS_REQUIRE_CERT Option=%s",
tls_req_cert)
self.conn = ldap.initialize(url)

View File

@ -85,8 +85,8 @@ def validate_token_bind(context, token_ref):
LOG.info(_("Kerberos bind authentication successful"))
elif bind_mode == 'permissive':
LOG.debug(_("Ignoring unknown bind for permissive mode: "
"{%(bind_type)s: %(identifier)s}"),
LOG.debug(("Ignoring unknown bind for permissive mode: "
"{%(bind_type)s: %(identifier)s}"),
{'bind_type': bind_type, 'identifier': identifier})
else:
LOG.info(_("Couldn't verify unknown bind: "
@ -178,7 +178,7 @@ class Application(BaseApplication):
arg_dict = req.environ['wsgiorg.routing_args'][1]
action = arg_dict.pop('action')
del arg_dict['controller']
LOG.debug(_('arg_dict: %s'), arg_dict)
LOG.debug('arg_dict: %s', arg_dict)
# allow middleware up the stack to provide context, params and headers.
context = req.environ.get(CONTEXT_ENV, {})
@ -316,8 +316,8 @@ class Application(BaseApplication):
"""
if ('token_id' not in context or
context.get('token_id') == CONF.admin_token):
LOG.debug(_('will not lookup trust as the request auth token is '
'either absent or it is the system admin token'))
LOG.debug(('will not lookup trust as the request auth token is '
'either absent or it is the system admin token'))
return None
try:

View File

@ -22,7 +22,6 @@ from keystone.common import dependency
from keystone.common import manager
from keystone import config
from keystone import exception
from keystone.openstack.common.gettextutils import _
from keystone.openstack.common import log
@ -117,5 +116,5 @@ class Driver(object):
try:
self.credential_api.delete_credential(cr['id'])
except exception.CredentialNotFound:
LOG.debug(_('Deletion of credential is not required: %s'),
LOG.debug('Deletion of credential is not required: %s',
cr['id'])

View File

@ -25,6 +25,8 @@ please see pep8.py.
import ast
import six
class BaseASTChecker(ast.NodeVisitor):
"""Provides a simple framework for writing AST-based checks.
@ -135,7 +137,141 @@ class CheckForAssertingNoneEquality(BaseASTChecker):
super(CheckForAssertingNoneEquality, self).generic_visit(node)
class CheckForTranslationsInDebugLogging(BaseASTChecker):
    """Flags translated strings passed to debug-level logging (K005).

    Debug messages are developer-facing and should not be translated,
    e.g. ``LOG.debug(_('foo'))`` is an error. The checker walks the
    module AST, records which names are loggers (``LOG = log.getLogger``)
    and which names are i18n functions (``_``), and then reports any
    ``<logger>.debug(...)`` call whose first argument is an i18n call or
    a name previously assigned from one.
    """

    CHECK_DESC = 'K005 Using translated string in debug logging'

    LOG_MODULES = ('logging', 'keystone.openstack.common.log')
    # NOTE(review): this must be a tuple. The original value was a bare
    # parenthesized string, so ``module_name in self.I18N_MODULES`` was a
    # *substring* test -- a plain ``import keystone`` would have been
    # misclassified as an i18n import. The trailing comma fixes that.
    I18N_MODULES = ('keystone.openstack.common.gettextutils._',)

    def __init__(self, tree, filename):
        super(CheckForTranslationsInDebugLogging, self).__init__(
            tree, filename)

        # Names bound to logger instances, e.g. 'LOG' or 'self.L1'.
        self.logger_names = []
        # Names the logging modules were imported as, e.g. 'logging', 'log'.
        self.logger_module_names = []
        # Names the i18n function was imported as, e.g. '_'.
        self.i18n_names = []

        # NOTE(dstanek): this kinda accounts for scopes when talking
        # about only leaf node in the graph
        self.assignments = []

    def _filter_imports(self, module_name, alias):
        """Keep lists of logging and i18n imports."""
        if module_name in self.LOG_MODULES:
            self.logger_module_names.append(alias.asname or alias.name)
        elif module_name in self.I18N_MODULES:
            self.i18n_names.append(alias.asname or alias.name)

    def visit_Import(self, node):
        for alias in node.names:
            self._filter_imports(alias.name, alias)
        super(CheckForTranslationsInDebugLogging, self).generic_visit(node)

    def visit_ImportFrom(self, node):
        for alias in node.names:
            # Rebuild the dotted path so 'from pkg import name' is matched
            # against the same fully-qualified form as plain imports.
            full_name = '%s.%s' % (node.module, alias.name)
            self._filter_imports(full_name, alias)
        super(CheckForTranslationsInDebugLogging, self).generic_visit(node)

    def _find_name(self, node):
        """Return the fully qualified name of a Name or Attribute node."""
        if isinstance(node, ast.Name):
            return node.id
        elif (isinstance(node, ast.Attribute)
                and isinstance(node.value, (ast.Name, ast.Attribute))):
            method_name = node.attr
            obj_name = self._find_name(node.value)
            if obj_name is None:
                return None
            return obj_name + '.' + method_name
        elif isinstance(node, six.string_types):
            return node
        else:  # could be Subscript, Call or many more
            return None

    def visit_Assign(self, node):
        """Look for 'LOG = logging.getLogger'.

        This only handles the simple case:
          name = [logging_module].getLogger(...)
          - or -
          name = [i18n_name](...)
        """
        attr_node_types = (ast.Name, ast.Attribute)

        if (len(node.targets) != 1
                or not isinstance(node.targets[0], attr_node_types)):
            # say no to: "x, y = ..."
            return

        target_name = self._find_name(node.targets[0])

        if not isinstance(node.value, ast.Call):
            # node.value must be a call to getLogger
            return

        # is this a call to an i18n function?
        if (isinstance(node.value.func, ast.Name)
                and node.value.func.id in self.i18n_names):
            self.assignments.append(target_name)
            return

        if (not isinstance(node.value.func, ast.Attribute)
                or not isinstance(node.value.func.value, attr_node_types)):
            # function must be an attribute on an object like
            # logging.getLogger
            return

        object_name = self._find_name(node.value.func.value)
        func_name = node.value.func.attr

        if (object_name in self.logger_module_names
                and func_name == 'getLogger'):
            self.logger_names.append(target_name)

    def visit_Call(self, node):
        """Look for 'LOG.debug' calls with a translated first argument."""
        # Only obj.method(...) shapes can be logger calls.
        if isinstance(node.func, ast.Attribute):
            method_name = node.func.attr

            # NOTE(review): the original computed obj_name once before the
            # dispatch and again inside the Attribute branch; one lookup
            # covering both Name and Attribute receivers is equivalent.
            if isinstance(node.func.value, (ast.Name, ast.Attribute)):
                obj_name = self._find_name(node.func.value)
            else:  # could be Subscript, Call or many more
                return

            # must be a logger instance and the debug method
            if obj_name not in self.logger_names or method_name != 'debug':
                return

            # the call must have arguments
            if not len(node.args):
                return

            # if first arg is a call to a i18n name
            if (isinstance(node.args[0], ast.Call)
                    and isinstance(node.args[0].func, ast.Name)
                    and node.args[0].func.id in self.i18n_names):
                self.add_error(node.args[0])

            # if the first arg is a reference to a i18n call
            elif (isinstance(node.args[0], ast.Name)
                    and node.args[0].id in self.assignments):
                self.add_error(node.args[0])
def factory(register):
    """Hacking plugin entry point: register all keystone style checks.

    Called by the hacking/flake8 framework with a ``register`` callable;
    each registered class/function becomes an active local check.
    """
    register(CheckForMutableDefaultArgs)
    register(block_comments_begin_with_a_space)
    register(CheckForAssertingNoneEquality)
    register(CheckForTranslationsInDebugLogging)

View File

@ -160,9 +160,9 @@ class Identity(identity.Driver):
try:
users.append(self.user.get_filtered(user_id))
except exception.UserNotFound:
LOG.debug(_("Group member '%(user_dn)s' not found in"
" '%(group_id)s'. The user should be removed"
" from the group. The user will be ignored."),
LOG.debug(("Group member '%(user_dn)s' not found in"
" '%(group_id)s'. The user should be removed"
" from the group. The user will be ignored."),
dict(user_dn=user_dn, group_id=group_id))
return users

View File

@ -134,8 +134,8 @@ class DomainConfigs(dict):
fname[len(DOMAIN_CONF_FHEAD):
-len(DOMAIN_CONF_FTAIL)])
else:
LOG.debug(_('Ignoring file (%s) while scanning domain '
'config directory'),
LOG.debug(('Ignoring file (%s) while scanning domain '
'config directory'),
fname)
def get_domain_driver(self, domain_id):

View File

@ -273,8 +273,8 @@ class AuthContextMiddleware(wsgi.Middleware):
def process_request(self, request):
if AUTH_TOKEN_HEADER not in request.headers:
LOG.debug(_('Auth token not in the request header. '
'Will not build auth context.'))
LOG.debug(('Auth token not in the request header. '
'Will not build auth context.'))
return
if authorization.AUTH_CONTEXT_ENV in request.environ:
@ -283,5 +283,5 @@ class AuthContextMiddleware(wsgi.Middleware):
return
auth_context = self._build_auth_context(request)
LOG.debug(_('RBAC: auth_context: %s'), auth_context)
LOG.debug('RBAC: auth_context: %s', auth_context)
request.environ[authorization.AUTH_CONTEXT_ENV] = auth_context

View File

@ -153,9 +153,9 @@ def notify_event_callbacks(service, resource_type, operation, payload):
'resource_type': resource_type,
'operation': operation,
'payload': payload}
LOG.debug(_('Invoking callback %(cb_name)s for event '
'%(service)s %(resource_type)s %(operation)s for'
'%(payload)s'), subst_dict)
LOG.debug(('Invoking callback %(cb_name)s for event '
'%(service)s %(resource_type)s %(operation)s for'
'%(payload)s'), subst_dict)
cb(service, resource_type, operation, payload)
@ -290,7 +290,7 @@ def _send_audit_notification(action, initiator, outcome):
context = {}
payload = event.as_dict()
LOG.debug(_('CADF Event: %s'), payload)
LOG.debug('CADF Event: %s', payload)
service = 'identity'
event_type = '%(service)s.%(action)s' % {'service': service,
'action': action}

View File

@ -20,7 +20,6 @@ import os.path
from keystone.common import utils
from keystone import config
from keystone import exception
from keystone.openstack.common.gettextutils import _
from keystone.openstack.common import log
from keystone.openstack.common import policy as common_policy
from keystone import policy
@ -96,7 +95,7 @@ def enforce(credentials, action, target, do_raise=True):
class Policy(policy.Driver):
def enforce(self, credentials, action, target):
LOG.debug(_('enforce %(action)s: %(credentials)s'), {
LOG.debug('enforce %(action)s: %(credentials)s', {
'action': action,
'credentials': credentials})
enforce(credentials, action, target)

View File

@ -31,7 +31,6 @@ from six import moves
from keystone.common.ldap import core
from keystone import exception
from keystone.openstack.common.gettextutils import _
from keystone.openstack.common import log
@ -322,7 +321,7 @@ class FakeLdap(core.LDAPHandler):
del self.db[c]
key = self.key(dn)
LOG.debug(_('FakeLdap delete item: dn=%s'), dn)
LOG.debug('FakeLdap delete item: dn=%s', dn)
del self.db[key]
except KeyError:
LOG.debug('delete item failed: dn=%s not found.',

View File

@ -90,3 +90,52 @@ class HackingCode(fixtures.Fixture):
(7, 8, 'K004'),
(8, 8, 'K004'),
]}
assert_no_translations_for_debug_logging = {
'code': """
import logging
import logging as stlib_logging
from keystone.openstack.common import log
from keystone.openstack.common import log as oslo_logging
from keystone.openstack.common.gettextutils import _
from keystone.openstack.common.gettextutils import _ as oslog_i18n
# stdlib logging
L0 = logging.getLogger()
L0.debug(_('text'))
class C:
def __init__(self):
L0.debug(oslog_i18n('text', {}))
# stdlib logging w/ alias and specifying a logger
class C:
def __init__(self):
self.L1 = logging.getLogger(__name__)
def m(self):
self.L1.debug(
_('text'), {}
)
# oslo logging and specifying a logger
L2 = log.getLogger(__name__)
L2.debug(oslog_i18n('text'))
# oslo logging w/ alias
class C:
def __init__(self):
self.L3 = oslo_logging.getLogger()
self.L3.debug(_('text'))
# translation on a separate line
msg = _('text')
L2.debug(msg)
""",
'expected_errors': [
(10, 9, 'K005'),
(13, 17, 'K005'),
(21, 12, 'K005'),
(26, 9, 'K005'),
(32, 22, 'K005'),
(36, 9, 'K005'),
]
}

View File

@ -81,3 +81,15 @@ class TestAssertingNoneEquality(BaseStyleCheck):
code = self.code_ex.asserting_none_equality['code']
errors = self.code_ex.asserting_none_equality['expected_errors']
self.assert_has_errors(code, expected_errors=errors)
class TestCheckForTranslationsInDebugLogging(BaseStyleCheck):
    """Runs the K005 check against the shared fixture code."""

    def get_checker(self):
        # Checker class under test; consumed by the BaseStyleCheck harness.
        return checks.CheckForTranslationsInDebugLogging

    def test(self):
        # Fixture supplies both the sample source and the (line, col, code)
        # tuples the checker is expected to report.
        fixture = self.code_ex.assert_no_translations_for_debug_logging
        code = fixture['code']
        errors = fixture['expected_errors']
        self.assert_has_errors(code, expected_errors=errors)

View File

@ -167,8 +167,8 @@ class Token(token.Driver):
continue
if expires < current_time:
LOG.debug(_('Token `%(token_id)s` is expired, removing '
'from `%(user_key)s`.'),
LOG.debug(('Token `%(token_id)s` is expired, removing '
'from `%(user_key)s`.'),
{'token_id': item_id, 'user_key': user_key})
continue
@ -176,8 +176,8 @@ class Token(token.Driver):
# NOTE(morganfainberg): If the token has been revoked, it
# can safely be removed from this list. This helps to keep
# the user_token_list as reasonably small as possible.
LOG.debug(_('Token `%(token_id)s` is revoked, removing '
'from `%(user_key)s`.'),
LOG.debug(('Token `%(token_id)s` is revoked, removing '
'from `%(user_key)s`.'),
{'token_id': item_id, 'user_key': user_key})
continue
filtered_list.append(item)
@ -266,8 +266,8 @@ class Token(token.Driver):
try:
token_id, expires = item
except (TypeError, ValueError):
LOG.debug(_('Invalid token entry expected tuple of '
'`(<token_id>, <expires>)` got: `%(item)r`'),
LOG.debug(('Invalid token entry expected tuple of '
'`(<token_id>, <expires>)` got: `%(item)r`'),
dict(item=item))
raise
@ -275,8 +275,8 @@ class Token(token.Driver):
expires = timeutils.normalize_time(
timeutils.parse_isotime(expires))
except ValueError:
LOG.debug(_('Invalid expires time on token `%(token_id)s`:'
' %(expires)r'),
LOG.debug(('Invalid expires time on token `%(token_id)s`:'
' %(expires)r'),
dict(token_id=token_id, expires=expires))
raise
return token_id, expires