Merge "Bug 1075090 -- Fixing log messages in python source code to support internationalization."
commit 7978bb271b
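This change marks user-facing log and exception messages for translation by wrapping each format string in the gettext `_()` marker; `%` interpolation happens after the wrapped string is looked up, so the extracted message catalog holds the untranslated template. A minimal standalone sketch of the pattern (the explicit `_` alias and the file name below are illustrative, not taken from this commit):

    import gettext
    import logging

    # Keystone normally installs '_' through its own gettext setup; this
    # explicit alias only makes the example self-contained.
    _ = gettext.NullTranslations().gettext

    LOG = logging.getLogger(__name__)
    template_file = 'default_catalog.templates'  # hypothetical path

    # Wrap only the format string, then interpolate, so translators see the
    # raw '%s' template rather than a pre-formatted message.
    LOG.critical(_('Unable to open template file %s') % template_file)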
@@ -106,7 +106,7 @@ class TemplatedCatalog(kvs.Catalog):
         try:
             self.templates = parse_templates(open(template_file))
         except IOError:
-            LOG.critical('Unable to open template file %s' % template_file)
+            LOG.critical(_('Unable to open template file %s') % template_file)
             raise

     def get_catalog(self, user_id, tenant_id, metadata=None):
@@ -34,17 +34,19 @@ def format_url(url, data):
     except AttributeError:
         return None
     except KeyError as e:
-        LOG.error("Malformed endpoint %s - unknown key %s" %
-                  (url, str(e)))
+        LOG.error(_("Malformed endpoint %(url)s - unknown key %(keyerror)s") %
+                  {"url": url,
+                   "keyerror": str(e)})
         raise exception.MalformedEndpoint(endpoint=url)
     except TypeError as e:
-        LOG.error("Malformed endpoint %s - type mismatch %s \
-                   (are you missing brackets ?)" %
-                  (url, str(e)))
+        LOG.error(_("Malformed endpoint %(url)s - unknown key %(keyerror)s"
+                    "(are you missing brackets ?)") %
+                  {"url": url,
+                   "keyerror": str(e)})
         raise exception.MalformedEndpoint(endpoint=url)
     except ValueError as e:
-        LOG.error("Malformed endpoint %s - incomplete format \
-                   (are you missing a type notifier ?)" % url)
+        LOG.error(_("Malformed endpoint %s - incomplete format \
+                    (are you missing a type notifier ?)") % url)
         raise exception.MalformedEndpoint(endpoint=url)
     return result
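A brief, hedged note on the hunk above: it also switches from positional `%s` arguments to named `%(url)s`-style placeholders interpolated from a dict. Named placeholders let a translated string reorder its values, which positional arguments cannot do. Reusing the `_` alias from the sketch near the top (the literal values here are illustrative):

    msg = _("Malformed endpoint %(url)s - unknown key %(keyerror)s") % {
        "url": "http://example.com:$(port)d/v2.0",  # illustrative endpoint
        "keyerror": "'port'",
    }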
@@ -20,20 +20,21 @@ from keystone import exception
 def check_length(property_name, value, min_length=1, max_length=64):
     if len(value) < min_length:
         if min_length == 1:
-            msg = "%s cannot be empty." % property_name
+            msg = _("%s cannot be empty.") % property_name
         else:
-            msg = ("%(property_name)s cannot be less than "
-                   "%(min_length)s characters.") % locals()
+            msg = (_("%(property_name)s cannot be less than "
+                     "%(min_length)s characters.")) % locals()
         raise exception.ValidationError(msg)
     if len(value) > max_length:
-        msg = ("%(property_name)s should not be greater than "
-               "%(max_length)s characters.") % locals()
+        msg = (_("%(property_name)s should not be greater than "
+                 "%(max_length)s characters.")) % locals()
         raise exception.ValidationError(msg)


 def check_type(property_name, value, expected_type, display_expected_type):
     if not isinstance(value, expected_type):
-        msg = "%(property_name)s is not a %(display_expected_type)s" % locals()
+        msg = _("%(property_name)s is not a"
+                "%(display_expected_type)s") % locals()
         raise exception.ValidationError(msg)
@@ -99,8 +99,8 @@ class BufferedHTTPConnection(HTTPConnection):

     def getresponse(self):
         response = HTTPConnection.getresponse(self)
-        LOG.debug('HTTP PERF: %(time).5f seconds to %(method)s '
-                  '%(host)s:%(port)s %(path)s)',
+        LOG.debug(_('HTTP PERF: %(time).5f seconds to %(method)s '
+                    '%(host)s:%(port)s %(path)s)'),
                   {'time': time.time() - self._connected_time,
                    'method': self._method,
                    'host': self.host,
@@ -41,7 +41,7 @@ def cms_verify(formatted, signing_cert_file_name, ca_file_name):
     output, err = process.communicate(formatted)
     retcode = process.poll()
     if retcode:
-        LOG.error('Verify error: %s' % err)
+        LOG.error(_('Verify error: %s') % err)
         raise subprocess.CalledProcessError(retcode, "openssl", output=err)
     return output

@@ -131,7 +131,7 @@ def cms_sign_text(text, signing_cert_file_name, signing_key_file_name):
     output, err = process.communicate(text)
     retcode = process.poll()
     if retcode or "Error" in err:
-        LOG.error('Signing error: %s' % err)
+        LOG.error(_('Signing error: %s') % err)
         raise subprocess.CalledProcessError(retcode, "openssl")
     return output

@@ -168,8 +168,8 @@ class BaseLdap(object):
                 pass
             else:
                 raise exception.Conflict(type=self.options_name,
-                                         details='Duplicate name, %s.' %
-                                         values['name'])
+                                         details=_('Duplicate name, %s.') %
+                                         values['name'])

         if values.get('id') is not None:
             try:
@@ -178,12 +178,13 @@ class BaseLdap(object):
                 pass
             else:
                 raise exception.Conflict(type=self.options_name,
-                                         details='Duplicate ID, %s.' %
-                                         values['id'])
+                                         details=_('Duplicate ID, %s.') %
+                                         values['id'])

     def create(self, values):
         if not self.allow_create:
-            msg = 'LDAP backend does not allow %s create' % self.options_name
+            msg = _('LDAP backend does not allow %s create') \
+                % self.options_name
             raise exception.ForbiddenAction(msg)

         conn = self.get_connection()
@@ -289,7 +290,8 @@ class BaseLdap(object):

     def update(self, id, values, old_obj=None):
         if not self.allow_update:
-            msg = 'LDAP backend does not allow %s update' % self.options_name
+            msg = _('LDAP backend does not allow %s update') \
+                % self.options_name
             raise exception.ForbiddenAction(msg)

         if old_obj is None:
@@ -316,7 +318,8 @@ class BaseLdap(object):

     def delete(self, id):
         if not self.allow_delete:
-            msg = 'LDAP backend does not allow %s delete' % self.options_name
+            msg = _('LDAP backend does not allow %s delete') \
+                % self.options_name
             raise exception.ForbiddenAction(msg)

         conn = self.get_connection()
@@ -145,7 +145,7 @@ class FakeLdap(object):
     __prefix = 'ldap:'

     def __init__(self, url):
-        LOG.debug('FakeLdap initialize url=%s', url)
+        LOG.debug(_('FakeLdap initialize url=%s'), url)
         if url == 'fake://memory':
             self.db = FakeShelve.get_instance()
         else:
@@ -155,26 +155,27 @@ class FakeLdap(object):
         """This method is ignored, but provided for compatibility."""
         if server_fail:
             raise ldap.SERVER_DOWN
-        LOG.debug('FakeLdap bind dn=%s', dn)
+        LOG.debug(_('FakeLdap bind dn=%s'), dn)
         if dn == 'cn=Admin' and password == 'password':
             return

         try:
             attrs = self.db['%s%s' % (self.__prefix, dn)]
         except KeyError:
-            LOG.error('FakeLdap bind fail: dn=%s not found', dn)
+            LOG.error(_('FakeLdap bind fail: dn=%s not found'), dn)
             raise ldap.NO_SUCH_OBJECT

         db_password = None
         try:
             db_password = attrs['userPassword'][0]
         except (KeyError, IndexError):
-            LOG.error('FakeLdap bind fail: password for dn=%s not found', dn)
+            LOG.error(_('FakeLdap bind fail: password for dn=%s not found'),
+                      dn)
             raise ldap.INAPPROPRIATE_AUTH

         if not utils.ldap_check_password(password, db_password):
-            LOG.error('FakeLdap bind fail: password for dn=%s does'
-                      ' not match' % dn)
+            LOG.error(_('FakeLdap bind fail: password for dn=%s does'
+                        ' not match') % dn)
             raise ldap.INVALID_CREDENTIALS

     def unbind_s(self):
@@ -188,10 +189,10 @@ class FakeLdap(object):
             raise ldap.SERVER_DOWN

         key = '%s%s' % (self.__prefix, dn)
-        LOG.debug('FakeLdap add item: dn=%s, attrs=%s', dn, attrs)
+        LOG.debug(_('FakeLdap add item: dn=%s, attrs=%s'), dn, attrs)
         if key in self.db:
-            LOG.error('FakeLdap add item failed: dn=%s is'
-                      ' already in store.', dn)
+            LOG.error(_('FakeLdap add item failed: dn=%s is'
+                        ' already in store.'), dn)
             raise ldap.ALREADY_EXISTS(dn)

         self.db[key] = dict([(k, v if isinstance(v, list) else [v])
@@ -204,11 +205,11 @@ class FakeLdap(object):
             raise ldap.SERVER_DOWN

         key = '%s%s' % (self.__prefix, dn)
-        LOG.debug('FakeLdap delete item: dn=%s', dn)
+        LOG.debug(_('FakeLdap delete item: dn=%s'), dn)
         try:
             del self.db[key]
         except KeyError:
-            LOG.error('FakeLdap delete item failed: dn=%s not found.', dn)
+            LOG.error(_('FakeLdap delete item failed: dn=%s not found.'), dn)
             raise ldap.NO_SUCH_OBJECT
         self.db.sync()

@@ -218,11 +219,11 @@ class FakeLdap(object):
             raise ldap.SERVER_DOWN

         key = '%s%s' % (self.__prefix, dn)
-        LOG.debug('FakeLdap delete item: dn=%s', dn)
+        LOG.debug(_('FakeLdap delete item: dn=%s'), dn)
         try:
             del self.db[key]
         except KeyError:
-            LOG.error('FakeLdap delete item failed: dn=%s not found.', dn)
+            LOG.error(_('FakeLdap delete item failed: dn=%s not found.'), dn)
             raise ldap.NO_SUCH_OBJECT
         self.db.sync()

@@ -237,11 +238,11 @@ class FakeLdap(object):
             raise ldap.SERVER_DOWN

         key = '%s%s' % (self.__prefix, dn)
-        LOG.debug('FakeLdap modify item: dn=%s attrs=%s', dn, attrs)
+        LOG.debug(_('FakeLdap modify item: dn=%s attrs=%s'), dn, attrs)
         try:
             entry = self.db[key]
         except KeyError:
-            LOG.error('FakeLdap modify item failed: dn=%s not found.', dn)
+            LOG.error(_('FakeLdap modify item failed: dn=%s not found.'), dn)
             raise ldap.NO_SUCH_OBJECT

         for cmd, k, v in attrs:
@@ -258,8 +259,8 @@ class FakeLdap(object):
             elif cmd == ldap.MOD_DELETE:
                 if v is None:
                     if len(values) == 0:
-                        LOG.error('FakeLdap modify item failed: '
-                                  'item has no attribute "%s" to delete', k)
+                        LOG.error(_('FakeLdap modify item failed: '
+                                    'item has no attribute "%s" to delete'), k)
                         raise ldap.NO_SUCH_ATTRIBUTE
                     values[:] = []
                 else:
@@ -269,15 +270,15 @@ class FakeLdap(object):
                         try:
                             values.remove(val)
                         except ValueError:
-                            LOG.error('FakeLdap modify item failed:'
-                                      ' item has no attribute "%s" with'
-                                      ' value "%s" to delete', k, val)
+                            LOG.error(_('FakeLdap modify item failed:'
+                                        ' item has no attribute "%s" with'
+                                        ' value "%s" to delete'), k, val)
                             raise ldap.NO_SUCH_ATTRIBUTE
             else:
-                LOG.error('FakeLdap modify item failed: unknown'
-                          ' command %s', cmd)
-                raise NotImplementedError('modify_s action %s not implemented'
-                                          % cmd)
+                LOG.error(_('FakeLdap modify item failed: unknown'
+                            ' command %s'), cmd)
+                raise NotImplementedError(_('modify_s action %s not'
+                                            ' implemented') % cmd)
         self.db[key] = entry
         self.db.sync()

@@ -294,13 +295,14 @@ class FakeLdap(object):
         if server_fail:
             raise ldap.SERVER_DOWN

-        LOG.debug('FakeLdap search at dn=%s scope=%s query=%s',
+        LOG.debug(_('FakeLdap search at dn=%s scope=%s query=%s'),
                   dn, SCOPE_NAMES.get(scope, scope), query)
         if scope == ldap.SCOPE_BASE:
             try:
                 item_dict = self.db['%s%s' % (self.__prefix, dn)]
             except KeyError:
-                LOG.debug('FakeLdap search fail: dn not found for SCOPE_BASE')
+                LOG.debug(_('FakeLdap search fail: dn not found for'
+                            ' SCOPE_BASE'))
                 raise ldap.NO_SUCH_OBJECT
             results = [(dn, item_dict)]
         elif scope == ldap.SCOPE_SUBTREE:
@@ -313,7 +315,7 @@ class FakeLdap(object):
                        if re.match('%s\w+=[^,]+,%s' % (self.__prefix, dn), k)]
         else:
             LOG.error('FakeLdap search fail: unknown scope %s', scope)
-            raise NotImplementedError('Search scope %s not implemented.'
+            raise NotImplementedError(_('Search scope %s not implemented.')
                                       % scope)

         objects = []
@@ -160,7 +160,7 @@ class MySQLPingListener(object):
             dbapi_con.cursor().execute('select 1')
         except dbapi_con.OperationalError as e:
             if e.args[0] in (2006, 2013, 2014, 2045, 2055):
-                logging.warn('Got mysql server has gone away: %s', e)
+                logging.warn(_('Got mysql server has gone away: %s'), e)
                 raise DisconnectionError("Database server went away")
             else:
                 raise
@@ -171,4 +171,4 @@ class LegacyMigration(object):
             try:
                 self.ec2_driver.create_credential(None, new_dict)
             except exc.IntegrityError:
-                LOG.exception('Cannot migrate EC2 credential: %s' % x)
+                LOG.exception(_('Cannot migrate EC2 credential: %s') % x)
@@ -55,7 +55,7 @@ def _create_tenants(api, tenants):
             'enabled': True,
         }
         tenant_map[tenant['id']] = tenant_dict['id']
-        LOG.debug('Create tenant %s' % tenant_dict)
+        LOG.debug(_('Create tenant %s') % tenant_dict)
         api.create_tenant(tenant_dict['id'], tenant_dict)
     return tenant_map

@@ -71,7 +71,7 @@ def _create_users(api, users):
             'enabled': True,
         }
         user_map[user['id']] = user_dict['id']
-        LOG.debug('Create user %s' % user_dict)
+        LOG.debug(_('Create user %s') % user_dict)
         api.create_user(user_dict['id'], user_dict)
     return user_map

@@ -80,7 +80,7 @@ def _create_memberships(api, memberships, user_map, tenant_map):
     for membership in memberships:
         user_id = user_map[membership['user_id']]
         tenant_id = tenant_map[membership['tenant_id']]
-        LOG.debug('Add user %s to tenant %s' % (user_id, tenant_id))
+        LOG.debug(_('Add user %s to tenant %s') % (user_id, tenant_id))
         api.add_user_to_tenant(tenant_id, user_id)


@@ -88,14 +88,14 @@ def _create_roles(api, roles):
     role_map = dict((r['name'], r['id']) for r in api.list_roles())
     for role in roles:
         if role in role_map:
-            LOG.debug('Ignoring existing role %s' % role)
+            LOG.debug(_('Ignoring existing role %s') % role)
             continue
         role_dict = {
             'id': _generate_uuid(),
             'name': role,
         }
         role_map[role] = role_dict['id']
-        LOG.debug('Create role %s' % role_dict)
+        LOG.debug(_('Create role %s') % role_dict)
         api.create_role(role_dict['id'], role_dict)
     return role_map

@@ -105,7 +105,7 @@ def _assign_roles(api, assignments, role_map, user_map, tenant_map):
         role_id = role_map[assignment['role']]
         user_id = user_map[assignment['user_id']]
         tenant_id = tenant_map[assignment['tenant_id']]
-        LOG.debug('Assign role %s to user %s on tenant %s' %
+        LOG.debug(_('Assign role %s to user %s on tenant %s') %
                   (role_id, user_id, tenant_id))
         api.add_role_to_user_and_tenant(user_id, tenant_id, role_id)

@@ -120,6 +120,6 @@ def _create_ec2_creds(ec2_api, identity_api, ec2_creds, user_map):
             'user_id': user_id,
             'tenant_id': tenant_id,
         }
-        LOG.debug('Creating ec2 cred for user %s and tenant %s' %
+        LOG.debug(_('Creating ec2 cred for user %s and tenant %s') %
                   (user_id, tenant_id))
         ec2_api.create_credential(None, cred_dict)
@@ -121,7 +121,7 @@ class Ec2Signer(object):

     def _calc_signature_2(self, params, verb, server_string, path):
         """Generate AWS signature version 2 string."""
-        LOG.debug('using _calc_signature_2')
+        LOG.debug(_('using _calc_signature_2'))
         string_to_sign = '%s\n%s\n%s\n' % (verb, server_string, path)
         if self.hmac_256:
             current_hmac = self.hmac_256
@@ -137,13 +137,13 @@ class Ec2Signer(object):
             val = urllib.quote(val, safe='-_~')
             pairs.append(urllib.quote(key, safe='') + '=' + val)
         qs = '&'.join(pairs)
-        LOG.debug('query string: %s', qs)
+        LOG.debug(_('query string: %s'), qs)
         string_to_sign += qs
-        LOG.debug('string_to_sign: %s', string_to_sign)
+        LOG.debug(_('string_to_sign: %s'), string_to_sign)
         current_hmac.update(string_to_sign)
         b64 = base64.b64encode(current_hmac.digest())
-        LOG.debug('len(b64)=%d', len(b64))
-        LOG.debug('base64 encoded digest: %s', b64)
+        LOG.debug(_('len(b64)=%d'), len(b64))
+        LOG.debug(_('base64 encoded digest: %s'), b64)
         return b64


@@ -70,7 +70,7 @@ class Server(object):

     def start(self, key=None, backlog=128):
         """Run a WSGI server with the given application."""
-        LOG.debug('Starting %(arg0)s on %(host)s:%(port)s' %
+        LOG.debug(_('Starting %(arg0)s on %(host)s:%(port)s') %
                   {'arg0': sys.argv[0],
                    'host': self.host,
                    'port': self.port})
@@ -193,7 +193,7 @@ class Application(BaseApplication):
         arg_dict = req.environ['wsgiorg.routing_args'][1]
         action = arg_dict.pop('action')
         del arg_dict['controller']
-        LOG.debug('arg_dict: %s', arg_dict)
+        LOG.debug(_('arg_dict: %s'), arg_dict)

         # allow middleware up the stack to provide context & params
         context = req.environ.get(CONTEXT_ENV, {})
@@ -214,7 +214,7 @@ class Application(BaseApplication):
         try:
             result = method(context, **params)
         except exception.Unauthorized as e:
-            LOG.warning("Authorization failed. %s from %s"
+            LOG.warning(_("Authorization failed. %s from %s")
                         % (e, req.environ['REMOTE_ADDR']))
             return render_exception(e)
         except exception.Error as e:
@@ -41,8 +41,8 @@ def setup_logging(conf):
             logging.config.fileConfig(conf.log_config)
             return
         else:
-            raise RuntimeError('Unable to locate specified logging '
-                               'config file: %s' % conf.log_config)
+            raise RuntimeError(_('Unable to locate specified logging '
+                                 'config file: %s') % conf.log_config)

     root_logger = logging.root
     if conf.debug:
@@ -92,5 +92,5 @@ def enforce(credentials, action, target):

 class Policy(policy.Driver):
     def enforce(self, credentials, action, target):
-        LOG.debug('enforce %s: %s', action, credentials)
+        LOG.debug(_('enforce %s: %s'), action, credentials)
         enforce(credentials, action, target)
@@ -88,7 +88,7 @@ def checkout_vendor(repo, rev):
         with open(modcheck, 'w') as fd:
             fd.write('1')
     except subprocess.CalledProcessError:
-        LOG.warning('Failed to checkout %s', repo)
+        LOG.warning(_('Failed to checkout %s'), repo)
     cd(working_dir)
     return revdir
