Merge "Remove log translations"

Jenkins authored 2017-06-05 14:52:02 +00:00; committed by Gerrit Code Review
commit 0b982b3b12
14 changed files with 60 additions and 140 deletions
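Every hunk below applies the same pattern: the oslo i18n log-marker wrappers (_LI, _LW, _LE, _LC) are dropped from LOG calls, only _ is kept for user-facing strings, and format arguments are handed to the logger lazily instead of being pre-interpolated with %. A minimal sketch of the before/after, based on the store_api warning changed in this commit; the surrounding function and its arguments are invented for illustration:

from oslo_log import log as logging

LOG = logging.getLogger(__name__)


def pick_store(store_type, known_schemes):
    if store_type not in known_schemes:
        # Before: translation marker plus eager % interpolation
        #   LOG.warning(_LW("Incorrect backend configuration - scheme '%s' is"
        #                   " not supported. Fallback to default store.")
        #               % store_type)
        # After: plain message; the logger interpolates the arguments only
        # if the record is actually emitted
        LOG.warning("Incorrect backend configuration - scheme '%s' is not"
                    " supported. Fallback to default store.", store_type)
        store_type = None
    return store_type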

View File

@ -22,7 +22,7 @@ from oslo_serialization import jsonutils
from glare.common import exception
from glare.common import policy
from glare.i18n import _, _LW
from glare.i18n import _
context_opts = [
cfg.BoolOpt('allow_anonymous_access', default=False,
@ -71,7 +71,7 @@ class BaseContextMiddleware(base_middleware.ConfigurableMiddleware):
try:
request_id = resp.request.context.request_id
except AttributeError:
LOG.warn(_LW('Unable to retrieve request id from context'))
LOG.warn('Unable to retrieve request id from context')
else:
# For python 3 compatibility need to use bytes type
prefix = b'req-' if isinstance(request_id, bytes) else 'req-'

View File

@ -28,7 +28,7 @@ from glare.api.v1 import api_versioning
from glare.common import exception as exc
from glare.common import wsgi
from glare import engine
from glare.i18n import _, _LI
from glare.i18n import _
LOG = logging.getLogger(__name__)
@ -172,10 +172,10 @@ def log_request_progress(f):
{'request_id': req.context.request_id,
'api_method': f.__name__})
result = f(self, req, *args, **kwargs)
LOG.info(_LI(
LOG.info(
"Request %(request_id)s for artifact %(api_method)s "
"successfully executed."), {'request_id': req.context.request_id,
'api_method': f.__name__})
"successfully executed.", {'request_id': req.context.request_id,
'api_method': f.__name__})
return result
return log_decorator

View File

@ -19,7 +19,6 @@ from oslo_log import log as logging
from glare.common import exception
from glare.common import utils
from glare.i18n import _LW
from glare.store import database
CONF = cfg.CONF
@ -69,9 +68,8 @@ def save_blob_to_store(blob_id, blob, context, max_size,
:return: tuple of values: (location_uri, size, checksums)
"""
if store_type not in set(CONF.glance_store.stores + ['database']):
LOG.warning(_LW("Incorrect backend configuration - scheme '%s' is not"
" supported. Fallback to default store.")
% store_type)
LOG.warning("Incorrect backend configuration - scheme '%s' is not"
" supported. Fallback to default store.", store_type)
store_type = None
data = utils.LimitingReader(utils.CooperativeReader(blob), max_size)

View File

@ -40,7 +40,7 @@ from oslo_versionedobjects import fields
import six
from glare.common import exception
from glare.i18n import _, _LE, _LW
from glare.i18n import _
from glare.objects.meta import fields as glare_fields
CONF = cfg.CONF
@ -86,8 +86,7 @@ def cooperative_iter(iter):
yield chunk
except Exception as err:
with excutils.save_and_reraise_exception():
msg = _LE("Error: cooperative_iter exception %s") % err
LOG.error(msg)
LOG.error("Error: cooperative_iter exception %s", err)
def cooperative_read(fd):
@ -266,13 +265,13 @@ def validate_key_cert(key_file, cert_file):
digest = CONF.digest_algorithm
if digest == 'sha1':
LOG.warning(
_LW('The FIPS (FEDERAL INFORMATION PROCESSING STANDARDS)'
' state that the SHA-1 is not suitable for'
' general-purpose digital signature applications (as'
' specified in FIPS 186-3) that require 112 bits of'
' security. The default value is sha1 in Kilo for a'
' smooth upgrade process, and it will be updated'
' with sha256 in next release(L).'))
'The FIPS (FEDERAL INFORMATION PROCESSING STANDARDS)'
' state that the SHA-1 is not suitable for'
' general-purpose digital signature applications (as'
' specified in FIPS 186-3) that require 112 bits of'
' security. The default value is sha1 in Kilo for a'
' smooth upgrade process, and it will be updated'
' with sha256 in next release(L).')
out = crypto.sign(key, data, digest)
crypto.verify(cert, out, data, digest)
except crypto.Error as ce:

View File

@ -50,8 +50,8 @@ from webob import multidict
from glare.common import exception as glare_exc
from glare.common import utils
from glare.i18n import _, _LE, _LI, _LW
from glare import i18n
from glare.i18n import _
bind_opts = [
@ -289,7 +289,7 @@ class Server(object):
self.pool.spawn_n(self._single_run, self.application, self.sock)
return
else:
LOG.info(_LI("Starting %d workers"), workers)
LOG.info("Starting %d workers", workers)
signal.signal(signal.SIGTERM, self.kill_children)
signal.signal(signal.SIGINT, self.kill_children)
signal.signal(signal.SIGHUP, self.hup)
@ -302,22 +302,21 @@ class Server(object):
def _remove_children(self, pid):
if pid in self.children:
self.children.remove(pid)
LOG.info(_LI('Removed dead child %s'), pid)
LOG.info('Removed dead child %s', pid)
elif pid in self.stale_children:
self.stale_children.remove(pid)
LOG.info(_LI('Removed stale child %s'), pid)
LOG.info('Removed stale child %s', pid)
else:
LOG.warning(_LW('Unrecognised child %s') % pid)
LOG.warning('Unrecognised child %s', pid)
def _verify_and_respawn_children(self, pid, status):
if len(self.stale_children) == 0:
LOG.debug('No stale children')
if os.WIFEXITED(status) and os.WEXITSTATUS(status) != 0:
LOG.error(_LE('Not respawning child %d, cannot '
'recover from termination') % pid)
LOG.error('Not respawning child %d, cannot '
'recover from termination', pid)
if not self.children and not self.stale_children:
LOG.info(
_LI('All workers have terminated. Exiting'))
LOG.info('All workers have terminated. Exiting')
self.running = False
else:
if len(self.children) < get_num_workers():
@ -334,7 +333,7 @@ class Server(object):
if err.errno not in (errno.EINTR, errno.ECHILD):
raise
except KeyboardInterrupt:
LOG.info(_LI('Caught keyboard interrupt. Exiting.'))
LOG.info('Caught keyboard interrupt. Exiting.')
break
except glare_exc.SIGHUPInterrupt:
self.reload()
@ -421,12 +420,12 @@ class Server(object):
# exit on sighup
self._sock = None
self.run_server()
LOG.info(_LI('Child %d exiting normally'), os.getpid())
LOG.info('Child %d exiting normally', os.getpid())
# self.pool.waitall() is now called in wsgi's server so
# it's safe to exit here
sys.exit(0)
else:
LOG.info(_LI('Started child %s'), pid)
LOG.info('Started child %s', pid)
self.children.add(pid)
def run_server(self):
@ -452,7 +451,7 @@ class Server(object):
def _single_run(self, application, sock):
"""Start a WSGI server in a new green thread."""
LOG.info(_LI("Starting single process server"))
LOG.info("Starting single process server")
eventlet.wsgi.server(sock, application, custom_pool=self.pool,
log=self._logger,
debug=False,
@ -790,7 +789,7 @@ class Resource(object):
"decoded by Glare")
raise webob.exc.HTTPBadRequest(explanation=msg)
except Exception as e:
LOG.exception(_LE("Caught error: %s"),
LOG.exception("Caught error: %s",
encodeutils.exception_to_unicode(e))
response = webob.exc.HTTPInternalServerError(explanation=str(e))
return response

View File

@ -20,7 +20,6 @@ from retrying import retry
import six
from glare.db.sqlalchemy import api
from glare.i18n import _LW
from glare import locking
LOG = logging.getLogger(__name__)
@ -30,7 +29,7 @@ def _retry_on_connection_error(exc):
"""Function to retry a DB API call if connection error was received."""
if isinstance(exc, db_exception.DBConnectionError):
LOG.warning(_LW("Connection error detected. Retrying..."))
LOG.warning("Connection error detected. Retrying...")
return True
return False

View File

@ -39,7 +39,7 @@ from glare.common import exception
from glare.common import semver_db
from glare.common import utils
from glare.db.sqlalchemy import models
from glare.i18n import _, _LW
from glare.i18n import _
LOG = os_logging.getLogger(__name__)
@ -62,7 +62,7 @@ def _retry_on_deadlock(exc):
"""Decorator to retry a DB API call if Deadlock was received."""
if isinstance(exc, db_exception.DBDeadlock):
LOG.warning(_LW("Deadlock detected. Retrying..."))
LOG.warning("Deadlock detected. Retrying...")
return True
return False

View File

@ -24,7 +24,7 @@ from glare.common import exception
from glare.common import policy
from glare.common import store_api
from glare.common import utils
from glare.i18n import _, _LI
from glare.i18n import _
from glare.notification import Notifier
from glare.objects import base
from glare.objects.meta import fields as glare_fields
@ -266,8 +266,8 @@ class Engine(object):
blob["sha256"] = blob_meta.pop("sha256", None)
modified_af = cls.update_blob(
context, type_name, artifact_id, blob, field_name, blob_key)
LOG.info(_LI("External location %(location)s has been created "
"successfully for artifact %(artifact)s blob %(blob)s"),
LOG.info("External location %(location)s has been created "
"successfully for artifact %(artifact)s blob %(blob)s",
{'location': location, 'artifact': af.id,
'blob': blob_name})
@ -333,8 +333,8 @@ class Engine(object):
field_name, blob_dict_attr)
blob_name = "%s[%s]" % (field_name, blob_key) \
if blob_key else field_name
LOG.info(_LI("Successfully finished blob upload for artifact "
"%(artifact)s blob field %(blob)s."),
LOG.info("Successfully finished blob upload for artifact "
"%(artifact)s blob field %(blob)s.",
{'artifact': af.id, 'blob': blob_name})
# update blob info and activate it

View File

@ -14,8 +14,6 @@
import re
import pep8
"""
Guidelines for writing new hacking checks
@ -111,30 +109,6 @@ def no_direct_use_of_unicode_function(logical_line):
yield(0, "G320: Use six.text_type() instead of unicode()")
def validate_log_translations(logical_line, physical_line, filename):
# Translations are not required in the test directory
if pep8.noqa(physical_line):
return
msg = "G322: LOG.info messages require translations `_LI()`!"
if log_translation_info.match(logical_line):
yield (0, msg)
msg = "G323: LOG.exception messages require translations `_LE()`!"
if log_translation_exception.match(logical_line):
yield (0, msg)
msg = "G324: LOG.error messages require translations `_LE()`!"
if log_translation_error.match(logical_line):
yield (0, msg)
msg = "G325: LOG.critical messages require translations `_LC()`!"
if log_translation_critical.match(logical_line):
yield (0, msg)
msg = "G326: LOG.warning messages require translations `_LW()`!"
if log_translation_warning.match(logical_line):
yield (0, msg)
msg = "G321: Log messages require translations!"
if log_translation.match(logical_line):
yield (0, msg)
def check_no_contextlib_nested(logical_line):
msg = ("G327: contextlib.nested is deprecated since Python 2.7. See "
"https://docs.python.org/2/library/contextlib.html#contextlib."
@ -184,7 +158,6 @@ def factory(register):
register(assert_equal_none)
register(no_translate_debug_logs)
register(no_direct_use_of_unicode_function)
register(validate_log_translations)
register(check_no_contextlib_nested)
register(dict_constructor_with_list_copy)
register(check_python3_xrange)
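The removed validate_log_translations check matched each logical line against module-level regexes (log_translation_info and friends) whose definitions are not visible in these hunks. A rough sketch of how such a pep8/hacking check is wired up, with assumed pattern shapes rather than the project's exact ones:

import re

import pep8

# Assumed shapes; the real patterns lived elsewhere in
# glare/hacking/checks.py and were removed along with the check.
log_translation = re.compile(
    r"(.)*LOG\.(audit|error|info|warn|warning|critical|exception)"
    r"\(\s*('|\")")
log_translation_info = re.compile(r"(.)*LOG\.info\(\s*(_\(|'|\")")


def validate_log_translations(logical_line, physical_line, filename):
    # Lines marked with  # noqa  are skipped
    if pep8.noqa(physical_line):
        return
    if log_translation_info.match(logical_line):
        yield (0, "G322: LOG.info messages require translations `_LI()`!")
    if log_translation.match(logical_line):
        yield (0, "G321: Log messages require translations!")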

View File

@ -14,8 +14,6 @@
# under the License.
from oslo_log import log as logging
from glare.i18n import _LI
LOG = logging.getLogger(__name__)
@ -96,12 +94,11 @@ class LockEngine(object):
"""
if lock_key is not None and len(lock_key) < self.MAX_LOCK_LENGTH:
lock_id = self.lock_api.create_lock(context, lock_key)
LOG.info(_LI("Lock %(lock_id)s acquired for lock_key "
"%(lock_key)s"),
LOG.info("Lock %(lock_id)s acquired for lock_key %(lock_key)s",
{'lock_id': lock_id, 'lock_key': lock_key})
else:
lock_id = None
LOG.info(_LI("No lock for lock_key %s"), lock_key)
LOG.info("No lock for lock_key %s", lock_key)
return Lock(context, lock_id, lock_key, self.release)
@ -112,5 +109,5 @@ class LockEngine(object):
"""
if lock.lock_id is not None:
self.lock_api.delete_lock(lock.context, lock.lock_id)
LOG.info(_LI("Lock %(lock_id)s released for lock_key %(key)s"),
LOG.info("Lock %(lock_id)s released for lock_key %(key)s",
{'lock_id': lock.lock_id, 'key': lock.lock_key})

View File

@ -28,8 +28,8 @@ from glare.common import exception
from glare.common import store_api
from glare.common import utils
from glare.db import artifact_api
from glare.i18n import _
from glare import locking
from glare.i18n import _, _LI
from glare.objects.meta import attribute
from glare.objects.meta import fields as glare_fields
from glare.objects.meta import validators
@ -264,8 +264,8 @@ class BaseArtifact(base.VersionedObject):
values['created_at'] = timeutils.utcnow()
values['updated_at'] = values['created_at']
af = cls._init_artifact(context, values)
LOG.info(_LI("Parameters validation for artifact creation "
"passed for request %s."), context.request_id)
LOG.info("Parameters validation for artifact creation "
"passed for request %s.", context.request_id)
af_vals = cls.db_api.create(
context, af._obj_changes_to_primitive(), cls.get_type_name())
return cls._init_artifact(context, af_vals)
@ -367,8 +367,8 @@ class BaseArtifact(base.VersionedObject):
raise exception.BadRequest(msg)
setattr(af, key, value)
LOG.info(_LI("Parameters validation for artifact %(artifact)s "
"update passed for request %(request)s."),
LOG.info("Parameters validation for artifact %(artifact)s "
"update passed for request %(request)s.",
{'artifact': af.id, 'request': context.request_id})
updated_af = cls.db_api.update(
context, af.id, af._obj_changes_to_primitive())
@ -635,8 +635,7 @@ class BaseArtifact(base.VersionedObject):
if blobs:
# delete blobs one by one
cls._delete_blobs(blobs, context, af)
LOG.info(_LI("Blobs successfully deleted "
"for artifact %s"), af.id)
LOG.info("Blobs successfully deleted for artifact %s", af.id)
# delete artifact itself
cls.db_api.delete(context, af.id)
@ -665,8 +664,8 @@ class BaseArtifact(base.VersionedObject):
raise exception.InvalidStatusTransition(
orig=af.status, new=cls.STATUS.ACTIVE
)
LOG.info(_LI("Parameters validation for artifact %(artifact)s "
"activate passed for request %(request)s."),
LOG.info("Parameters validation for artifact %(artifact)s "
"activate passed for request %(request)s.",
{'artifact': af.id, 'request': context.request_id})
af = cls.db_api.update(context, af.id, {'status': cls.STATUS.ACTIVE})
return cls._init_artifact(context, af)
@ -689,8 +688,8 @@ class BaseArtifact(base.VersionedObject):
raise exception.InvalidStatusTransition(
orig=af.status, new=cls.STATUS.ACTIVE
)
LOG.info(_LI("Parameters validation for artifact %(artifact)s "
"reactivate passed for request %(request)s."),
LOG.info("Parameters validation for artifact %(artifact)s "
"reactivate passed for request %(request)s.",
{'artifact': af.id, 'request': context.request_id})
af = cls.db_api.update(context, af.id, {'status': cls.STATUS.ACTIVE})
return cls._init_artifact(context, af)
@ -719,8 +718,8 @@ class BaseArtifact(base.VersionedObject):
raise exception.InvalidStatusTransition(
orig=af.status, new=cls.STATUS.ACTIVE
)
LOG.info(_LI("Parameters validation for artifact %(artifact)s "
"deactivate passed for request %(request)s."),
LOG.info("Parameters validation for artifact %(artifact)s "
"deactivate passed for request %(request)s.",
{'artifact': af.id, 'request': context.request_id})
af = cls.db_api.update(context, af.id,
{'status': cls.STATUS.DEACTIVATED})
@ -748,8 +747,8 @@ class BaseArtifact(base.VersionedObject):
cls._validate_versioning(context, af.name, af.version,
is_public=True)
cls.validate_publish(context, af)
LOG.info(_LI("Parameters validation for artifact %(artifact)s "
"publish passed for request %(request)s."),
LOG.info("Parameters validation for artifact %(artifact)s "
"publish passed for request %(request)s.",
{'artifact': af.id, 'request': context.request_id})
af = cls.db_api.update(context, af.id, {'visibility': 'public'})
return cls._init_artifact(context, af)
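Where a message carries several values, as in the LOG.info calls above, the same lazy style is used with a mapping: %(name)s placeholders plus a single dict argument, with no _LI wrapper and no eager interpolation. A tiny sketch with invented names:

from oslo_log import log as logging

LOG = logging.getLogger(__name__)


def log_publish(artifact_id, request_id):
    # The dict is applied to the placeholders only if the record is emitted
    LOG.info("Parameters validation for artifact %(artifact)s "
             "publish passed for request %(request)s.",
             {'artifact': artifact_id, 'request': request_id})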

View File

@ -25,7 +25,7 @@ from oslo_versionedobjects import base as vo_base
import six
from glare.common import exception
from glare.i18n import _, _LE
from glare.i18n import _
from glare.objects import base
CONF = cfg.CONF
@ -67,8 +67,8 @@ def import_modules_list(modules):
custom_module_list.append(importlib.import_module(module_name))
except Exception as e:
LOG.exception(e)
LOG.error(_LE("Cannot import custom artifact type from module "
"%(module_name)%s. Error: %(error)s"),
LOG.error("Cannot import custom artifact type from module "
"%(module_name)%s. Error: %(error)s",
{'module_name': module_name, 'error': str(e)})
return custom_module_list

View File

@ -39,7 +39,6 @@ import sqlalchemy.exc
from glare.db.migration import migration
import glare.db.sqlalchemy.api
from glare.i18n import _LE
from glare.tests.unit import glare_fixtures
LOG = logging.getLogger(__name__)
@ -96,9 +95,8 @@ class WalkVersionsMixin(object):
if check:
check(engine, data)
except Exception:
LOG.error(_LE("Failed to migrate to version %(version)s on engine "
"%(engine)s"),
{'version': version, 'engine': engine})
LOG.error("Failed to migrate to version %(version)s on engine "
"%(engine)s", {'version': version, 'engine': engine})
raise

View File

@ -13,7 +13,6 @@
# under the License.
import inspect
import mock
from glare.hacking import checks
from glare.tests.unit import base
@ -142,47 +141,6 @@ class HackingTestCase(base.BaseTestCase):
self.assertEqual(0, len(list(checks.check_python3_no_itervalues(
"obj.values()"))))
def test_validate_log_translations(self):
with mock.patch('pep8.noqa', return_value=True):
self.assertEqual([], list(
checks.validate_log_translations("", "", None)))
self.assertEqual(0, len(list(checks.validate_log_translations(
"LOG.info(_LI(''))", "", None))))
self.assertEqual(1, len(list(checks.validate_log_translations(
"LOG.info('')", "", None))))
self.assertEqual(0, len(list(checks.validate_log_translations(
"LOG.exception(_LE(''))", "", None))))
self.assertEqual(1, len(list(checks.validate_log_translations(
"LOG.exception('')", "", None))))
self.assertEqual(0, len(list(checks.validate_log_translations(
"LOG.error(_LE(''))", "", None))))
self.assertEqual(1, len(list(checks.validate_log_translations(
"LOG.error('')", "", None))))
self.assertEqual(0, len(list(checks.validate_log_translations(
"LOG.critical(_LC(''))", "", None))))
self.assertEqual(1, len(list(checks.validate_log_translations(
"LOG.critical('')", "", None))))
self.assertEqual(0, len(list(checks.validate_log_translations(
"LOG.warning(_LW(''))", "", None))))
self.assertEqual(1, len(list(checks.validate_log_translations(
"LOG.warning('')", "", None))))
self.assertEqual(0, len(list(checks.validate_log_translations(
"LOG.audit(_(''))", "", None))))
self.assertEqual(1, len(list(checks.validate_log_translations(
"LOG.audit('')", "", None))))
def test_factory(self):
class Register(object):
def __init__(self):