Merge "mark logging.info translation accordingly"

This commit is contained in:
Jenkins 2015-10-08 22:21:53 +00:00 committed by Gerrit Code Review
commit 6fe1319f66
14 changed files with 46 additions and 47 deletions

View File

@@ -175,12 +175,12 @@ class PollingTask(object):
# If no resources, skip for this pollster # If no resources, skip for this pollster
if not polling_resources: if not polling_resources:
p_context = 'new ' if history else '' p_context = 'new ' if history else ''
LOG.info(_("Skip pollster %(name)s, no %(p_context)s" LOG.info(_LI("Skip pollster %(name)s, no %(p_context)s"
"resources found this cycle"), "resources found this cycle"),
{'name': pollster.name, 'p_context': p_context}) {'name': pollster.name, 'p_context': p_context})
continue continue
LOG.info(_("Polling pollster %(poll)s in the context of " LOG.info(_LI("Polling pollster %(poll)s in the context of "
"%(src)s"), "%(src)s"),
dict(poll=pollster.name, src=source_name)) dict(poll=pollster.name, src=source_name))
try: try:

View File

@@ -27,7 +27,7 @@ from oslo_utils import timeutils
import pytz import pytz
import six import six
from ceilometer.i18n import _ from ceilometer.i18n import _, _LI
LOG = log.getLogger(__name__) LOG = log.getLogger(__name__)
@@ -72,7 +72,7 @@ class Evaluator(object):
try: try:
previous = alarm.state previous = alarm.state
if previous != state: if previous != state:
LOG.info(_('alarm %(id)s transitioning to %(state)s because ' LOG.info(_LI('alarm %(id)s transitioning to %(state)s because '
'%(reason)s') % {'id': alarm.alarm_id, '%(reason)s') % {'id': alarm.alarm_id,
'state': state, 'state': state,
'reason': reason}) 'reason': reason})

View File

@@ -17,7 +17,7 @@
from oslo_log import log from oslo_log import log
from ceilometer.alarm import notifier from ceilometer.alarm import notifier
from ceilometer.i18n import _ from ceilometer.i18n import _LI
LOG = log.getLogger(__name__) LOG = log.getLogger(__name__)
@@ -28,7 +28,7 @@ class LogAlarmNotifier(notifier.AlarmNotifier):
@staticmethod @staticmethod
def notify(action, alarm_id, alarm_name, severity, previous, current, def notify(action, alarm_id, alarm_name, severity, previous, current,
reason, reason_data): reason, reason_data):
LOG.info(_( LOG.info(_LI(
"Notifying alarm %(alarm_name)s %(alarm_id)s of %(severity)s " "Notifying alarm %(alarm_name)s %(alarm_id)s of %(severity)s "
"priority from %(previous)s to %(current)s with action %(action)s" "priority from %(previous)s to %(current)s with action %(action)s"
" because %(reason)s.") % ({'alarm_name': alarm_name, " because %(reason)s.") % ({'alarm_name': alarm_name,

View File

@@ -23,7 +23,7 @@ import requests
import six.moves.urllib.parse as urlparse import six.moves.urllib.parse as urlparse
from ceilometer.alarm import notifier from ceilometer.alarm import notifier
from ceilometer.i18n import _ from ceilometer.i18n import _LI
LOG = log.getLogger(__name__) LOG = log.getLogger(__name__)
@@ -65,7 +65,7 @@ class RestAlarmNotifier(notifier.AlarmNotifier):
if not headers.get('x-openstack-request-id'): if not headers.get('x-openstack-request-id'):
headers['x-openstack-request-id'] = context.generate_request_id() headers['x-openstack-request-id'] = context.generate_request_id()
LOG.info(_( LOG.info(_LI(
"Notifying alarm %(alarm_name)s %(alarm_id)s with severity" "Notifying alarm %(alarm_name)s %(alarm_id)s with severity"
" %(severity)s from %(previous)s to %(current)s with action " " %(severity)s from %(previous)s to %(current)s with action "
"%(action)s because %(reason)s. request-id: %(request_id)s ") % "%(action)s because %(reason)s. request-id: %(request_id)s ") %

View File

@@ -30,7 +30,7 @@ from stevedore import extension
from ceilometer import alarm as ceilometer_alarm from ceilometer import alarm as ceilometer_alarm
from ceilometer.alarm import rpc as rpc_alarm from ceilometer.alarm import rpc as rpc_alarm
from ceilometer import coordination as coordination from ceilometer import coordination as coordination
from ceilometer.i18n import _ from ceilometer.i18n import _, _LI
from ceilometer import messaging from ceilometer import messaging
@@ -91,7 +91,7 @@ class AlarmService(object):
def _evaluate_assigned_alarms(self): def _evaluate_assigned_alarms(self):
try: try:
alarms = self._assigned_alarms() alarms = self._assigned_alarms()
LOG.info(_('initiating evaluation cycle on %d alarms') % LOG.info(_LI('initiating evaluation cycle on %d alarms') %
len(alarms)) len(alarms))
for alarm in alarms: for alarm in alarms:
self._evaluate_alarm(alarm) self._evaluate_alarm(alarm)

View File

@@ -25,8 +25,7 @@ from werkzeug import serving
from ceilometer.api import config as api_config from ceilometer.api import config as api_config
from ceilometer.api import hooks from ceilometer.api import hooks
from ceilometer.api import middleware from ceilometer.api import middleware
from ceilometer.i18n import _ from ceilometer.i18n import _LI, _LW
from ceilometer.i18n import _LW
LOG = log.getLogger(__name__) LOG = log.getLogger(__name__)
@@ -130,16 +129,16 @@ def build_server():
# Create the WSGI server and start it # Create the WSGI server and start it
host, port = cfg.CONF.api.host, cfg.CONF.api.port host, port = cfg.CONF.api.host, cfg.CONF.api.port
LOG.info(_('Starting server in PID %s') % os.getpid()) LOG.info(_LI('Starting server in PID %s') % os.getpid())
LOG.info(_("Configuration:")) LOG.info(_LI("Configuration:"))
cfg.CONF.log_opt_values(LOG, logging.INFO) cfg.CONF.log_opt_values(LOG, logging.INFO)
if host == '0.0.0.0': if host == '0.0.0.0':
LOG.info(_( LOG.info(_LI(
'serving on 0.0.0.0:%(sport)s, view at http://127.0.0.1:%(vport)s') 'serving on 0.0.0.0:%(sport)s, view at http://127.0.0.1:%(vport)s')
% ({'sport': port, 'vport': port})) % ({'sport': port, 'vport': port}))
else: else:
LOG.info(_("serving on http://%(host)s:%(port)s") % ( LOG.info(_LI("serving on http://%(host)s:%(port)s") % (
{'host': host, 'port': port})) {'host': host, 'port': port}))
serving.run_simple(cfg.CONF.api.host, cfg.CONF.api.port, serving.run_simple(cfg.CONF.api.host, cfg.CONF.api.port,

View File

@@ -46,7 +46,7 @@ from ceilometer.api.controllers.v2.alarm_rules import combination
from ceilometer.api.controllers.v2 import base from ceilometer.api.controllers.v2 import base
from ceilometer.api.controllers.v2 import utils as v2_utils from ceilometer.api.controllers.v2 import utils as v2_utils
from ceilometer.api import rbac from ceilometer.api import rbac
from ceilometer.i18n import _ from ceilometer.i18n import _, _LI
from ceilometer import keystone_client from ceilometer import keystone_client
from ceilometer import messaging from ceilometer import messaging
from ceilometer import utils from ceilometer import utils
@@ -326,7 +326,7 @@ class Alarm(base.Base):
action_set = set(actions) action_set = set(actions)
if len(actions) != len(action_set): if len(actions) != len(action_set):
LOG.info(_('duplicate actions are found: %s, ' LOG.info(_LI('duplicate actions are found: %s, '
'remove duplicate ones') % actions) 'remove duplicate ones') % actions)
actions = list(action_set) actions = list(action_set)
setattr(alarm, actions_name, actions) setattr(alarm, actions_name, actions)

View File

@@ -24,7 +24,7 @@ import six
import yaml import yaml
from ceilometer.event.storage import models from ceilometer.event.storage import models
from ceilometer.i18n import _ from ceilometer.i18n import _, _LI
OPTS = [ OPTS = [
cfg.StrOpt('definitions_cfg_file', cfg.StrOpt('definitions_cfg_file',
@@ -404,7 +404,7 @@ def setup_events(trait_plugin_mgr):
" Using default config.") " Using default config.")
events_config = [] events_config = []
LOG.info(_("Event Definitions: %s"), events_config) LOG.info(_LI("Event Definitions: %s"), events_config)
allow_drop = cfg.CONF.event.drop_unmatched_notifications allow_drop = cfg.CONF.event.drop_unmatched_notifications
return NotificationEventsConverter(events_config, return NotificationEventsConverter(events_config,

View File

@@ -21,7 +21,7 @@ from oslo_log import log
from oslo_utils import timeutils from oslo_utils import timeutils
from ceilometer.agent import plugin_base from ceilometer.agent import plugin_base
from ceilometer.i18n import _ from ceilometer.i18n import _LI
from ceilometer import nova_client from ceilometer import nova_client
from ceilometer import sample from ceilometer import sample
@@ -51,7 +51,7 @@ class FloatingIPPollster(plugin_base.PollsterBase):
for endpoint in resources: for endpoint in resources:
for ip in self._iter_floating_ips(manager.keystone, cache, for ip in self._iter_floating_ips(manager.keystone, cache,
endpoint): endpoint):
LOG.info(_("FLOATING IP USAGE: %s") % ip.ip) LOG.info(_LI("FLOATING IP USAGE: %s") % ip.ip)
# FIXME (flwang) Now Nova API /os-floating-ips can't provide # FIXME (flwang) Now Nova API /os-floating-ips can't provide
# those attributes were used by Ceilometer, such as project # those attributes were used by Ceilometer, such as project
# id, host. In this fix, those attributes usage will be # id, host. In this fix, those attributes usage will be

View File

@@ -32,7 +32,7 @@ import yaml
from ceilometer.event.storage import models from ceilometer.event.storage import models
from ceilometer.i18n import _, _LW from ceilometer.i18n import _, _LI, _LW
from ceilometer import publisher from ceilometer import publisher
from ceilometer.publisher import utils as publisher_utils from ceilometer.publisher import utils as publisher_utils
from ceilometer import sample as sample_util from ceilometer import sample as sample_util
@@ -412,7 +412,7 @@ class Sink(object):
"No transformer named %s loaded" % transformer['name'], "No transformer named %s loaded" % transformer['name'],
cfg) cfg)
transformers.append(ext.plugin(**parameter)) transformers.append(ext.plugin(**parameter))
LOG.info(_( LOG.info(_LI(
"Pipeline %(pipeline)s: Setup transformer instance %(name)s " "Pipeline %(pipeline)s: Setup transformer instance %(name)s "
"with parameter %(param)s") % ({'pipeline': self, "with parameter %(param)s") % ({'pipeline': self,
'name': transformer['name'], 'name': transformer['name'],
@@ -709,7 +709,7 @@ class PipelineManager(object):
if not ('sources' in cfg and 'sinks' in cfg): if not ('sources' in cfg and 'sinks' in cfg):
raise PipelineException("Both sources & sinks are required", raise PipelineException("Both sources & sinks are required",
cfg) cfg)
LOG.info(_('detected decoupled pipeline config format')) LOG.info(_LI('detected decoupled pipeline config format'))
unique_names = set() unique_names = set()
sources = [] sources = []
@@ -771,7 +771,7 @@ class PollingManager(object):
if not ('sources' in cfg and 'sinks' in cfg): if not ('sources' in cfg and 'sinks' in cfg):
raise PipelineException("Both sources & sinks are required", raise PipelineException("Both sources & sinks are required",
cfg) cfg)
LOG.info(_('detected decoupled pipeline config format')) LOG.info(_LI('detected decoupled pipeline config format'))
unique_names = set() unique_names = set()
for s in cfg.get('sources', []): for s in cfg.get('sources', []):
@@ -795,7 +795,7 @@ def _setup_pipeline_manager(cfg_file, transformer_manager, p_type=SAMPLE_TYPE):
data = fap.read() data = fap.read()
pipeline_cfg = yaml.safe_load(data) pipeline_cfg = yaml.safe_load(data)
LOG.info(_("Pipeline config: %s"), pipeline_cfg) LOG.info(_LI("Pipeline config: %s"), pipeline_cfg)
return PipelineManager(pipeline_cfg, return PipelineManager(pipeline_cfg,
transformer_manager or transformer_manager or
@@ -814,7 +814,7 @@ def _setup_polling_manager(cfg_file):
data = fap.read() data = fap.read()
pipeline_cfg = yaml.safe_load(data) pipeline_cfg = yaml.safe_load(data)
LOG.info(_("Pipeline config: %s"), pipeline_cfg) LOG.info(_LI("Pipeline config: %s"), pipeline_cfg)
return PollingManager(pipeline_cfg) return PollingManager(pipeline_cfg)

View File

@@ -27,7 +27,7 @@ from oslo_utils import excutils
import six import six
import six.moves.urllib.parse as urlparse import six.moves.urllib.parse as urlparse
from ceilometer.i18n import _, _LE from ceilometer.i18n import _, _LI, _LE
from ceilometer import messaging from ceilometer import messaging
from ceilometer import publisher from ceilometer import publisher
from ceilometer.publisher import utils from ceilometer.publisher import utils
@@ -101,7 +101,7 @@ class MessagingPublisher(publisher.PublisherBase):
self.local_queue = [] self.local_queue = []
if self.policy in ['default', 'queue', 'drop']: if self.policy in ['default', 'queue', 'drop']:
LOG.info(_('Publishing policy set to %s') % self.policy) LOG.info(_LI('Publishing policy set to %s') % self.policy)
else: else:
LOG.warn(_('Publishing policy is unknown (%s) force to default') LOG.warn(_('Publishing policy is unknown (%s) force to default')
% self.policy) % self.policy)

View File

@@ -17,7 +17,7 @@
from oslo_log import log from oslo_log import log
from ceilometer.i18n import _ from ceilometer.i18n import _LI
from ceilometer.storage import base from ceilometer.storage import base
LOG = log.getLogger(__name__) LOG = log.getLogger(__name__)
@@ -38,7 +38,7 @@ class Connection(base.Connection):
:param data: a dictionary such as returned by :param data: a dictionary such as returned by
ceilometer.meter.meter_message_from_counter. ceilometer.meter.meter_message_from_counter.
""" """
LOG.info(_('metering data %(counter_name)s for %(resource_id)s: ' LOG.info(_LI('metering data %(counter_name)s for %(resource_id)s: '
'%(counter_volume)s') '%(counter_volume)s')
% ({'counter_name': data['counter_name'], % ({'counter_name': data['counter_name'],
'resource_id': data['resource_id'], 'resource_id': data['resource_id'],
@@ -50,7 +50,7 @@ class Connection(base.Connection):
Clearing occurs according to the time-to-live. Clearing occurs according to the time-to-live.
:param ttl: Number of seconds to keep records for. :param ttl: Number of seconds to keep records for.
""" """
LOG.info(_("Dropping metering data with TTL %d"), ttl) LOG.info(_LI("Dropping metering data with TTL %d"), ttl)
def get_resources(self, user=None, project=None, source=None, def get_resources(self, user=None, project=None, source=None,
start_timestamp=None, start_timestamp_op=None, start_timestamp=None, start_timestamp_op=None,

View File

@@ -34,7 +34,7 @@ from sqlalchemy.orm import aliased
from sqlalchemy.sql.expression import cast from sqlalchemy.sql.expression import cast
import ceilometer import ceilometer
from ceilometer.i18n import _ from ceilometer.i18n import _, _LI
from ceilometer import storage from ceilometer import storage
from ceilometer.storage import base from ceilometer.storage import base
from ceilometer.storage import models as api_models from ceilometer.storage import models as api_models
@@ -361,7 +361,7 @@ class Connection(base.Connection):
sample_q = (session.query(models.Sample) sample_q = (session.query(models.Sample)
.filter(models.Sample.timestamp < end)) .filter(models.Sample.timestamp < end))
rows = sample_q.delete() rows = sample_q.delete()
LOG.info(_("%d samples removed from database"), rows) LOG.info(_LI("%d samples removed from database"), rows)
if not cfg.CONF.sql_expire_samples_only: if not cfg.CONF.sql_expire_samples_only:
with session.begin(): with session.begin():
@@ -397,7 +397,7 @@ class Connection(base.Connection):
.filter(models.Resource.metadata_hash .filter(models.Resource.metadata_hash
.like('delete_%'))) .like('delete_%')))
resource_q.delete(synchronize_session=False) resource_q.delete(synchronize_session=False)
LOG.info(_("Expired residual resource and" LOG.info(_LI("Expired residual resource and"
" meter definition data")) " meter definition data"))
def get_resources(self, user=None, project=None, source=None, def get_resources(self, user=None, project=None, source=None,

View File

@@ -30,7 +30,7 @@ import pymongo.errors
import six import six
from six.moves.urllib import parse from six.moves.urllib import parse
from ceilometer.i18n import _ from ceilometer.i18n import _, _LI
ERROR_INDEX_WITH_DIFFERENT_SPEC_ALREADY_EXISTS = 86 ERROR_INDEX_WITH_DIFFERENT_SPEC_ALREADY_EXISTS = 86
@@ -261,7 +261,7 @@ class ConnectionPool(object):
splitted_url = netutils.urlsplit(url) splitted_url = netutils.urlsplit(url)
log_data = {'db': splitted_url.scheme, log_data = {'db': splitted_url.scheme,
'nodelist': connection_options['nodelist']} 'nodelist': connection_options['nodelist']}
LOG.info(_('Connecting to %(db)s on %(nodelist)s') % log_data) LOG.info(_LI('Connecting to %(db)s on %(nodelist)s') % log_data)
client = self._mongo_connect(url) client = self._mongo_connect(url)
self._pool[pool_key] = weakref.ref(client) self._pool[pool_key] = weakref.ref(client)
return client return client
@@ -461,7 +461,7 @@ class MongoProxy(object):
self.conn.create_index(keys, name=name, *args, **kwargs) self.conn.create_index(keys, name=name, *args, **kwargs)
except pymongo.errors.OperationFailure as e: except pymongo.errors.OperationFailure as e:
if e.code is ERROR_INDEX_WITH_DIFFERENT_SPEC_ALREADY_EXISTS: if e.code is ERROR_INDEX_WITH_DIFFERENT_SPEC_ALREADY_EXISTS:
LOG.info(_("Index %s will be recreate.") % name) LOG.info(_LI("Index %s will be recreate.") % name)
self._recreate_index(keys, name, *args, **kwargs) self._recreate_index(keys, name, *args, **kwargs)
@safe_mongo_call @safe_mongo_call