mark logging.info translation accordingly

fix logging translation levels

Change-Id: Ie5386ad75c3803565280b5f8a08f8dd0366ac8dd
parent f87edaa1e0
commit 7cfcb7e03e
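For context, the substitution below follows the oslo.i18n log-translation convention: messages logged at INFO level are wrapped with the _LI marker instead of the generic _, so they land in a dedicated log-level message catalog. As a rough sketch, ceilometer.i18n typically exposes marker functions built from an oslo_i18n.TranslatorFactory; the exact contents of that module are an assumption here for illustration, not part of this diff:

    # Minimal sketch of an oslo.i18n integration module (assumed layout).
    import oslo_i18n

    _translators = oslo_i18n.TranslatorFactory(domain='ceilometer')

    # Generic translation marker, used for user-facing strings and exceptions.
    _ = _translators.primary

    # Log-level-specific markers; each feeds its own translation catalog.
    _LI = _translators.log_info      # LOG.info()
    _LW = _translators.log_warning   # LOG.warn() / LOG.warning()
    _LE = _translators.log_error     # LOG.error()

With those markers available, an INFO call such as LOG.info(_LI("Pipeline config: %s"), pipeline_cfg) is translated via the log-info catalog, which is the substitution each hunk below makes.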
@@ -175,13 +175,13 @@ class PollingTask(object):
                 # If no resources, skip for this pollster
                 if not polling_resources:
                     p_context = 'new ' if history else ''
-                    LOG.info(_("Skip pollster %(name)s, no %(p_context)s"
-                               "resources found this cycle"),
+                    LOG.info(_LI("Skip pollster %(name)s, no %(p_context)s"
+                                 "resources found this cycle"),
                              {'name': pollster.name, 'p_context': p_context})
                     continue

-                LOG.info(_("Polling pollster %(poll)s in the context of "
-                           "%(src)s"),
+                LOG.info(_LI("Polling pollster %(poll)s in the context of "
+                             "%(src)s"),
                          dict(poll=pollster.name, src=source_name))
                 try:
                     samples = pollster.obj.get_samples(
@@ -27,7 +27,7 @@ from oslo_utils import timeutils
 import pytz
 import six

-from ceilometer.i18n import _
+from ceilometer.i18n import _, _LI


 LOG = log.getLogger(__name__)
@@ -72,10 +72,10 @@ class Evaluator(object):
         try:
             previous = alarm.state
             if previous != state:
-                LOG.info(_('alarm %(id)s transitioning to %(state)s because '
-                           '%(reason)s') % {'id': alarm.alarm_id,
-                                            'state': state,
-                                            'reason': reason})
+                LOG.info(_LI('alarm %(id)s transitioning to %(state)s because '
+                             '%(reason)s') % {'id': alarm.alarm_id,
+                                              'state': state,
+                                              'reason': reason})

             self._client.alarms.set_state(alarm.alarm_id, state=state)
             alarm.state = state
@@ -17,7 +17,7 @@
 from oslo_log import log

 from ceilometer.alarm import notifier
-from ceilometer.i18n import _
+from ceilometer.i18n import _LI

 LOG = log.getLogger(__name__)

@@ -28,7 +28,7 @@ class LogAlarmNotifier(notifier.AlarmNotifier):
     @staticmethod
     def notify(action, alarm_id, alarm_name, severity, previous, current,
                reason, reason_data):
-        LOG.info(_(
+        LOG.info(_LI(
             "Notifying alarm %(alarm_name)s %(alarm_id)s of %(severity)s "
             "priority from %(previous)s to %(current)s with action %(action)s"
             " because %(reason)s.") % ({'alarm_name': alarm_name,
@@ -23,7 +23,7 @@ import requests
 import six.moves.urllib.parse as urlparse

 from ceilometer.alarm import notifier
-from ceilometer.i18n import _
+from ceilometer.i18n import _LI

 LOG = log.getLogger(__name__)
@@ -65,7 +65,7 @@ class RestAlarmNotifier(notifier.AlarmNotifier):
         if not headers.get('x-openstack-request-id'):
             headers['x-openstack-request-id'] = context.generate_request_id()

-        LOG.info(_(
+        LOG.info(_LI(
             "Notifying alarm %(alarm_name)s %(alarm_id)s with severity"
             " %(severity)s from %(previous)s to %(current)s with action "
             "%(action)s because %(reason)s. request-id: %(request_id)s ") %
@@ -30,7 +30,7 @@ from stevedore import extension
 from ceilometer import alarm as ceilometer_alarm
 from ceilometer.alarm import rpc as rpc_alarm
 from ceilometer import coordination as coordination
-from ceilometer.i18n import _
+from ceilometer.i18n import _, _LI
 from ceilometer import messaging


@@ -91,7 +91,7 @@ class AlarmService(object):
     def _evaluate_assigned_alarms(self):
         try:
             alarms = self._assigned_alarms()
-            LOG.info(_('initiating evaluation cycle on %d alarms') %
+            LOG.info(_LI('initiating evaluation cycle on %d alarms') %
                      len(alarms))
             for alarm in alarms:
                 self._evaluate_alarm(alarm)
@@ -25,8 +25,7 @@ from werkzeug import serving
 from ceilometer.api import config as api_config
 from ceilometer.api import hooks
 from ceilometer.api import middleware
-from ceilometer.i18n import _
-from ceilometer.i18n import _LW
+from ceilometer.i18n import _LI, _LW

 LOG = log.getLogger(__name__)

@@ -130,16 +129,16 @@ def build_server():
     # Create the WSGI server and start it
     host, port = cfg.CONF.api.host, cfg.CONF.api.port

-    LOG.info(_('Starting server in PID %s') % os.getpid())
-    LOG.info(_("Configuration:"))
+    LOG.info(_LI('Starting server in PID %s') % os.getpid())
+    LOG.info(_LI("Configuration:"))
     cfg.CONF.log_opt_values(LOG, logging.INFO)

     if host == '0.0.0.0':
-        LOG.info(_(
+        LOG.info(_LI(
             'serving on 0.0.0.0:%(sport)s, view at http://127.0.0.1:%(vport)s')
             % ({'sport': port, 'vport': port}))
     else:
-        LOG.info(_("serving on http://%(host)s:%(port)s") % (
+        LOG.info(_LI("serving on http://%(host)s:%(port)s") % (
             {'host': host, 'port': port}))

     serving.run_simple(cfg.CONF.api.host, cfg.CONF.api.port,
@@ -46,7 +46,7 @@ from ceilometer.api.controllers.v2.alarm_rules import combination
 from ceilometer.api.controllers.v2 import base
 from ceilometer.api.controllers.v2 import utils as v2_utils
 from ceilometer.api import rbac
-from ceilometer.i18n import _
+from ceilometer.i18n import _, _LI
 from ceilometer import keystone_client
 from ceilometer import messaging
 from ceilometer import utils
@@ -326,8 +326,8 @@ class Alarm(base.Base):

             action_set = set(actions)
             if len(actions) != len(action_set):
-                LOG.info(_('duplicate actions are found: %s, '
-                           'remove duplicate ones') % actions)
+                LOG.info(_LI('duplicate actions are found: %s, '
+                             'remove duplicate ones') % actions)
                 actions = list(action_set)
                 setattr(alarm, actions_name, actions)

@@ -24,7 +24,7 @@ import six
 import yaml

 from ceilometer.event.storage import models
-from ceilometer.i18n import _
+from ceilometer.i18n import _, _LI

 OPTS = [
     cfg.StrOpt('definitions_cfg_file',
@@ -404,7 +404,7 @@ def setup_events(trait_plugin_mgr):
                   " Using default config.")
         events_config = []

-    LOG.info(_("Event Definitions: %s"), events_config)
+    LOG.info(_LI("Event Definitions: %s"), events_config)

     allow_drop = cfg.CONF.event.drop_unmatched_notifications
     return NotificationEventsConverter(events_config,
@@ -21,7 +21,7 @@ from oslo_log import log
 from oslo_utils import timeutils

 from ceilometer.agent import plugin_base
-from ceilometer.i18n import _
+from ceilometer.i18n import _LI
 from ceilometer import nova_client
 from ceilometer import sample

@@ -51,7 +51,7 @@ class FloatingIPPollster(plugin_base.PollsterBase):
         for endpoint in resources:
             for ip in self._iter_floating_ips(manager.keystone, cache,
                                               endpoint):
-                LOG.info(_("FLOATING IP USAGE: %s") % ip.ip)
+                LOG.info(_LI("FLOATING IP USAGE: %s") % ip.ip)
                 # FIXME (flwang) Now Nova API /os-floating-ips can't provide
                 # those attributes were used by Ceilometer, such as project
                 # id, host. In this fix, those attributes usage will be
@@ -32,7 +32,7 @@ import yaml


 from ceilometer.event.storage import models
-from ceilometer.i18n import _, _LW
+from ceilometer.i18n import _, _LI, _LW
 from ceilometer import publisher
 from ceilometer.publisher import utils as publisher_utils
 from ceilometer import sample as sample_util
@@ -412,7 +412,7 @@ class Sink(object):
                     "No transformer named %s loaded" % transformer['name'],
                     cfg)
             transformers.append(ext.plugin(**parameter))
-            LOG.info(_(
+            LOG.info(_LI(
                 "Pipeline %(pipeline)s: Setup transformer instance %(name)s "
                 "with parameter %(param)s") % ({'pipeline': self,
                                                 'name': transformer['name'],
@@ -709,7 +709,7 @@ class PipelineManager(object):
         if not ('sources' in cfg and 'sinks' in cfg):
             raise PipelineException("Both sources & sinks are required",
                                     cfg)
-        LOG.info(_('detected decoupled pipeline config format'))
+        LOG.info(_LI('detected decoupled pipeline config format'))

         unique_names = set()
         sources = []
@@ -771,7 +771,7 @@ class PollingManager(object):
         if not ('sources' in cfg and 'sinks' in cfg):
             raise PipelineException("Both sources & sinks are required",
                                     cfg)
-        LOG.info(_('detected decoupled pipeline config format'))
+        LOG.info(_LI('detected decoupled pipeline config format'))

         unique_names = set()
         for s in cfg.get('sources', []):
@@ -795,7 +795,7 @@ def _setup_pipeline_manager(cfg_file, transformer_manager, p_type=SAMPLE_TYPE):
         data = fap.read()

     pipeline_cfg = yaml.safe_load(data)
-    LOG.info(_("Pipeline config: %s"), pipeline_cfg)
+    LOG.info(_LI("Pipeline config: %s"), pipeline_cfg)

     return PipelineManager(pipeline_cfg,
                            transformer_manager or
@@ -814,7 +814,7 @@ def _setup_polling_manager(cfg_file):
         data = fap.read()

     pipeline_cfg = yaml.safe_load(data)
-    LOG.info(_("Pipeline config: %s"), pipeline_cfg)
+    LOG.info(_LI("Pipeline config: %s"), pipeline_cfg)

     return PollingManager(pipeline_cfg)

@@ -27,7 +27,7 @@ from oslo_utils import excutils
 import six
 import six.moves.urllib.parse as urlparse

-from ceilometer.i18n import _, _LE
+from ceilometer.i18n import _, _LI, _LE
 from ceilometer import messaging
 from ceilometer import publisher
 from ceilometer.publisher import utils
@@ -101,7 +101,7 @@ class MessagingPublisher(publisher.PublisherBase):
         self.local_queue = []

         if self.policy in ['default', 'queue', 'drop']:
-            LOG.info(_('Publishing policy set to %s') % self.policy)
+            LOG.info(_LI('Publishing policy set to %s') % self.policy)
         else:
             LOG.warn(_('Publishing policy is unknown (%s) force to default')
                      % self.policy)
@@ -17,7 +17,7 @@

 from oslo_log import log

-from ceilometer.i18n import _
+from ceilometer.i18n import _LI
 from ceilometer.storage import base

 LOG = log.getLogger(__name__)
@@ -38,8 +38,8 @@ class Connection(base.Connection):
         :param data: a dictionary such as returned by
                      ceilometer.meter.meter_message_from_counter.
         """
-        LOG.info(_('metering data %(counter_name)s for %(resource_id)s: '
-                   '%(counter_volume)s')
+        LOG.info(_LI('metering data %(counter_name)s for %(resource_id)s: '
+                     '%(counter_volume)s')
                  % ({'counter_name': data['counter_name'],
                      'resource_id': data['resource_id'],
                      'counter_volume': data['counter_volume']}))
@@ -50,7 +50,7 @@ class Connection(base.Connection):
         Clearing occurs according to the time-to-live.
         :param ttl: Number of seconds to keep records for.
         """
-        LOG.info(_("Dropping metering data with TTL %d"), ttl)
+        LOG.info(_LI("Dropping metering data with TTL %d"), ttl)

     def get_resources(self, user=None, project=None, source=None,
                       start_timestamp=None, start_timestamp_op=None,
@@ -34,7 +34,7 @@ from sqlalchemy.orm import aliased
 from sqlalchemy.sql.expression import cast

 import ceilometer
-from ceilometer.i18n import _
+from ceilometer.i18n import _, _LI
 from ceilometer import storage
 from ceilometer.storage import base
 from ceilometer.storage import models as api_models
@@ -361,7 +361,7 @@ class Connection(base.Connection):
             sample_q = (session.query(models.Sample)
                         .filter(models.Sample.timestamp < end))
             rows = sample_q.delete()
-            LOG.info(_("%d samples removed from database"), rows)
+            LOG.info(_LI("%d samples removed from database"), rows)

         if not cfg.CONF.sql_expire_samples_only:
             with session.begin():
@@ -397,8 +397,8 @@ class Connection(base.Connection):
                               .filter(models.Resource.metadata_hash
                                       .like('delete_%')))
                 resource_q.delete(synchronize_session=False)
-                LOG.info(_("Expired residual resource and"
-                           " meter definition data"))
+                LOG.info(_LI("Expired residual resource and"
+                             " meter definition data"))

     def get_resources(self, user=None, project=None, source=None,
                       start_timestamp=None, start_timestamp_op=None,
|
@ -30,7 +30,7 @@ import pymongo.errors
|
||||
import six
|
||||
from six.moves.urllib import parse
|
||||
|
||||
from ceilometer.i18n import _
|
||||
from ceilometer.i18n import _, _LI
|
||||
|
||||
ERROR_INDEX_WITH_DIFFERENT_SPEC_ALREADY_EXISTS = 86
|
||||
|
||||
@@ -261,7 +261,7 @@ class ConnectionPool(object):
         splitted_url = netutils.urlsplit(url)
         log_data = {'db': splitted_url.scheme,
                     'nodelist': connection_options['nodelist']}
-        LOG.info(_('Connecting to %(db)s on %(nodelist)s') % log_data)
+        LOG.info(_LI('Connecting to %(db)s on %(nodelist)s') % log_data)
         client = self._mongo_connect(url)
         self._pool[pool_key] = weakref.ref(client)
         return client
@@ -461,7 +461,7 @@ class MongoProxy(object):
             self.conn.create_index(keys, name=name, *args, **kwargs)
         except pymongo.errors.OperationFailure as e:
             if e.code is ERROR_INDEX_WITH_DIFFERENT_SPEC_ALREADY_EXISTS:
-                LOG.info(_("Index %s will be recreate.") % name)
+                LOG.info(_LI("Index %s will be recreate.") % name)
                 self._recreate_index(keys, name, *args, **kwargs)

     @safe_mongo_call
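A side note on the two calling styles that appear in the hunks above (this change keeps both as-is): LOG.info accepts either deferred interpolation, where the arguments are passed separately and only formatted if an INFO record is actually emitted, or eager interpolation with %, where the string is rendered before the logger sees it. A small sketch contrasting the two, using placeholder values rather than real configuration:

    from oslo_log import log
    from ceilometer.i18n import _LI

    LOG = log.getLogger(__name__)
    pipeline_cfg = {'sources': [], 'sinks': []}             # placeholder value
    log_data = {'db': 'mongodb', 'nodelist': 'localhost'}   # placeholder value

    # Deferred interpolation: formatting is done by the logging machinery,
    # only when the INFO level is enabled.
    LOG.info(_LI("Pipeline config: %s"), pipeline_cfg)

    # Eager interpolation: the message is rendered by % up front,
    # regardless of the effective log level.
    LOG.info(_LI('Connecting to %(db)s on %(nodelist)s') % log_data)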