Remove remaining log translation

Log translation support was removed during the Pike cycle [1].

[1] https://review.opendev.org/c/openstack/oslo.i18n/+/446762

Change-Id: I4ebb24e4c2b729b0b0f85dc7dd4bb34f57eb3d29
Signed-off-by: Takashi Kajinami <kajinamit@oss.nttdata.com>
This commit is contained in:
Takashi Kajinami
2025-08-31 00:06:38 +09:00
parent 37547f904a
commit b1aeac1be3
28 changed files with 134 additions and 135 deletions

View File

@@ -105,12 +105,11 @@ class GenericComputePollster(plugin_base.PollsterBase):
def _stats_to_sample(self, instance, stats, polled_time):
volume = getattr(stats, self.sample_stats_key)
LOG.debug("%(instance_id)s/%(name)s volume: "
"%(volume)s" % {
'name': self.sample_name,
'instance_id': instance.id,
'volume': (volume if volume is not None
else 'Unavailable')})
LOG.debug(
"%(instance_id)s/%(name)s volume: %(volume)s",
{'name': self.sample_name,
'instance_id': instance.id,
'volume': (volume if volume is not None else 'Unavailable')})
if volume is None:
raise NoVolumeException()
@@ -142,7 +141,7 @@ class GenericComputePollster(plugin_base.PollsterBase):
# FIXME(sileht): This should be a removed... but I will
# not change the test logic for now
LOG.warning("%(name)s statistic is not available for "
"instance %(instance_id)s" %
"instance %(instance_id)s",
{'name': self.sample_name,
'instance_id': instance.id})
except virt_inspector.InstanceNotFoundException as err:

View File

@@ -239,5 +239,5 @@ def get_hypervisor_inspector(conf):
invoke_args=(conf, ))
return mgr.driver
except ImportError as e:
LOG.error("Unable to load the hypervisor inspector: %s" % e)
LOG.error("Unable to load the hypervisor inspector: %s", e)
return Inspector(conf)

View File

@@ -117,9 +117,10 @@ class LibvirtInspector(virt_inspector.Inspector):
try:
dom_stats = domain.interfaceStats(name)
except libvirt.libvirtError as ex:
LOG.warning(_("Error from libvirt when running instanceStats, "
"This may not be harmful, but please check : "
"%(ex)s") % {'ex': ex})
LOG.warning("Error from libvirt when running instanceStats, "
"This may not be harmful, but please check : "
"%(ex)s",
{'ex': ex})
continue
# Retrieve previous values
@@ -139,8 +140,8 @@ class LibvirtInspector(virt_inspector.Inspector):
if tx_delta < 0:
tx_delta = dom_stats[4]
else:
LOG.debug('No delta meter predecessor for %s / %s' %
(instance.id, name))
LOG.debug('No delta meter predecessor for %s / %s',
instance.id, name)
rx_delta = 0
tx_delta = 0
@@ -183,9 +184,9 @@ class LibvirtInspector(virt_inspector.Inspector):
except libvirt.libvirtError as ex:
# raised error even if lock is acquired while live migration,
# even it looks normal.
LOG.warning(_("Error from libvirt while checking blockStats, "
"This may not be harmful, but please check : "
"%(ex)s") % {'ex': ex})
LOG.warning("Error from libvirt while checking blockStats, "
"This may not be harmful, but please check : "
"%(ex)s", {'ex': ex})
pass
@libvirt_utils.retry_on_disconnect

View File

@@ -263,14 +263,15 @@ class NotificationEventsConverter:
break
if edef is None:
msg = (_('Dropping Notification %(type)s (uuid:%(msgid)s)')
% dict(type=event_type, msgid=message_id))
if self.conf.event.drop_unmatched_notifications:
LOG.debug(msg)
msg_level = log.DEBUG
else:
# If drop_unmatched_notifications is False, this should
# never happen. (mdragon)
LOG.error(msg)
msg_level = log.ERROR
LOG.log(msg_level,
'Dropping Notification %(type)s (uuid:%(msgid)s)',
dict(type=event_type, msgid=message_id))
return None
return edef.to_event(priority, notification_body)

View File

@@ -114,11 +114,10 @@ class SplitterTraitPlugin(TraitPluginBase):
"""
LOG.warning('split plugin is deprecated, '
'add ".`split(%(sep)s, %(segment)d, '
'%(max_split)d)`" to your jsonpath instead' %
'%(max_split)d)`" to your jsonpath instead',
dict(sep=separator,
segment=segment,
max_split=(-1 if max_split is None
else max_split)))
max_split=(-1 if max_split is None else max_split)))
self.separator = separator
self.segment = segment
@@ -212,9 +211,8 @@ class TimedeltaPlugin(TraitPluginBase):
except Exception as err:
LOG.warning('Failed to parse date from set fields, both '
'fields %(start)s and %(end)s must be datetime: '
'%(err)s' %
dict(start=start[0], end=end[0], err=err)
)
'%(err)s',
dict(start=start[0], end=end[0], err=err))
return [None]
return [abs((end_time - start_time).total_seconds())]

View File

@@ -146,7 +146,7 @@ class SensorNotification(endpoint.SampleEndpoint):
except InvalidSensorData as exc:
LOG.warning(
'invalid sensor data for %(resource)s: %(error)s' %
'invalid sensor data for %(resource)s: %(error)s',
dict(resource=resource_id, error=exc)
)
continue

View File

@@ -14,7 +14,6 @@
from oslo_log import log
from ceilometer.i18n import _
from ceilometer.ipmi.notifications import ironic as parser
from ceilometer.ipmi.platform import exception as ipmiexcept
from ceilometer.ipmi.platform import ipmi_sensor
@@ -64,12 +63,11 @@ class SensorPollster(plugin_base.PollsterBase):
stats = self.ipmi.read_sensor_any(self.METRIC)
except ipmiexcept.IPMIException:
self.polling_failures += 1
LOG.warning(_(
'Polling %(mtr)s sensor failed for %(cnt)s times!')
% ({'mtr': self.METRIC,
'cnt': self.polling_failures}))
LOG.warning(
'Polling %(mtr)s sensor failed for %(cnt)s times!',
{'mtr': self.METRIC, 'cnt': self.polling_failures})
if 0 <= self.conf.ipmi.polling_retry < self.polling_failures:
LOG.warning(_('Pollster for %s is disabled!') % self.METRIC)
LOG.warning('Pollster for %s is disabled!', self.METRIC)
raise plugin_base.PollsterPermanentError(resources)
else:
return

View File

@@ -161,9 +161,8 @@ class MeterDefinition:
nb = (0 if nb_values == 1 and values[0] is None
else nb_values)
LOG.warning('Only %(nb)d fetched meters contain '
'"%(name)s" field instead of %(total)d.' %
dict(name=name, nb=nb,
total=nb_samples))
'"%(name)s" field instead of %(total)d.',
dict(name=name, nb=nb, total=nb_samples))
return
# NOTE(sileht): Transform the sample with multiple values per
@@ -212,14 +211,13 @@ class ProcessMeterNotifications(endpoint.SampleEndpoint):
for meter_cfg in reversed(meters_cfg['metric']):
if meter_cfg.get('name') in definitions:
# skip duplicate meters
LOG.warning("Skipping duplicate meter definition %s"
% meter_cfg)
LOG.warning("Skipping duplicate meter definition %s",
meter_cfg)
continue
try:
md = MeterDefinition(meter_cfg, self.conf, plugin_manager)
except declarative.DefinitionException as e:
errmsg = "Error loading meter definition: %s"
LOG.error(errmsg, str(e))
LOG.error("Error loading meter definition: %s", e)
else:
definitions[meter_cfg['name']] = md
return definitions.values()

View File

@@ -18,7 +18,6 @@
from oslo_log import log
from ceilometer.i18n import _
from ceilometer.network.services import base
from ceilometer import sample
@@ -46,11 +45,11 @@ class FloatingIPPollster(base.BaseServicesPollster):
status = self.get_status_id(fip['status'])
if status == -1:
LOG.warning(
_("Unknown status %(status)s for floating IP address "
"%(address)s (%(id)s), setting volume to -1") % {
"status": fip['status'],
"address": fip['floating_ip_address'],
"id": fip['id']})
"Unknown status %(status)s for floating IP address "
"%(address)s (%(id)s), setting volume to -1",
{"status": fip['status'],
"address": fip['floating_ip_address'],
"id": fip['id']})
yield sample.Sample(
name='ip.floating',
type=sample.TYPE_GAUGE,

View File

@@ -17,7 +17,6 @@ import warnings
from oslo_log import log
from ceilometer.i18n import _
from ceilometer.network.services import base
from ceilometer import sample
@@ -49,13 +48,13 @@ class FirewallPollster(base.BaseServicesPollster):
resources = resources or []
for fw in resources:
LOG.debug("Firewall : %s" % fw)
LOG.debug("Firewall : %s", fw)
status = self.get_status_id(fw['status'])
if status == -1:
# unknown status, skip this sample
LOG.warning(_("Unknown status %(stat)s received on fw %(id)s, "
"skipping sample") % {'stat': fw['status'],
'id': fw['id']})
LOG.warning("Unknown status %(stat)s received on fw %(id)s, "
"skipping sample",
{'stat': fw['status'], 'id': fw['id']})
continue
yield sample.Sample(
@@ -96,7 +95,7 @@ class FirewallPolicyPollster(base.BaseServicesPollster):
resources = resources or []
for fw in resources:
LOG.debug("Firewall Policy: %s" % fw)
LOG.debug("Firewall Policy: %s", fw)
yield sample.Sample(
name='network.services.firewall.policy',

View File

@@ -15,7 +15,6 @@
from oslo_log import log
from ceilometer.i18n import _
from ceilometer.network.services import base
from ceilometer import sample
@@ -45,11 +44,11 @@ class VPNServicesPollster(base.BaseServicesPollster):
status = self.get_status_id(vpn['status'])
if status == -1:
LOG.warning(
_("Unknown status %(status)s for VPN %(name)s (%(id)s), "
"setting volume to -1") % {
"status": vpn['status'],
"name": vpn['name'],
"id": vpn['id']})
"Unknown status %(status)s for VPN %(name)s (%(id)s), "
"setting volume to -1",
{"status": vpn['status'],
"name": vpn['name'],
"id": vpn['id']})
yield sample.Sample(
name='network.services.vpn',
type=sample.TYPE_GAUGE,

View File

@@ -21,7 +21,6 @@ from oslo_log import log
import oslo_messaging
from stevedore import named
from ceilometer.i18n import _
from ceilometer import messaging
@@ -109,7 +108,7 @@ class NotificationService(cotyledon.Service):
@staticmethod
def _log_missing_pipeline(names):
LOG.error(_('Could not load the following pipelines: %s'), names)
LOG.error('Could not load the following pipelines: %s', names)
def run(self):
# Delay startup so workers are jittered

View File

@@ -90,7 +90,7 @@ class EventSink(base.Sink):
p.publish_events(events)
except Exception:
LOG.error("Pipeline %(pipeline)s: %(status)s "
"after error from publisher %(pub)s" %
"after error from publisher %(pub)s",
{'pipeline': self,
'status': 'Continue' if
self.multi_publish else 'Exit', 'pub': p},

View File

@@ -43,10 +43,10 @@ class SampleEndpoint(base.NotificationEndpoint):
message, self.publisher, priority, self)
with self.publisher as p:
p(list(self.build_sample(message)))
except Exception as e:
LOG.error('Fail to process notification message [%s]'
% message, exc_info=True)
raise e
except Exception:
LOG.error('Fail to process notification message [%s]',
message, exc_info=True)
raise
def build_sample(notification):
"""Build sample from provided notification."""
@@ -90,8 +90,8 @@ class SampleSink(base.Sink):
p.publish_samples(samples)
except Exception:
LOG.error("Pipeline %(pipeline)s: Continue after "
"error from publisher %(pub)s"
% {'pipeline': self, 'pub': p},
"error from publisher %(pub)s",
{'pipeline': self, 'pub': p},
exc_info=True)
@staticmethod
@@ -108,10 +108,10 @@ class SamplePipeline(base.Pipeline):
LOG.warning(
'metering data %(counter_name)s for %(resource_id)s '
'@ %(timestamp)s has no volume (volume: None), the sample will'
' be dropped'
% {'counter_name': s.name,
'resource_id': s.resource_id,
'timestamp': s.timestamp if s.timestamp else 'NO TIMESTAMP'}
' be dropped',
{'counter_name': s.name,
'resource_id': s.resource_id,
'timestamp': s.timestamp if s.timestamp else 'NO TIMESTAMP'}
)
return False
if not isinstance(volume, (int, float)):
@@ -121,12 +121,12 @@ class SamplePipeline(base.Pipeline):
LOG.warning(
'metering data %(counter_name)s for %(resource_id)s '
'@ %(timestamp)s has volume which is not a number '
'(volume: %(counter_volume)s), the sample will be dropped'
% {'counter_name': s.name,
'resource_id': s.resource_id,
'timestamp': (
s.timestamp if s.timestamp else 'NO TIMESTAMP'),
'counter_volume': volume}
'(volume: %(counter_volume)s), the sample will be dropped',
{'counter_name': s.name,
'resource_id': s.resource_id,
'timestamp': (
s.timestamp if s.timestamp else 'NO TIMESTAMP'),
'counter_volume': volume}
)
return False
return True

View File

@@ -432,8 +432,8 @@ class PollingTask:
self.resources[key].blacklist.extend(err.fail_res_list)
except Exception as err:
LOG.error(
'Continue after error from %(name)s: %(error)s'
% ({'name': pollster.name, 'error': err}),
'Continue after error from %(name)s: %(error)s',
{'name': pollster.name, 'error': err},
exc_info=True)
def _send_notification(self, samples):

View File

@@ -292,14 +292,14 @@ class GnocchiPublisher(publisher.ConfigPublisherBase):
domain_id=domain.id)
except ka_exceptions.NotFound:
LOG.warning('Filtered project [%s] not found in keystone, '
'ignoring the filter_project option' %
'ignoring the filter_project option',
self.filter_project)
self.filter_project = None
return None
except Exception:
LOG.exception('Failed to retrieve filtered project [%s].'
% self.filter_project)
LOG.exception('Failed to retrieve filtered project [%s].',
self.filter_project)
raise
self._gnocchi_project_id = project.id
LOG.debug("Filtered project [%s] found with ID [%s].",
@@ -376,7 +376,7 @@ class GnocchiPublisher(publisher.ConfigPublisherBase):
rd = self.metric_map.get(metric_name)
if rd is None:
if metric_name not in self._already_logged_metric_names:
LOG.warning("metric %s is not handled by Gnocchi" %
LOG.warning("metric %s is not handled by Gnocchi",
metric_name)
self._already_logged_metric_names.add(metric_name)
continue
@@ -623,4 +623,5 @@ class GnocchiPublisher(publisher.ConfigPublisherBase):
except Exception:
LOG.error("Fail to update the resource %s", resource,
exc_info=True)
LOG.debug('Resource {} ended at {}'.format(resource["id"], ended_at))
LOG.debug('Resource %(resource_id)s ended at %(ended_at)s',
{'resource_id': resource["id"], 'ended_at': ended_at})

View File

@@ -145,7 +145,7 @@ class HttpPublisher(publisher.ConfigPublisherBase):
self.session.mount(self.target, adapters.HTTPAdapter(**kwargs))
LOG.debug('HttpPublisher for endpoint %s is initialized!' %
LOG.debug('HttpPublisher for endpoint %s is initialized!',
self.target)
@staticmethod
@@ -153,7 +153,7 @@ class HttpPublisher(publisher.ConfigPublisherBase):
try:
return cast(params.pop(name)[-1]) if cast else params.pop(name)[-1]
except (ValueError, TypeError, KeyError):
LOG.debug('Default value %(value)s is used for %(name)s' %
LOG.debug('Default value %(value)s is used for %(name)s',
{'value': default_value, 'name': name})
return default_value
@@ -180,7 +180,7 @@ class HttpPublisher(publisher.ConfigPublisherBase):
self.target, res.status_code)
except requests.exceptions.HTTPError:
LOG.exception('Status Code: %(code)s. '
'Failed to dispatch message: %(data)s' %
'Failed to dispatch message: %(data)s',
{'code': res.status_code, 'data': data})
def publish_samples(self, samples):

View File

@@ -26,7 +26,6 @@ import oslo_messaging
from oslo_utils import excutils
from urllib import parse as urlparse
from ceilometer.i18n import _
from ceilometer import messaging
from ceilometer import publisher
from ceilometer.publisher import utils
@@ -88,8 +87,8 @@ class MessagingPublisher(publisher.ConfigPublisherBase, metaclass=abc.ABCMeta):
if self.policy in ['default', 'queue', 'drop']:
LOG.info('Publishing policy set to %s', self.policy)
else:
LOG.warning(_('Publishing policy is unknown (%s) force to '
'default'), self.policy)
LOG.warning('Publishing policy is unknown (%s) force to default',
self.policy)
self.policy = 'default'
self.retry = 1 if self.policy in ['queue', 'drop'] else None
@@ -142,8 +141,8 @@ class MessagingPublisher(publisher.ConfigPublisherBase, metaclass=abc.ABCMeta):
if queue_length > self.max_queue_length > 0:
count = queue_length - self.max_queue_length
self.local_queue = self.local_queue[count:]
LOG.warning(_("Publisher max local_queue length is exceeded, "
"dropping %d oldest samples") % count)
LOG.warning("Publisher max local_queue length is exceeded, "
"dropping %d oldest samples", count)
def _process_queue(self, queue, policy):
current_retry = 0
@@ -154,12 +153,12 @@ class MessagingPublisher(publisher.ConfigPublisherBase, metaclass=abc.ABCMeta):
except DeliveryFailure:
data = sum([len(m) for __, m in queue])
if policy == 'queue':
LOG.warning(_("Failed to publish %d datapoints, queue "
"them"), data)
LOG.warning("Failed to publish %d datapoints, queue them",
data)
return queue
elif policy == 'drop':
LOG.warning(_("Failed to publish %d datapoints, "
"dropping them"), data)
LOG.warning("Failed to publish %d datapoints, "
"dropping them", data)
return []
current_retry += 1
if current_retry >= self.max_retry:

View File

@@ -122,7 +122,7 @@ class OpentelemetryHttpPublisher(http.HttpPublisher):
return [self.get_data_points_model(
sample.timestamp, attributes, sample.volume)]
except Exception as e:
LOG.warning("Get data point error, %s" % e)
LOG.warning("Get data point error, %s", e)
return []
def get_opentelemetry_model(self, sample):

View File

@@ -22,7 +22,6 @@ from oslo_log import log
from oslo_utils import netutils
import ceilometer
from ceilometer.i18n import _
from ceilometer import publisher
from ceilometer.publisher import utils
@@ -42,17 +41,17 @@ class TCPPublisher(publisher.ConfigPublisherBase):
self.socket = socket.create_connection(self.inet_addr)
return True
except socket.gaierror:
LOG.error(_("Unable to resolve the remote %(host)s") %
LOG.error("Unable to resolve the remote %(host)s",
{'host': self.inet_addr[0],
'port': self.inet_addr[1]})
except TimeoutError:
LOG.error(_("Unable to connect to the remote endpoint "
"%(host)s:%(port)d. The connection timed out.") %
LOG.error("Unable to connect to the remote endpoint "
"%(host)s:%(port)d. The connection timed out.",
{'host': self.inet_addr[0],
'port': self.inet_addr[1]})
except ConnectionRefusedError:
LOG.error(_("Unable to connect to the remote endpoint "
"%(host)s:%(port)d. Connection refused.") %
LOG.error("Unable to connect to the remote endpoint "
"%(host)s:%(port)d. Connection refused.",
{'host': self.inet_addr[0],
'port': self.inet_addr[1]})
return False
@@ -78,15 +77,15 @@ class TCPPublisher(publisher.ConfigPublisherBase):
self.socket.send(msg_len + encoded_msg)
continue
except OSError:
LOG.warning(_("Unable to send sample over TCP, trying "
"to reconnect and resend the message"))
LOG.warning("Unable to send sample over TCP, trying "
"to reconnect and resend the message")
if self.connect_socket():
try:
self.socket.send(msg_len + encoded_msg)
continue
except OSError:
pass
LOG.error(_("Unable to reconnect and resend sample over TCP"))
LOG.error("Unable to reconnect and resend sample over TCP")
# NOTE (jokke): We do not handle exceptions in the calling code
# so raising the exception from here needs quite a bit more work.
# Same time we don't want to spam the retry messages as it's

View File

@@ -22,7 +22,6 @@ from oslo_log import log
from oslo_utils import netutils
import ceilometer
from ceilometer.i18n import _
from ceilometer import publisher
from ceilometer.publisher import utils
@@ -72,7 +71,7 @@ class UDPPublisher(publisher.ConfigPublisherBase):
self.socket.sendto(msgpack.dumps(msg, use_bin_type=True),
(self.host, self.port))
except Exception as e:
LOG.warning(_("Unable to send sample over UDP"))
LOG.warning("Unable to send sample over UDP")
LOG.exception(e)
def publish_events(self, events):

View File

@@ -168,7 +168,8 @@ class TestEventEndpoint(tests_base.BaseTestCase):
self.assertEqual(oslo_messaging.NotificationResult.REQUEUE, ret)
exception_mock = mock_logger.error
self.assertIn('Exit after error from publisher',
exception_mock.call_args_list[0][0][0])
exception_mock.call_args_list[0][0][0] %
exception_mock.call_args_list[0][0][1])
def test_message_to_event_bad_event_multi_publish(self):
@@ -188,4 +189,5 @@ class TestEventEndpoint(tests_base.BaseTestCase):
self.assertEqual(oslo_messaging.NotificationResult.HANDLED, ret)
exception_mock = mock_logger.error
self.assertIn('Continue after error from publisher',
exception_mock.call_args_list[0][0][0])
exception_mock.call_args_list[0][0][0] %
exception_mock.call_args_list[0][0][1])

View File

@@ -172,7 +172,7 @@ class TestNotifications(base.BaseTestCase):
'invalid sensor data for '
'f4982fd2-2f2b-4bb5-9aff-48aac801d1ad-pci_riser_1_temp_(0x33): '
"missing 'Sensor Reading' in payload",
messages[0]
messages[0] % messages[1]
)
@mock.patch('ceilometer.ipmi.notifications.ironic.LOG')
@@ -188,7 +188,7 @@ class TestNotifications(base.BaseTestCase):
'invalid sensor data for '
'f4982fd2-2f2b-4bb5-9aff-48aac801d1ad-pci_riser_1_temp_(0x33): '
'unable to parse sensor reading: some bad stuff',
messages[0]
messages[0] % messages[1]
)
@mock.patch('ceilometer.ipmi.notifications.ironic.LOG')
@@ -208,7 +208,7 @@ class TestNotifications(base.BaseTestCase):
self.assertEqual(
'invalid sensor data for missing id: missing key in payload: '
"'node_uuid'",
messages[0]
messages[0] % messages[1]
)
@mock.patch('ceilometer.ipmi.notifications.ironic.LOG')
@@ -224,5 +224,5 @@ class TestNotifications(base.BaseTestCase):
self.assertEqual(
'invalid sensor data for missing id: missing key in payload: '
"'Sensor ID'",
messages[0]
messages[0] % messages[1]
)

View File

@@ -345,7 +345,8 @@ class TestMeterProcessing(test.BaseTestCase):
self.assertEqual(2, len(self.handler.definitions))
args, kwargs = LOG.error.call_args_list[0]
self.assertEqual("Error loading meter definition: %s", args[0])
self.assertTrue(args[1].endswith("Invalid type bad_type specified"))
self.assertTrue(
str(args[1]).endswith("Invalid type bad_type specified"))
def test_jsonpath_values_parsed(self):
cfg = yaml.dump(
@@ -687,8 +688,10 @@ class TestMeterProcessing(test.BaseTestCase):
self._load_meter_def_file(cfg)
c = list(self.handler.build_sample(event))
self.assertEqual(0, len(c))
LOG.warning.assert_called_with('Only 0 fetched meters contain '
'"volume" field instead of 2.')
log_called_args = LOG.warning.call_args_list
self.assertEqual(
'Only 0 fetched meters contain "volume" field instead of 2.',
log_called_args[0][0][0] % log_called_args[0][0][1])
@mock.patch('ceilometer.meter.notifications.LOG')
def test_multi_meter_payload_invalid_short(self, LOG):
@@ -706,8 +709,10 @@ class TestMeterProcessing(test.BaseTestCase):
self._load_meter_def_file(cfg)
c = list(self.handler.build_sample(event))
self.assertEqual(0, len(c))
LOG.warning.assert_called_with('Only 1 fetched meters contain '
'"volume" field instead of 2.')
log_called_args = LOG.warning.call_args_list
self.assertEqual(
'Only 1 fetched meters contain "volume" field instead of 2.',
log_called_args[0][0][0] % log_called_args[0][0][1])
def test_arithmetic_expr_meter(self):
cfg = yaml.dump(
@@ -910,4 +915,5 @@ class TestMeterProcessing(test.BaseTestCase):
self.assertIn(s.as_dict()['name'], expected_names)
args, kwargs = LOG.error.call_args_list[0]
self.assertEqual("Error loading meter definition: %s", args[0])
self.assertTrue(args[1].endswith("Invalid type bad_type specified"))
self.assertTrue(
str(args[1]).endswith("Invalid type bad_type specified"))

View File

@@ -196,12 +196,11 @@ class BasePipelineTestCase(base.BaseTestCase, metaclass=abc.ABCMeta):
LOG.warning.assert_called_once_with(
'metering data %(counter_name)s for %(resource_id)s '
'@ %(timestamp)s has no volume (volume: %(counter_volume)s), the '
'sample will be dropped'
% {'counter_name': test_s.name,
'resource_id': test_s.resource_id,
'timestamp': test_s.timestamp,
'counter_volume': test_s.volume})
'@ %(timestamp)s has no volume (volume: None), the '
'sample will be dropped',
{'counter_name': test_s.name,
'resource_id': test_s.resource_id,
'timestamp': test_s.timestamp})
self.assertEqual(0, len(publisher.samples))
@@ -230,11 +229,11 @@ class BasePipelineTestCase(base.BaseTestCase, metaclass=abc.ABCMeta):
LOG.warning.assert_called_once_with(
'metering data %(counter_name)s for %(resource_id)s '
'@ %(timestamp)s has volume which is not a number '
'(volume: %(counter_volume)s), the sample will be dropped'
% {'counter_name': test_s.name,
'resource_id': test_s.resource_id,
'timestamp': test_s.timestamp,
'counter_volume': test_s.volume})
'(volume: %(counter_volume)s), the sample will be dropped',
{'counter_name': test_s.name,
'resource_id': test_s.resource_id,
'timestamp': test_s.timestamp,
'counter_volume': test_s.volume})
self.assertEqual(0, len(publisher.samples))

View File

@@ -1274,7 +1274,7 @@ class TestDynamicPollster(base.BaseTestCase):
samples_list.append(s)
except RuntimeError as e:
LOG.debug("Generator threw a StopIteration "
"and we need to catch it [%s]." % e)
"and we need to catch it [%s].", e)
self.assertEqual(0, len(samples_list))

View File

@@ -339,9 +339,11 @@ class PublisherTest(base.BaseTestCase):
def test_activity_gnocchi_project_not_found(self, logger):
self.ks_client.projects.find.side_effect = ka_exceptions.NotFound
self._do_test_activity_filter(2)
logger.warning.assert_called_with(
log_called_args = logger.warning.call_args_list
self.assertEqual(
'Filtered project [service] not found in keystone, ignoring the '
'filter_project option')
'filter_project option',
log_called_args[0][0][0] % log_called_args[0][0][1])
@mock.patch('ceilometer.publisher.gnocchi.GnocchiPublisher'
'._get_gnocchi_client')

View File

@@ -89,7 +89,8 @@ exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,build,install-guide
# [H203] Use assertIs(Not)None to check for None.
# [H204] Use assert(Not)Equal to check for equality.
# [H205] Use assert(Greater|Less)(Equal) for comparison.
enable-extensions=H106,H203,H204,H205
# [H904] Delay string interpolations at logging calls.
enable-extensions=H106,H203,H204,H205,H904
show-source = True
[hacking]