Replace LOG.warn with LOG.warning

Python 3 deprecated the logger.warn() method.
We prefer warning() to avoid DeprecationWarning.
See:
https://docs.python.org/3/library/logging.html#logging.warning

Change-Id: Ie85ae39f69d35019c1008d35154cd717c2f8afbe
Closes-Bug: #1508442
Author: Chaozhe.Chen  2015-12-30 18:14:45 +08:00
commit 7cfda25c10, parent 586a503cb7
18 changed files with 62 additions and 61 deletions
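
Background (an illustrative snippet, not part of the commit): recent
Python 3 releases emit a DeprecationWarning whenever the undocumented
warn() alias is called, and Python 3.13 removed the alias altogether,
so warning() is the only spelling that is safe everywhere.

import logging
import warnings

logging.basicConfig(level=logging.WARNING)
LOG = logging.getLogger(__name__)

# The documented method, available on every Python release.
LOG.warning("UDP: Cannot decode data sent by %s", "10.0.0.1")

# The deprecated alias: probe for it first, since Python 3.13 removed it.
if hasattr(LOG, "warn"):
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        LOG.warn("deprecated spelling")
    # On Python 3.4-3.12 this typically prints ['DeprecationWarning'].
    print([w.category.__name__ for w in caught])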

@@ -116,7 +116,7 @@ class CollectorService(os_service.Service):
             try:
                 sample = msgpack.loads(data, encoding='utf-8')
             except Exception:
-                LOG.warn(_("UDP: Cannot decode data sent by %s"), source)
+                LOG.warning(_("UDP: Cannot decode data sent by %s"), source)
             else:
                 try:
                     LOG.debug("UDP: Storing %s", sample)

@ -213,7 +213,7 @@ class GnocchiDispatcher(dispatcher.MeterDispatcherBase):
except ImportError: except ImportError:
pass pass
except oslo_cache.exception.ConfigurationError as exc: except oslo_cache.exception.ConfigurationError as exc:
LOG.warn(_LW('unable to configure oslo_cache: %s') % exc) LOG.warning(_LW('unable to configure oslo_cache: %s') % exc)
self._gnocchi_project_id = None self._gnocchi_project_id = None
self._gnocchi_project_id_lock = threading.Lock() self._gnocchi_project_id_lock = threading.Lock()
@@ -312,8 +312,8 @@ class GnocchiDispatcher(dispatcher.MeterDispatcherBase):
             samples = list(samples)
             rd = self._get_resource_definition(metric_name)
             if rd is None:
-                LOG.warn("metric %s is not handled by gnocchi" %
-                         metric_name)
+                LOG.warning("metric %s is not handled by gnocchi" %
+                            metric_name)
                 continue
             if rd.cfg.get("ignore"):
                 continue

@@ -151,7 +151,7 @@ class SensorNotification(plugin_base.NotificationBase):
                     project_id=info['project_id'])
             except InvalidSensorData as exc:
-                LOG.warn(
+                LOG.warning(
                     'invalid sensor data for %(resource)s: %(error)s' %
                     dict(resource=resource_id, error=exc)
                 )

@@ -116,10 +116,10 @@ def register_keystoneauth_opts(conf):
 def setup_keystoneauth(conf):
     if conf[CFG_GROUP].auth_type == "password-ceilometer-legacy":
-        LOG.warn("Value 'password-ceilometer-legacy' for '[%s]/auth_type' "
-                 "is deprecated. And will be removed in Ceilometer 7.0. "
-                 "Use 'password' instead.",
-                 CFG_GROUP)
+        LOG.warning("Value 'password-ceilometer-legacy' for '[%s]/auth_type' "
+                    "is deprecated. And will be removed in Ceilometer 7.0. "
+                    "Use 'password' instead.",
+                    CFG_GROUP)
     ka_loading.load_auth_from_conf_options(conf, CFG_GROUP)

@@ -45,9 +45,9 @@ class FirewallPollster(base.BaseServicesPollster):
            status = self.get_status_id(fw['status'])
            if status == -1:
                # unknown status, skip this sample
-               LOG.warn(_("Unknown status %(stat)s received on fw %(id)s,"
-                          "skipping sample") % {'stat': fw['status'],
-                                                'id': fw['id']})
+               LOG.warning(_("Unknown status %(stat)s received on fw %(id)s,"
+                             "skipping sample") % {'stat': fw['status'],
+                                                   'id': fw['id']})
                continue
            yield sample.Sample(

@@ -60,9 +60,9 @@ class LBPoolPollster(base.BaseServicesPollster):
            status = self.get_status_id(pool['status'])
            if status == -1:
                # unknown status, skip this sample
-               LOG.warn(_("Unknown status %(stat)s received on pool %(id)s, "
-                          "skipping sample") % {'stat': pool['status'],
-                                                'id': pool['id']})
+               LOG.warning(_("Unknown status %(stat)s received on pool "
+                             "%(id)s, skipping sample")
+                           % {'stat': pool['status'], 'id': pool['id']})
                continue
            yield sample.Sample(
@@ -108,9 +108,9 @@ class LBVipPollster(base.BaseServicesPollster):
            status = self.get_status_id(vip['status'])
            if status == -1:
                # unknown status, skip this sample
-               LOG.warn(_("Unknown status %(stat)s received on vip %(id)s, "
-                          "skipping sample") % {'stat': vip['status'],
-                                                'id': vip['id']})
+               LOG.warning(_("Unknown status %(stat)s received on vip "
+                             "%(id)s, skipping sample")
+                           % {'stat': vip['status'], 'id': vip['id']})
                continue
            yield sample.Sample(
@@ -149,9 +149,9 @@ class LBMemberPollster(base.BaseServicesPollster):
            LOG.debug("Load Balancer Member : %s" % member)
            status = self.get_status_id(member['status'])
            if status == -1:
-               LOG.warn(_("Unknown status %(stat)s received on member %(id)s,"
-                          "skipping sample") % {'stat': member['status'],
-                                                'id': member['id']})
+               LOG.warning(_("Unknown status %(stat)s received on member "
+                             "%(id)s, skipping sample")
+                           % {'stat': member['status'], 'id': member['id']})
                continue
            yield sample.Sample(
                name='network.services.lb.member',

@@ -46,9 +46,9 @@ class VPNServicesPollster(base.BaseServicesPollster):
            status = self.get_status_id(vpn['status'])
            if status == -1:
                # unknown status, skip this sample
-               LOG.warn(_("Unknown status %(stat)s received on vpn %(id)s,"
-                          "skipping sample") % {'stat': vpn['status'],
-                                                'id': vpn['id']})
+               LOG.warning(_("Unknown status %(stat)s received on vpn "
+                             "%(id)s, skipping sample")
+                           % {'stat': vpn['status'], 'id': vpn['id']})
                continue
            yield sample.Sample(

@@ -41,9 +41,9 @@ def logged(func):
             return func(*args, **kwargs)
         except exceptions.NeutronClientException as e:
             if e.status_code == 404:
-                LOG.warn("The resource could not be found.")
+                LOG.warning("The resource could not be found.")
             else:
-                LOG.warn(e)
+                LOG.warning(e)
             return []
         except Exception as e:
             LOG.exception(e)

@@ -92,8 +92,8 @@ class MessagingPublisher(publisher.PublisherBase):
         if self.policy in ['default', 'queue', 'drop']:
             LOG.info(_LI('Publishing policy set to %s') % self.policy)
         else:
-            LOG.warn(_('Publishing policy is unknown (%s) force to default')
-                     % self.policy)
+            LOG.warning(_('Publishing policy is unknown (%s) force to '
+                          'default') % self.policy)
             self.policy = 'default'

         self.retry = 1 if self.policy in ['queue', 'drop'] else None
@@ -144,8 +144,8 @@ class MessagingPublisher(publisher.PublisherBase):
         if queue_length > self.max_queue_length > 0:
             count = queue_length - self.max_queue_length
             self.local_queue = self.local_queue[count:]
-            LOG.warn(_("Publisher max local_queue length is exceeded, "
-                       "dropping %d oldest samples") % count)
+            LOG.warning(_("Publisher max local_queue length is exceeded, "
+                          "dropping %d oldest samples") % count)

     def _process_queue(self, queue, policy):
         current_retry = 0
@@ -156,12 +156,12 @@ class MessagingPublisher(publisher.PublisherBase):
         except DeliveryFailure:
             data = sum([len(m) for __, __, m in queue])
             if policy == 'queue':
-                LOG.warn(_("Failed to publish %d datapoints, queue them"),
-                         data)
+                LOG.warning(_("Failed to publish %d datapoints, queue "
+                              "them"), data)
                 return queue
             elif policy == 'drop':
-                LOG.warn(_("Failed to publish %d datapoints, "
-                           "dropping them"), data)
+                LOG.warning(_("Failed to publish %d datapoints, "
+                              "dropping them"), data)
                 return []
             current_retry += 1
             if current_retry >= self.max_retry:

@@ -64,7 +64,7 @@ class UDPPublisher(publisher.PublisherBase):
                 self.socket.sendto(msgpack.dumps(msg),
                                    (self.host, self.port))
             except Exception as e:
-                LOG.warn(_("Unable to send sample over UDP"))
+                LOG.warning(_("Unable to send sample over UDP"))
                 LOG.exception(e)

     def publish_events(self, context, events):

@@ -471,9 +471,9 @@ def create_tables(conn, tables, column_families):
                     separator=conn.table_prefix_separator,
                     table_name=table))
-            LOG.warn(_("Cannot create table %(table_name)s "
-                       "it already exists. Ignoring error")
-                     % {'table_name': table})
+            LOG.warning(_("Cannot create table %(table_name)s "
+                          "it already exists. Ignoring error")
+                        % {'table_name': table})

 def quote(s, *args):

@@ -307,8 +307,9 @@ class Connection(base.Connection):
                             {'id': internal_id, 'meta_key': key,
                              'value': v})
                 except KeyError:
-                    LOG.warn(_("Unknown metadata type. Key (%s) "
-                               "will not be queryable."), key)
+                    LOG.warning(_("Unknown metadata type. Key "
+                                  "(%s) will not be queryable."),
+                                key)
         for _model in meta_map.keys():
             conn.execute(_model.__table__.insert(),
                          meta_map[_model])

@@ -271,8 +271,8 @@ class ConnectionPool(object):
         try:
             return MongoProxy(pymongo.MongoClient(url))
         except pymongo.errors.ConnectionFailure as e:
-            LOG.warn(_('Unable to connect to the database server: '
-                       '%(errmsg)s.') % {'errmsg': e})
+            LOG.warning(_('Unable to connect to the database server: '
+                          '%(errmsg)s.') % {'errmsg': e})
             raise
@@ -414,10 +414,10 @@ def safe_mongo_call(call):
                           'after %(retries)d retries. Giving up.') %
                         {'retries': max_retries})
                 raise
-            LOG.warn(_('Unable to reconnect to the primary mongodb: '
-                       '%(errmsg)s. Trying again in %(retry_interval)d '
-                       'seconds.') %
-                     {'errmsg': err, 'retry_interval': retry_interval})
+            LOG.warning(_('Unable to reconnect to the primary '
+                          'mongodb: %(errmsg)s. Trying again in '
+                          '%(retry_interval)d seconds.') %
+                        {'errmsg': err, 'retry_interval': retry_interval})
             attempts += 1
             time.sleep(retry_interval)
     return closure

@@ -1157,7 +1157,7 @@ class BasePipelineTestCase(base.BaseTestCase):
         cpu_util_sample = publisher.samples[0]
         self.assertEqual(12.5, cpu_util_sample.volume)
-        the_log.warn.assert_called_with(
+        the_log.warning.assert_called_with(
             'dropping out of time order sample: %s',
             (counters[1],)
         )
@@ -1523,7 +1523,7 @@ class BasePipelineTestCase(base.BaseTestCase):
             'target': {'name': 'aggregated-bytes'}
         }, expected_length=1)
         s = samples[0]
-        self.assertTrue(mylog.warn.called)
+        self.assertTrue(mylog.warning.called)
         self.assertEqual('aggregated-bytes', s.name)
         self.assertEqual(154, s.volume)
         self.assertEqual('test_user_bis', s.user_id)

@@ -150,7 +150,7 @@ class TestNotifications(base.BaseTestCase):
         processor = ipmi.TemperatureSensorNotification(None)
         messages = []
-        mylog.warn = lambda *args: messages.extend(args)
+        mylog.warning = lambda *args: messages.extend(args)
         list(processor.process_notification(ipmi_test_data.MISSING_SENSOR))
@@ -166,7 +166,7 @@ class TestNotifications(base.BaseTestCase):
         processor = ipmi.TemperatureSensorNotification(None)
         messages = []
-        mylog.warn = lambda *args: messages.extend(args)
+        mylog.warning = lambda *args: messages.extend(args)
         list(processor.process_notification(ipmi_test_data.BAD_SENSOR))
@@ -187,7 +187,7 @@ class TestNotifications(base.BaseTestCase):
         processor = ipmi.TemperatureSensorNotification(None)
         messages = []
-        mylog.warn = lambda *args: messages.extend(args)
+        mylog.warning = lambda *args: messages.extend(args)
         list(processor.process_notification(ipmi_test_data.NO_NODE_ID))
@@ -203,7 +203,7 @@ class TestNotifications(base.BaseTestCase):
         processor = ipmi.TemperatureSensorNotification(None)
         messages = []
-        mylog.warn = lambda *args: messages.extend(args)
+        mylog.warning = lambda *args: messages.extend(args)
         list(processor.process_notification(ipmi_test_data.NO_SENSOR_ID))

@@ -185,7 +185,7 @@ class TestPublisherPolicy(TestPublisher):
             msg_publisher.DeliveryFailure,
             getattr(publisher, self.pub_func),
             mock.MagicMock(), self.test_data)
-        self.assertTrue(mylog.warn.called)
+        self.assertTrue(mylog.warning.called)
         self.assertEqual('default', publisher.policy)
         self.assertEqual(0, len(publisher.local_queue))
         fake_send.assert_called_once_with(

@@ -54,8 +54,8 @@ class ArithmeticTransformer(transformer.TransformerBase):
             self.cache = collections.defaultdict(dict)
             self.latest_timestamp = None
         else:
-            LOG.warn(_('Arithmetic transformer must use at least one'
-                       ' meter in expression \'%s\''), self.expr)
+            LOG.warning(_('Arithmetic transformer must use at least one'
+                          ' meter in expression \'%s\''), self.expr)

     def _update_cache(self, _sample):
         """Update the cache with the latest sample."""
@@ -92,8 +92,8 @@ class ArithmeticTransformer(transformer.TransformerBase):
                 resource_metadata=reference_sample.resource_metadata
             )
         except Exception as e:
-            LOG.warn(_('Unable to evaluate expression %(expr)s: %(exc)s'),
-                     {'expr': self.expr, 'exc': e})
+            LOG.warning(_('Unable to evaluate expression %(expr)s: %(exc)s'),
+                        {'expr': self.expr, 'exc': e})

     def handle_sample(self, context, _sample):
         self._update_cache(_sample)

@@ -195,7 +195,7 @@ class RateOfChangeTransformer(ScalingTransformer):
         time_delta = timeutils.delta_seconds(prev_timestamp, timestamp)
         # disallow violations of the arrow of time
         if time_delta < 0:
-            LOG.warn(_('dropping out of time order sample: %s'), (s,))
+            LOG.warning(_('dropping out of time order sample: %s'), (s,))
             # Reset the cache to the newer sample.
             self.cache[key] = prev
             return None
@@ -213,8 +213,8 @@ class RateOfChangeTransformer(ScalingTransformer):
             s = self._convert(s, rate_of_change)
             LOG.debug('converted to: %s', s)
         else:
-            LOG.warn(_('dropping sample with no predecessor: %s'),
-                     (s,))
+            LOG.warning(_('dropping sample with no predecessor: %s'),
+                        (s,))
             s = None
         return s
@@ -262,7 +262,7 @@ class AggregatorTransformer(ScalingTransformer):
         drop = ['drop'] if is_droppable else []
         if value or mandatory:
             if value not in ['last', 'first'] + drop:
-                LOG.warn('%s is unknown (%s), using last' % (name, value))
+                LOG.warning('%s is unknown (%s), using last' % (name, value))
                 value = 'last'
             self.merged_attribute_policy[name] = value
         else: