From a6200c0a006be977cb23158050df6613bbef86ed Mon Sep 17 00:00:00 2001
From: Christian Berendt
Date: Fri, 9 May 2014 17:12:44 +0200
Subject: [PATCH] debug level logs should not be translated

According to the OpenStack translation policy available at
https://wiki.openstack.org/wiki/LoggingStandards debug messages should
not be translated. As mentioned in several changes in Nova by garyk,
this is to help prioritize log translation.

Change-Id: I4af4a45a56b1364a2f5196b75cff299d607ab393
Partial-Bug: #1317950
---
 oslo/messaging/_drivers/amqp.py        |  6 ++---
 oslo/messaging/_drivers/impl_qpid.py   |  4 +--
 oslo/messaging/_drivers/impl_rabbit.py |  2 +-
 oslo/messaging/_drivers/impl_zmq.py    | 36 +++++++++++++-------------
 oslo/messaging/notify/_impl_routing.py |  2 +-
 5 files changed, 25 insertions(+), 25 deletions(-)

diff --git a/oslo/messaging/_drivers/amqp.py b/oslo/messaging/_drivers/amqp.py
index b7ec5945a..325c3609a 100644
--- a/oslo/messaging/_drivers/amqp.py
+++ b/oslo/messaging/_drivers/amqp.py
@@ -68,7 +68,7 @@ class ConnectionPool(pool.Pool):
 
     # TODO(comstud): Timeout connections not used in a while
     def create(self):
-        LOG.debug(_('Pool creating new connection'))
+        LOG.debug('Pool creating new connection')
         return self.connection_cls(self.conf, self.url)
 
     def empty(self):
@@ -193,7 +193,7 @@ def unpack_context(conf, msg):
     context_dict['reply_q'] = msg.pop('_reply_q', None)
     context_dict['conf'] = conf
     ctx = RpcContext.from_dict(context_dict)
-    rpc_common._safe_log(LOG.debug, _('unpacked context: %s'), ctx.to_dict())
+    rpc_common._safe_log(LOG.debug, 'unpacked context: %s', ctx.to_dict())
     return ctx
 
 
@@ -248,7 +248,7 @@ def _add_unique_id(msg):
     """Add unique_id for checking duplicate messages."""
     unique_id = uuid.uuid4().hex
     msg.update({UNIQUE_ID: unique_id})
-    LOG.debug(_('UNIQUE_ID is %s.') % (unique_id))
+    LOG.debug('UNIQUE_ID is %s.' % (unique_id))
 
 
 def get_control_exchange(conf):
diff --git a/oslo/messaging/_drivers/impl_qpid.py b/oslo/messaging/_drivers/impl_qpid.py
index c656b517e..10fd7207c 100644
--- a/oslo/messaging/_drivers/impl_qpid.py
+++ b/oslo/messaging/_drivers/impl_qpid.py
@@ -550,7 +550,7 @@ class Connection(object):
             consumer.reconnect(self.session)
             self._register_consumer(consumer)
 
-        LOG.debug(_("Re-established AMQP queues"))
+        LOG.debug("Re-established AMQP queues")
 
     def ensure(self, error_callback, method, *args, **kwargs):
         while True:
@@ -600,7 +600,7 @@ class Connection(object):
 
         def _error_callback(exc):
             if isinstance(exc, qpid_exceptions.Empty):
-                LOG.debug(_('Timed out waiting for RPC response: %s') % exc)
+                LOG.debug('Timed out waiting for RPC response: %s' % exc)
                 raise rpc_common.Timeout()
             else:
                 LOG.exception(_('Failed to consume message from queue: %s') %
diff --git a/oslo/messaging/_drivers/impl_rabbit.py b/oslo/messaging/_drivers/impl_rabbit.py
index fea87860c..29d101a07 100644
--- a/oslo/messaging/_drivers/impl_rabbit.py
+++ b/oslo/messaging/_drivers/impl_rabbit.py
@@ -700,7 +700,7 @@ class Connection(object):
 
         def _error_callback(exc):
             if isinstance(exc, socket.timeout):
-                LOG.debug(_('Timed out waiting for RPC response: %s') % exc)
+                LOG.debug('Timed out waiting for RPC response: %s' % exc)
                 raise rpc_common.Timeout()
             else:
                 LOG.exception(_('Failed to consume message from queue: %s') %
diff --git a/oslo/messaging/_drivers/impl_zmq.py b/oslo/messaging/_drivers/impl_zmq.py
index cb4d06b82..b8be8251e 100644
--- a/oslo/messaging/_drivers/impl_zmq.py
+++ b/oslo/messaging/_drivers/impl_zmq.py
@@ -107,7 +107,7 @@ def _serialize(data):
 
 def _deserialize(data):
     """Deserialization wrapper."""
-    LOG.debug(_("Deserializing: %s"), data)
+    LOG.debug("Deserializing: %s", data)
     return jsonutils.loads(data)
 
 
@@ -142,9 +142,9 @@ class ZmqSocket(object):
         str_data = {'addr': addr, 'type': self.socket_s(),
                     'subscribe': subscribe, 'bind': bind}
 
-        LOG.debug(_("Connecting to %(addr)s with %(type)s"), str_data)
-        LOG.debug(_("-> Subscribed to %(subscribe)s"), str_data)
-        LOG.debug(_("-> bind: %(bind)s"), str_data)
+        LOG.debug("Connecting to %(addr)s with %(type)s", str_data)
+        LOG.debug("-> Subscribed to %(subscribe)s", str_data)
+        LOG.debug("-> bind: %(bind)s", str_data)
 
         try:
             if bind:
@@ -164,7 +164,7 @@ class ZmqSocket(object):
         """Subscribe."""
         if not self.can_sub:
             raise RPCException("Cannot subscribe on this socket.")
-        LOG.debug(_("Subscribing to %s"), msg_filter)
+        LOG.debug("Subscribing to %s", msg_filter)
 
         try:
             self.sock.setsockopt(zmq.SUBSCRIBE, msg_filter)
@@ -273,7 +273,7 @@ class InternalContext(object):
 
     def _get_response(self, ctx, proxy, topic, data):
         """Process a curried message and cast the result to topic."""
-        LOG.debug(_("Running func with context: %s"), ctx.to_dict())
+        LOG.debug("Running func with context: %s", ctx.to_dict())
         data.setdefault('version', None)
         data.setdefault('args', {})
 
@@ -286,7 +286,7 @@ class InternalContext(object):
             # ignore these since they are just from shutdowns
             pass
         except rpc_common.ClientException as e:
-            LOG.debug(_("Expected exception during message handling (%s)") %
+            LOG.debug("Expected exception during message handling (%s)" %
                       e._exc_info[1])
             return {'exc':
                     rpc_common.serialize_remote_exception(e._exc_info,
@@ -311,7 +311,7 @@ class InternalContext(object):
             self._get_response(ctx, proxy, topic, payload),
             ctx.replies)
 
-        LOG.debug(_("Sending reply"))
+        LOG.debug("Sending reply")
         _multi_send(_cast, ctx, topic, {
             'method': '-process_reply',
             'args': {
@@ -549,7 +549,7 @@ class ZmqReactor(ZmqBaseReactor):
     def consume(self, sock):
         #TODO(ewindisch): use zero-copy (i.e. references, not copying)
         data = sock.recv()
-        LOG.debug(_("CONSUMER RECEIVED DATA: %s"), data)
+        LOG.debug("CONSUMER RECEIVED DATA: %s", data)
 
         proxy = self.proxies[sock]
 
@@ -603,7 +603,7 @@ class Connection(rpc_common.Connection):
         inaddr = "ipc://%s/zmq_topic_%s" % \
             (CONF.rpc_zmq_ipc_dir, topic)
 
-        LOG.debug(_("Consumer is a zmq.%s"),
+        LOG.debug("Consumer is a zmq.%s",
                   ['PULL', 'SUB'][sock_type == zmq.SUB])
 
         self.reactor.register(proxy, inaddr, sock_type,
@@ -655,7 +655,7 @@ def _call(addr, context, topic, msg, timeout=None,
     # Replies always come into the reply service.
     reply_topic = "zmq_replies.%s" % CONF.rpc_zmq_host
 
-    LOG.debug(_("Creating payload"))
+    LOG.debug("Creating payload")
     # Curry the original request into a reply method.
     mcontext = RpcContext.marshal(context)
     payload = {
@@ -668,7 +668,7 @@ def _call(addr, context, topic, msg, timeout=None,
         }
     }
 
-    LOG.debug(_("Creating queue socket for reply waiter"))
+    LOG.debug("Creating queue socket for reply waiter")
 
     # Messages arriving async.
     # TODO(ewindisch): have reply consumer with dynamic subscription mgmt
@@ -681,14 +681,14 @@ def _call(addr, context, topic, msg, timeout=None,
                 zmq.SUB, subscribe=msg_id, bind=False
             )
 
-            LOG.debug(_("Sending cast"))
+            LOG.debug("Sending cast")
             _cast(addr, context, topic, payload, envelope=envelope)
 
-            LOG.debug(_("Cast sent; Waiting reply"))
+            LOG.debug("Cast sent; Waiting reply")
             # Blocks until receives reply
             msg = msg_waiter.recv()
-            LOG.debug(_("Received message: %s"), msg)
-            LOG.debug(_("Unpacking response"))
+            LOG.debug("Received message: %s", msg)
+            LOG.debug("Unpacking response")
 
             if msg[2] == 'cast':  # Legacy version
                 raw_msg = _deserialize(msg[-1])[-1]
@@ -728,10 +728,10 @@ def _multi_send(method, context, topic, msg, timeout=None,
     Dispatches to the matchmaker and sends message to all relevant hosts.
     """
     conf = CONF
-    LOG.debug(_("%(msg)s") % {'msg': ' '.join(map(pformat, (topic, msg)))})
+    LOG.debug("%(msg)s" % {'msg': ' '.join(map(pformat, (topic, msg)))})
 
     queues = _get_matchmaker().queues(topic)
-    LOG.debug(_("Sending message(s) to: %s"), queues)
+    LOG.debug("Sending message(s) to: %s", queues)
 
     # Don't stack if we have no matchmaker results
     if not queues:
diff --git a/oslo/messaging/notify/_impl_routing.py b/oslo/messaging/notify/_impl_routing.py
index 5b32cd05c..f9d6dceca 100644
--- a/oslo/messaging/notify/_impl_routing.py
+++ b/oslo/messaging/notify/_impl_routing.py
@@ -70,7 +70,7 @@ class RoutingDriver(notifier._Driver):
         for group in self.routing_groups.values():
             self.used_drivers.update(group.keys())
 
-        LOG.debug(_('loading notifiers from %(namespace)s') %
+        LOG.debug('loading notifiers from %(namespace)s' %
                   {'namespace': self.NOTIFIER_PLUGIN_NAMESPACE})
        self.plugin_manager = dispatch.DispatchExtensionManager(
            namespace=self.NOTIFIER_PLUGIN_NAMESPACE,
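
For readers who want the convention outside the context of a diff, below is a
minimal sketch of the logging/translation pattern this patch enforces, in
plain Python. It is illustrative only: the _() stand-in, the logger setup and
the consume_one() function are assumptions made for the example rather than
oslo.messaging code; only the message texts echo the patch above.

import logging


def _(msg):
    # Stand-in for the project's gettext marker; real modules import _
    # from their i18n helpers rather than defining it locally.
    return msg


LOG = logging.getLogger(__name__)


def consume_one(queue_name):
    # Debug-level messages are plain strings: never wrapped in _(),
    # so they are never handed to translators.
    LOG.debug('Consuming one message from queue %s', queue_name)
    try:
        raise RuntimeError('queue backend elided for this sketch')
    except RuntimeError as exc:
        # Warning/error/exception messages keep the _() wrapper so they
        # stay translatable, matching the LOG.exception() calls that the
        # patch deliberately leaves untouched.
        LOG.error(_('Failed to consume message from queue: %s'), exc)

Note that the patch is strictly about removing _() from debug calls: lines
that eagerly %-format their arguments (e.g. 'Timed out waiting for RPC
response: %s' % exc) keep that style, and switching them to the logger's
deferred interpolation is out of scope here.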