From 8587774d36f905661cd43c5f9c2182b51bd834a8 Mon Sep 17 00:00:00 2001
From: Doug Hellmann
Date: Tue, 16 Jun 2015 09:55:00 -0400
Subject: [PATCH 01/28] Update .gitreview for feature/zmq

Change-Id: I773307e72a61da6aac6adda9592abcb3c65e8e67
---
 .gitreview | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.gitreview b/.gitreview
index beb811ae3..019f90dcb 100644
--- a/.gitreview
+++ b/.gitreview
@@ -2,3 +2,4 @@
 host=review.openstack.org
 port=29418
 project=openstack/oslo.messaging.git
+defaultbranch=feature/zmq

From 76ec03c8f9fcc33cf6b407c05bea6fcbb5c74126 Mon Sep 17 00:00:00 2001
From: Doug Royal
Date: Thu, 25 Jun 2015 17:43:56 -0500
Subject: [PATCH 02/28] fix typo

Change-Id: I8a935ffc795b7233e2e83ae0680786d34dfd6ec0
---
 doc/source/index.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/doc/source/index.rst b/doc/source/index.rst
index 775f5a818..3c609cd87 100644
--- a/doc/source/index.rst
+++ b/doc/source/index.rst
@@ -2,7 +2,7 @@ oslo.messaging
 ==============
 
 The Oslo messaging API supports RPC and notifications over a number of
-different messsaging transports.
+different messaging transports.
 
 Contents
 ========

From 73cd49129f0ce2a799938b4fec9dcd847b0a77ad Mon Sep 17 00:00:00 2001
From: Oleksii Zamiatin
Date: Fri, 19 Jun 2015 14:29:18 +0300
Subject: [PATCH 03/28] Initial commit for new zmq driver implementation

- Minimal RPC (CALL + direct CAST) implementation
- Has up and running oslo_messaging/tests/drivers/test_impl_zmq
- Pep8 fixed.
- Works over REQ/REP pipeline according to [1]
- Has a beginning of eventlet/threading behavior differentiation

Fanout and Notifier are not yet supported
Devstack not yet fixed
Functional tests not yet fixed

..[1] - https://review.openstack.org/#/c/171131/

Change-Id: I44cd48070bf7c7f46152fdf0e54664a7dee97de9
---
 oslo_messaging/_drivers/impl_zmq.py | 1069 +----------------
 .../_drivers/zmq_driver/__init__.py | 0
 .../_drivers/zmq_driver/broker/__init__.py | 1 +
 .../zmq_driver/broker/zmq_base_proxy.py | 92 ++
 .../_drivers/zmq_driver/broker/zmq_broker.py | 71 ++
 .../zmq_driver/broker/zmq_call_proxy.py | 110 ++
 .../zmq_driver/broker/zmq_cast_proxy.py | 79 ++
 .../_drivers/zmq_driver/notifier/__init__.py | 1 +
 .../_drivers/zmq_driver/poller/__init__.py | 0
 .../zmq_driver/poller/green_poller.py | 111 ++
 .../zmq_driver/poller/threading_poller.py | 50 +
 .../_drivers/zmq_driver/rpc/__init__.py | 0
 .../zmq_driver/rpc/client/__init__.py | 0
 .../zmq_driver/rpc/client/zmq_call_request.py | 49 +
 .../zmq_driver/rpc/client/zmq_cast_dealer.py | 72 ++
 .../rpc/client/zmq_cast_publisher.py | 40 +
 .../zmq_driver/rpc/client/zmq_client.py | 33 +
 .../zmq_driver/rpc/client/zmq_request.py | 76 ++
 .../zmq_driver/rpc/server/__init__.py | 0
 .../rpc/server/zmq_base_consumer.py | 35 +
 .../rpc/server/zmq_call_responder.py | 96 ++
 .../zmq_driver/rpc/server/zmq_server.py | 49 +
 .../_drivers/zmq_driver/zmq_async.py | 59 +
 .../_drivers/zmq_driver/zmq_context.py | 33 +
 .../_drivers/zmq_driver/zmq_poller.py | 48 +
 .../_drivers/zmq_driver/zmq_serializer.py | 54 +
 .../_drivers/zmq_driver/zmq_topic.py | 61 +
 oslo_messaging/tests/drivers/test_impl_zmq.py | 467 ++-----
 tests/drivers/test_impl_zmq.py | 446 ++-----
 29 files changed, 1490 insertions(+), 1712 deletions(-)
 create mode 100644 oslo_messaging/_drivers/zmq_driver/__init__.py
 create mode 100644 oslo_messaging/_drivers/zmq_driver/broker/__init__.py
 create mode 100644 oslo_messaging/_drivers/zmq_driver/broker/zmq_base_proxy.py
 create mode 100644 oslo_messaging/_drivers/zmq_driver/broker/zmq_broker.py
 create mode 100644
oslo_messaging/_drivers/zmq_driver/broker/zmq_call_proxy.py create mode 100644 oslo_messaging/_drivers/zmq_driver/broker/zmq_cast_proxy.py create mode 100644 oslo_messaging/_drivers/zmq_driver/notifier/__init__.py create mode 100644 oslo_messaging/_drivers/zmq_driver/poller/__init__.py create mode 100644 oslo_messaging/_drivers/zmq_driver/poller/green_poller.py create mode 100644 oslo_messaging/_drivers/zmq_driver/poller/threading_poller.py create mode 100644 oslo_messaging/_drivers/zmq_driver/rpc/__init__.py create mode 100644 oslo_messaging/_drivers/zmq_driver/rpc/client/__init__.py create mode 100644 oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_call_request.py create mode 100644 oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_dealer.py create mode 100644 oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_publisher.py create mode 100644 oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_client.py create mode 100644 oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_request.py create mode 100644 oslo_messaging/_drivers/zmq_driver/rpc/server/__init__.py create mode 100644 oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_base_consumer.py create mode 100644 oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_call_responder.py create mode 100644 oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_server.py create mode 100644 oslo_messaging/_drivers/zmq_driver/zmq_async.py create mode 100644 oslo_messaging/_drivers/zmq_driver/zmq_context.py create mode 100644 oslo_messaging/_drivers/zmq_driver/zmq_poller.py create mode 100644 oslo_messaging/_drivers/zmq_driver/zmq_serializer.py create mode 100644 oslo_messaging/_drivers/zmq_driver/zmq_topic.py diff --git a/oslo_messaging/_drivers/impl_zmq.py b/oslo_messaging/_drivers/impl_zmq.py index f673b9c06..7357aa3e3 100644 --- a/oslo_messaging/_drivers/impl_zmq.py +++ b/oslo_messaging/_drivers/impl_zmq.py @@ -12,39 +12,20 @@ # License for the specific language governing permissions and limitations # under the License. -import collections import logging -import os import pprint -import re import socket -import sys -import threading -import types -import uuid -import eventlet -import greenlet from oslo_config import cfg -from oslo_serialization import jsonutils -from oslo_utils import excutils -from oslo_utils import importutils -import six -from six import moves -from stevedore import driver from oslo_messaging._drivers import base from oslo_messaging._drivers import common as rpc_common -from oslo_messaging._executors import base as executor_base # FIXME(markmc) -from oslo_messaging._i18n import _, _LE, _LW -from oslo_messaging._drivers import pool +from oslo_messaging._drivers.zmq_driver.rpc.client import zmq_client +from oslo_messaging._drivers.zmq_driver.rpc.server import zmq_server +from oslo_messaging._executors import base as executor_base -zmq = importutils.try_import('eventlet.green.zmq') - -# for convenience, are not modified. 
pformat = pprint.pformat -Timeout = eventlet.timeout.Timeout LOG = logging.getLogger(__name__) RPCException = rpc_common.RPCException @@ -62,6 +43,11 @@ zmq_opts = [ help='MatchMaker driver.', ), + cfg.BoolOpt('rpc_zmq_all_req_rep', + default=True, + deprecated_group='DEFAULT', + help='Use REQ/REP pattern for all methods CALL/CAST/FANOUT.'), + # The following port is unassigned by IANA as of 2012-05-21 cfg.IntOpt('rpc_zmq_port', default=9501, help='ZeroMQ receiver listening port.'), @@ -87,905 +73,6 @@ zmq_opts = [ 'Only supported by impl_zmq.'), ] -CONF = cfg.CONF - -matchmaker = None # memoized matchmaker object - - -def _serialize(data): - """Serialization wrapper. - - We prefer using JSON, but it cannot encode all types. - Error if a developer passes us bad data. - """ - try: - return jsonutils.dumps(data, ensure_ascii=True) - except TypeError: - with excutils.save_and_reraise_exception(): - LOG.error(_("JSON serialization failed.")) - - -def _deserialize(data): - """Deserialization wrapper.""" - LOG.debug("Deserializing: %r", data) - return jsonutils.loads(data) - - -class ZmqSocket(object): - """A tiny wrapper around ZeroMQ. - - Simplifies the send/recv protocol and connection management. - Can be used as a Context (supports the 'with' statement). - """ - - def __init__(self, addr, zmq_type, bind=True, subscribe=None, ctxt=None): - self.ctxt = ctxt or zmq.Context(CONF.rpc_zmq_contexts) - self.sock = self.ctxt.socket(zmq_type) - - # Enable IPv6-support in libzmq. - # When IPv6 is enabled, a socket will connect to, or accept - # connections from, both IPv4 and IPv6 hosts. - try: - self.sock.ipv6 = True - except AttributeError: - # NOTE(dhellmann): Sometimes the underlying library does - # not recognize the IPV6 option. There's nothing we can - # really do in that case, so ignore the error and keep - # trying to work. - pass - - self.addr = addr - self.type = zmq_type - self.subscriptions = [] - - # Support failures on sending/receiving on wrong socket type. 
- self.can_recv = zmq_type in (zmq.PULL, zmq.SUB) - self.can_send = zmq_type in (zmq.PUSH, zmq.PUB) - self.can_sub = zmq_type in (zmq.SUB, ) - - # Support list, str, & None for subscribe arg (cast to list) - do_sub = { - list: subscribe, - str: [subscribe], - type(None): [] - }[type(subscribe)] - - for f in do_sub: - self.subscribe(f) - - str_data = {'addr': addr, 'type': self.socket_s(), - 'subscribe': subscribe, 'bind': bind} - - LOG.debug("Connecting to %(addr)s with %(type)s", str_data) - LOG.debug("-> Subscribed to %(subscribe)s", str_data) - LOG.debug("-> bind: %(bind)s", str_data) - - try: - if bind: - self.sock.bind(addr) - else: - self.sock.connect(addr) - except Exception: - raise RPCException(_("Could not open socket.")) - - def socket_s(self): - """Get socket type as string.""" - t_enum = ('PUSH', 'PULL', 'PUB', 'SUB', 'REP', 'REQ', 'ROUTER', - 'DEALER') - return dict(map(lambda t: (getattr(zmq, t), t), t_enum))[self.type] - - def subscribe(self, msg_filter): - """Subscribe.""" - if not self.can_sub: - raise RPCException("Cannot subscribe on this socket.") - LOG.debug("Subscribing to %s", msg_filter) - - try: - arg = msg_filter - if six.PY3: - arg = arg.encode('utf-8') - self.sock.setsockopt(zmq.SUBSCRIBE, arg) - except Exception: - return - - self.subscriptions.append(msg_filter) - - def unsubscribe(self, msg_filter): - """Unsubscribe.""" - if msg_filter not in self.subscriptions: - return - arg = msg_filter - if six.PY3: - arg = arg.encode('utf-8') - self.sock.setsockopt(zmq.UNSUBSCRIBE, arg) - self.subscriptions.remove(msg_filter) - - @property - def closed(self): - return self.sock is None or self.sock.closed - - def close(self): - if self.sock is None or self.sock.closed: - return - - # We must unsubscribe, or we'll leak descriptors. - if self.subscriptions: - for f in self.subscriptions: - try: - self.sock.setsockopt(zmq.UNSUBSCRIBE, f) - except Exception: - pass - self.subscriptions = [] - - try: - # Default is to linger - self.sock.close() - self.ctxt.term() - except Exception: - # While this is a bad thing to happen, - # it would be much worse if some of the code calling this - # were to fail. For now, lets log, and later evaluate - # if we can safely raise here. - LOG.error("ZeroMQ socket could not be closed.") - self.sock = None - - def recv(self, **kwargs): - if not self.can_recv: - raise RPCException(_("You cannot recv on this socket.")) - return self.sock.recv_multipart(**kwargs) - - def send(self, data, **kwargs): - if not self.can_send: - raise RPCException(_("You cannot send on this socket.")) - self.sock.send_multipart(data, **kwargs) - - -class ZmqClient(object): - """Client for ZMQ sockets.""" - - def __init__(self, addr, ctxt=None): - self.address = addr - self.outq = ZmqSocket(addr, zmq.PUSH, bind=False, ctxt=ctxt) - - def cast(self, msg_id, topic, data, envelope): - msg_id = msg_id or '0' - - if six.PY3: - msg_id = msg_id.encode('utf-8') - - if not envelope: - data = _serialize(data) - if six.PY3: - data = data.encode('utf-8') - data = (msg_id, topic, b'cast', data) - self.outq.send([bytes(item) for item in data]) - return - - rpc_envelope = rpc_common.serialize_msg(data[1]) - zmq_msg = moves.reduce(lambda x, y: x + y, rpc_envelope.items()) - data = (msg_id, topic, b'impl_zmq_v2', data[0]) + zmq_msg - self.outq.send([bytes(item) for item in data]) - - def close(self): - self.outq.close() - - -class ZmqClientContext(object): - """This is essentially a wrapper around ZmqClient that supports 'with'. - It can also return a new ZmqClient, or one from a pool. 
- - The function will also catch when an instance of this class is to be - deleted. With that we can return ZmqClients to the pool on exceptions - and so forth without making the caller be responsible for catching them. - If possible the function makes sure to return a client to the pool. - - Based on amqp.ConnectionContext. - """ - - def __init__(self, address, connection_pool=None, pooled=False): - self.connection = None - self.connection_pool = connection_pool - self.pooled = pooled - if self.pooled and self.connection_pool is not None: - self.connection = self.connection_pool.get(address) - else: - self.connection = ZmqClient(address) - - def __enter__(self): - """When with ZmqClientContext() is used, return self.""" - return self - - def _done(self): - """If the client came from a pool, clean it up and put it back. - If it did not come from a pool, close it. - """ - if self.connection: - if self.pooled and self.connection_pool is not None: - # Reset the connection so it's ready for the next caller - # to grab from the pool - self.connection_pool.put(self.connection) - else: - try: - self.connection.close() - except Exception: - pass - self.connection = None - - def __exit__(self, exc_type, exc_value, tb): - """End of 'with' statement. We're done here.""" - self._done() - - def __del__(self): - """Caller is done with this client. Make sure we cleaned up.""" - self._done() - - def close(self): - """Caller is done with this client.""" - self._done() - - def __getattr__(self, key): - """Proxy all other calls to the ZmqClient instance.""" - if self.connection: - return getattr(self.connection, key) - else: - raise rpc_common.InvalidRPCConnectionReuse() - - -class RpcContext(rpc_common.CommonRpcContext): - """Context that supports replying to a rpc.call.""" - def __init__(self, **kwargs): - self.replies = [] - super(RpcContext, self).__init__(**kwargs) - - def deepcopy(self): - values = self.to_dict() - values['replies'] = self.replies - return self.__class__(**values) - - def reply(self, reply=None, failure=None, ending=False): - if ending: - return - self.replies.append(reply) - - @classmethod - def marshal(self, ctx): - if not isinstance(ctx, dict): - ctx_data = ctx.to_dict() - else: - ctx_data = ctx - return _serialize(ctx_data) - - @classmethod - def unmarshal(self, data): - return RpcContext.from_dict(_deserialize(data)) - - -class InternalContext(object): - """Used by ConsumerBase as a private context for - methods.""" - - def __init__(self, proxy): - self.proxy = proxy - self.msg_waiter = None - - def _get_response(self, ctx, proxy, topic, data): - """Process a curried message and cast the result to topic.""" - LOG.debug("Running func with context: %s", ctx.to_dict()) - data.setdefault('version', None) - data.setdefault('args', {}) - - try: - if not data.get("method"): - raise KeyError - result = proxy.dispatch(ctx, data) - return ConsumerBase.normalize_reply(result, ctx.replies) - except greenlet.GreenletExit: - # ignore these since they are just from shutdowns - pass - except rpc_common.ClientException as e: - LOG.debug("Expected exception during message handling (%s)", - e._exc_info[1]) - return {'exc': - rpc_common.serialize_remote_exception(e._exc_info, - log_failure=False)} - except Exception: - LOG.error(_("Exception during message handling")) - return {'exc': - rpc_common.serialize_remote_exception(sys.exc_info())} - - def reply(self, driver, ctx, proxy, - msg_id=None, context=None, topic=None, msg=None): - """Reply to a casted call.""" - # NOTE(ewindisch): context kwarg 
exists for Grizzly compat. - # this may be able to be removed earlier than - # 'I' if ConsumerBase.process were refactored. - if type(msg) is list: - payload = msg[-1] - else: - payload = msg - - response = ConsumerBase.normalize_reply( - self._get_response(ctx, proxy, topic, payload), - ctx.replies) - - LOG.debug("Sending reply") - _multi_send(driver, _cast, ctx, topic, { - 'method': '-process_reply', - 'args': { - 'msg_id': msg_id, # Include for Folsom compat. - 'response': response - } - }, _msg_id=msg_id, pooled=True) - - -class ConsumerBase(object): - """Base Consumer.""" - - def __init__(self, driver): - self.driver = driver - self.private_ctx = InternalContext(None) - - @classmethod - def normalize_reply(self, result, replies): - # TODO(ewindisch): re-evaluate and document this method. - if isinstance(result, types.GeneratorType): - return list(result) - elif replies: - return replies - else: - return [result] - - def process(self, proxy, ctx, data): - data.setdefault('version', None) - data.setdefault('args', {}) - - # Method starting with - are - # processed internally. (non-valid method name) - method = data.get('method') - # Internal method - # uses internal context for safety. - if method == '-reply': - self.private_ctx.reply(self.driver, ctx, proxy, **data['args']) - return - - proxy.dispatch(ctx, data) - - -class ZmqBaseReactor(ConsumerBase): - """A consumer class implementing a centralized casting broker (PULL-PUSH). - - Used for RoundRobin requests. - """ - - def __init__(self, conf, driver=None): - super(ZmqBaseReactor, self).__init__(driver) - - self.driver = driver - self.proxies = {} - self.threads = [] - self.sockets = [] - self.subscribe = {} - - self.pool = eventlet.greenpool.GreenPool(conf.rpc_thread_pool_size) - - def register(self, proxy, in_addr, zmq_type_in, - in_bind=True, subscribe=None): - - LOG.info(_("Registering reactor")) - - if zmq_type_in not in (zmq.PULL, zmq.SUB): - raise RPCException("Bad input socktype") - - # Items push in. - inq = ZmqSocket(in_addr, zmq_type_in, bind=in_bind, - subscribe=subscribe) - - self.proxies[inq] = proxy - self.sockets.append(inq) - - LOG.info(_("In reactor registered")) - - def consume_in_thread(self): - def _consume(sock): - LOG.info(_("Consuming socket")) - while not sock.closed: - self.consume(sock) - - for k in self.proxies.keys(): - self.threads.append( - self.pool.spawn(_consume, k) - ) - - def wait(self): - for t in self.threads: - t.wait() - - def close(self): - for t in self.threads: - t.kill() - - for s in self.sockets: - s.close() - - -class ZmqProxy(ZmqBaseReactor): - """A consumer class implementing a topic-based proxy. - - Forwards to IPC sockets. - """ - - def __init__(self, conf): - super(ZmqProxy, self).__init__(conf) - pathsep = set((os.path.sep or '', os.path.altsep or '', '/', '\\')) - self.badchars = re.compile(r'[%s]' % re.escape(''.join(pathsep))) - - self.topic_proxy = {} - - def consume(self, sock): - ipc_dir = CONF.rpc_zmq_ipc_dir - - data = sock.recv(copy=False) - topic = data[1].bytes - if six.PY3: - topic = topic.decode('utf-8') - - if topic.startswith('fanout~'): - sock_type = zmq.PUB - topic = topic.split('.', 1)[0] - elif topic.startswith('zmq_replies'): - sock_type = zmq.PUB - else: - sock_type = zmq.PUSH - - if topic not in self.topic_proxy: - def publisher(waiter): - LOG.info(_("Creating proxy for topic: %s"), topic) - - try: - # The topic is received over the network, - # don't trust this input. 
- if self.badchars.search(topic) is not None: - emsg = _("Topic contained dangerous characters.") - LOG.warn(emsg) - raise RPCException(emsg) - - out_sock = ZmqSocket("ipc://%s/zmq_topic_%s" % - (ipc_dir, topic), - sock_type, bind=True) - except RPCException: - waiter.send_exception(*sys.exc_info()) - return - - self.topic_proxy[topic] = eventlet.queue.LightQueue( - CONF.rpc_zmq_topic_backlog) - self.sockets.append(out_sock) - - # It takes some time for a pub socket to open, - # before we can have any faith in doing a send() to it. - if sock_type == zmq.PUB: - eventlet.sleep(.5) - - waiter.send(True) - - while(True): - data = self.topic_proxy[topic].get() - out_sock.send(data, copy=False) - - wait_sock_creation = eventlet.event.Event() - eventlet.spawn(publisher, wait_sock_creation) - - try: - wait_sock_creation.wait() - except RPCException: - LOG.error(_("Topic socket file creation failed.")) - return - - try: - self.topic_proxy[topic].put_nowait(data) - except eventlet.queue.Full: - LOG.error(_("Local per-topic backlog buffer full for topic " - "%s. Dropping message."), topic) - - def consume_in_thread(self): - """Runs the ZmqProxy service.""" - ipc_dir = CONF.rpc_zmq_ipc_dir - consume_in = "tcp://%s:%s" % \ - (CONF.rpc_zmq_bind_address, - CONF.rpc_zmq_port) - consumption_proxy = InternalContext(None) - - try: - os.makedirs(ipc_dir) - except os.error: - if not os.path.isdir(ipc_dir): - with excutils.save_and_reraise_exception(): - LOG.error(_("Required IPC directory does not exist at" - " %s"), ipc_dir) - try: - self.register(consumption_proxy, - consume_in, - zmq.PULL) - except zmq.ZMQError: - if os.access(ipc_dir, os.X_OK): - with excutils.save_and_reraise_exception(): - LOG.error(_("Permission denied to IPC directory at" - " %s"), ipc_dir) - with excutils.save_and_reraise_exception(): - LOG.error(_("Could not create ZeroMQ receiver daemon. " - "Socket may already be in use.")) - - super(ZmqProxy, self).consume_in_thread() - - -def unflatten_envelope(packenv): - """Unflattens the RPC envelope. - - Takes a list and returns a dictionary. - i.e. [1,2,3,4] => {1: 2, 3: 4} - """ - i = iter(packenv) - h = {} - try: - while True: - k = six.next(i) - h[k] = six.next(i) - except StopIteration: - return h - - -class ZmqReactor(ZmqBaseReactor): - """A consumer class implementing a consumer for messages. - - Can also be used as a 1:1 proxy - """ - - def __init__(self, conf, driver): - super(ZmqReactor, self).__init__(conf, driver) - - def consume(self, sock): - # TODO(ewindisch): use zero-copy (i.e. references, not copying) - data = sock.recv() - LOG.debug("CONSUMER RECEIVED DATA: %s", data) - - proxy = self.proxies[sock] - - if data[2] == b'cast': # Legacy protocol - packenv = data[3] - - ctx, msg = _deserialize(packenv) - request = rpc_common.deserialize_msg(msg) - ctx = RpcContext.unmarshal(ctx) - elif data[2] == b'impl_zmq_v2': - packenv = data[4:] - - msg = unflatten_envelope(packenv) - request = rpc_common.deserialize_msg(msg) - - # Unmarshal only after verifying the message. - ctx = RpcContext.unmarshal(data[3]) - else: - LOG.error(_("ZMQ Envelope version unsupported or unknown.")) - return - - self.pool.spawn_n(self.process, proxy, ctx, request) - - -class Connection(rpc_common.Connection): - """Manages connections and threads.""" - - def __init__(self, conf, driver): - self.topics = [] - self.reactor = ZmqReactor(conf, driver) - - def create_consumer(self, topic, proxy, fanout=False): - # Register with matchmaker. 
- _get_matchmaker().register(topic, CONF.rpc_zmq_host) - - # Subscription scenarios - if fanout: - sock_type = zmq.SUB - subscribe = ('', fanout)[type(fanout) == str] - topic = 'fanout~' + topic.split('.', 1)[0] - else: - sock_type = zmq.PULL - subscribe = None - topic = '.'.join((topic.split('.', 1)[0], CONF.rpc_zmq_host)) - - if topic in self.topics: - LOG.info(_("Skipping topic registration. Already registered.")) - return - - # Receive messages from (local) proxy - inaddr = "ipc://%s/zmq_topic_%s" % \ - (CONF.rpc_zmq_ipc_dir, topic) - - LOG.debug("Consumer is a zmq.%s", - ['PULL', 'SUB'][sock_type == zmq.SUB]) - - self.reactor.register(proxy, inaddr, sock_type, - subscribe=subscribe, in_bind=False) - self.topics.append(topic) - - def close(self): - mm = _get_matchmaker() - mm.stop_heartbeat() - for topic in self.topics: - try: - mm.unregister(topic, CONF.rpc_zmq_host) - except Exception as err: - LOG.error(_LE('Unable to unregister topic %(topic)s' - ' from matchmaker: %(err)s') % - {'topic': topic, 'err': err}) - - self.reactor.close() - self.topics = [] - - def wait(self): - self.reactor.wait() - - def consume_in_thread(self): - _get_matchmaker().start_heartbeat() - self.reactor.consume_in_thread() - - -def _cast(driver, addr, context, topic, msg, timeout=None, envelope=False, - _msg_id=None, allowed_remote_exmods=None, pooled=False): - allowed_remote_exmods = allowed_remote_exmods or [] - timeout_cast = timeout or CONF.rpc_cast_timeout - payload = [RpcContext.marshal(context), msg] - if six.PY3: - topic = topic.encode('utf-8') - - with Timeout(timeout_cast, exception=rpc_common.Timeout): - with driver.get_connection(addr, pooled) as conn: - try: - # assumes cast can't return an exception - conn.cast(_msg_id, topic, payload, envelope) - except zmq.ZMQError: - raise RPCException("Cast failed. ZMQ Socket Exception") - - -def _call(driver, addr, context, topic, msg, timeout=None, - envelope=False, allowed_remote_exmods=None, pooled=False): - allowed_remote_exmods = allowed_remote_exmods or [] - # timeout_response is how long we wait for a response - timeout = timeout or CONF.rpc_response_timeout - - # The msg_id is used to track replies. - msg_id = uuid.uuid4().hex - - # Replies always come into the reply service. - reply_topic = "zmq_replies.%s" % CONF.rpc_zmq_host - - LOG.debug("Creating payload") - # Curry the original request into a reply method. - mcontext = RpcContext.marshal(context) - payload = { - 'method': '-reply', - 'args': { - 'msg_id': msg_id, - 'topic': reply_topic, - # TODO(ewindisch): safe to remove mcontext in I. - 'msg': [mcontext, msg] - } - } - - LOG.debug("Creating queue socket for reply waiter") - - # Messages arriving async. 
- # TODO(ewindisch): have reply consumer with dynamic subscription mgmt - with Timeout(timeout, exception=rpc_common.Timeout): - try: - msg_waiter = ZmqSocket( - "ipc://%s/zmq_topic_zmq_replies.%s" % - (CONF.rpc_zmq_ipc_dir, - CONF.rpc_zmq_host), - zmq.SUB, subscribe=msg_id, bind=False - ) - - LOG.debug("Sending cast: %s", topic) - _cast(driver, addr, context, topic, payload, envelope=envelope, - pooled=pooled) - - LOG.debug("Cast sent; Waiting reply") - # Blocks until receives reply - msg = msg_waiter.recv() - if msg is None: - raise rpc_common.Timeout() - LOG.debug("Received message: %s", msg) - LOG.debug("Unpacking response") - - if msg[2] == b'cast': # Legacy version - raw_msg = _deserialize(msg[-1])[-1] - elif msg[2] == b'impl_zmq_v2': - rpc_envelope = unflatten_envelope(msg[4:]) - raw_msg = rpc_common.deserialize_msg(rpc_envelope) - else: - raise rpc_common.UnsupportedRpcEnvelopeVersion( - _("Unsupported or unknown ZMQ envelope returned.")) - - responses = raw_msg['args']['response'] - # ZMQError trumps the Timeout error. - except zmq.ZMQError: - raise RPCException("ZMQ Socket Error") - except (IndexError, KeyError): - raise RPCException(_("RPC Message Invalid.")) - finally: - if 'msg_waiter' in vars(): - msg_waiter.close() - - # It seems we don't need to do all of the following, - # but perhaps it would be useful for multicall? - # One effect of this is that we're checking all - # responses for Exceptions. - for resp in responses: - if isinstance(resp, dict) and 'exc' in resp: - raise rpc_common.deserialize_remote_exception( - resp['exc'], allowed_remote_exmods) - - return responses[-1] - - -def _multi_send(driver, method, context, topic, msg, timeout=None, - envelope=False, _msg_id=None, allowed_remote_exmods=None, - pooled=False): - """Wraps the sending of messages. - - Dispatches to the matchmaker and sends message to all relevant hosts. - """ - allowed_remote_exmods = allowed_remote_exmods or [] - conf = CONF - LOG.debug(' '.join(map(pformat, (topic, msg)))) - - queues = _get_matchmaker().queues(topic) - LOG.debug("Sending message(s) to: %s", queues) - - # Don't stack if we have no matchmaker results - if not queues: - warn_log = _LW("No matchmaker results. Not sending.") - - if method.__name__ == '_cast': - LOG.warn(warn_log) - return - - # While not strictly a timeout, callers know how to handle - # this exception and a timeout isn't too big a lie. 
- raise rpc_common.Timeout(warn_log) - - # This supports brokerless fanout (addresses > 1) - return_val = None - for queue in queues: - _topic, ip_addr = queue - _addr = "tcp://%s:%s" % (ip_addr, conf.rpc_zmq_port) - - if method.__name__ == '_cast': - eventlet.spawn_n(method, driver, _addr, context, - _topic, msg, timeout, envelope, _msg_id, - None, pooled) - else: - return_val = method(driver, _addr, context, _topic, msg, timeout, - envelope, allowed_remote_exmods, pooled) - - return return_val - - -def _get_matchmaker(*args, **kwargs): - global matchmaker - mm_name = CONF.rpc_zmq_matchmaker - - # Back compatibility for old class names - mm_mapping = { - 'oslo_messaging._drivers.matchmaker_redis.MatchMakerRedis': 'redis', - 'oslo_messaging._drivers.matchmaker_ring.MatchMakerRing': 'ring', - 'oslo_messaging._drivers.matchmaker.MatchMakerLocalhost': 'local', - 'oslo.messaging._drivers.matchmaker_redis.MatchMakerRedis': 'redis', - 'oslo.messaging._drivers.matchmaker_ring.MatchMakerRing': 'ring', - 'oslo.messaging._drivers.matchmaker.MatchMakerLocalhost': 'local'} - if mm_name in mm_mapping: - LOG.warn(_LW('rpc_zmq_matchmaker = %(old_val)s is deprecated. ' - 'It is suggested to change the value to %(new_val)s.'), - {'old_val': mm_name, 'new_val': mm_mapping[mm_name]}) - mm_name = mm_mapping[mm_name] - - if not matchmaker: - mgr = driver.DriverManager('oslo.messaging.zmq.matchmaker', - mm_name) - matchmaker = mgr.driver(*args, **kwargs) - return matchmaker - - -class ZmqIncomingMessage(base.IncomingMessage): - - ReceivedReply = collections.namedtuple( - 'ReceivedReply', ['reply', 'failure', 'log_failure']) - - def __init__(self, listener, ctxt, message): - super(ZmqIncomingMessage, self).__init__(listener, ctxt, message) - self.condition = threading.Condition() - self.received = None - - def reply(self, reply=None, failure=None, log_failure=True): - self.received = self.ReceivedReply(reply, failure, log_failure) - with self.condition: - self.condition.notify() - - def requeue(self): - LOG.debug("WARNING: requeue not supported") - - -class ZmqListener(base.Listener): - - def __init__(self, driver): - super(ZmqListener, self).__init__(driver) - self.incoming_queue = moves.queue.Queue() - - def dispatch(self, ctxt, message): - incoming = ZmqIncomingMessage(self, - ctxt.to_dict(), - message) - - self.incoming_queue.put(incoming) - - with incoming.condition: - incoming.condition.wait() - - assert incoming.received - - if incoming.received.failure: - raise incoming.received.failure - else: - return incoming.received.reply - - def poll(self, timeout=None): - try: - return self.incoming_queue.get(timeout=timeout) - except six.moves.queue.Empty: - # timeout - return None - - -class ZmqClientPool(pool.Pool): - """Class that implements a pool of Zmq Clients for a single endpoint""" - def __init__(self, conf, address, connection_cls, ctxt): - self.connection_cls = connection_cls - self.ctxt = ctxt - self.address = address - super(ZmqClientPool, self).__init__(conf.rpc_conn_pool_size) - - def create(self): - LOG.debug('Pool creating new ZMQ connection for %s' % self.address) - return self.connection_cls(self.address, self.ctxt) - - def empty(self): - for item in self.iter_free(): - item.close() - - -class ZmqClientPoolManager(object): - """Class that manages pools of clients for Zmq endpoints""" - - def __init__(self, conf, ctxt=None): - self._pools = {} - self._lock = threading.Lock() - self.conf = conf - self.ctxt = ctxt - - def get(self, address): - if address not in self._pools: - with self._lock: - if 
address not in self._pools: - self._pools[address] = ZmqClientPool(self.conf, - address, - ZmqClient, - self.ctxt) - return self._pools[address].get() - - def put(self, item): - self._pools[item.address].put(item) - - def empty(self): - for p in self._pools: - self._pools[p].empty() - class ZmqDriver(base.BaseDriver): """ZeroMQ Driver @@ -994,142 +81,38 @@ class ZmqDriver(base.BaseDriver): """ - # FIXME(markmc): allow this driver to be used without eventlet - def __init__(self, conf, url, default_exchange=None, allowed_remote_exmods=None): - if not zmq: - raise ImportError("Failed to import eventlet.green.zmq") conf.register_opts(zmq_opts) conf.register_opts(executor_base._pool_opts) - conf.register_opts(base.base_opts) - + self.conf = conf + self.server = None + self.client = None + self.matchmaker = None super(ZmqDriver, self).__init__(conf, url, default_exchange, allowed_remote_exmods) - # FIXME(markmc): handle default_exchange - - # FIXME(markmc): handle transport URL - if self._url.hosts: - raise NotImplementedError('The ZeroMQ driver does not yet support ' - 'transport URLs') - - # FIXME(markmc): use self.conf everywhere - if self.conf is not CONF: - raise NotImplementedError('The ZeroMQ driver currently only works ' - 'with oslo.config.cfg.CONF') - - self.listeners = [] - - # NOTE(jamespage): Create pool manager on first use to deal with - # os.fork calls in openstack daemons. - self._pool = None - self._pid = None - self._lock = threading.Lock() - - def _configure_pool_manager(func): - """Causes a new pool manager to be created when the messaging service - is first used by the current process. This is important as all - connections in the pools manager by the pool manager will share the - same ZMQ context, which must not be shared across OS processes. - """ - def wrap(self, *args, **kws): - with self._lock: - old_pid = self._pid - self._pid = os.getpid() - - if old_pid != self._pid: - # Create fresh pool manager for the current process - # along with a new ZMQ context. - self._pool = ZmqClientPoolManager( - self.conf, - zmq.Context(self.conf.rpc_zmq_contexts) - ) - return func(self, *args, **kws) - return wrap - - def _send(self, target, ctxt, message, - wait_for_reply=None, timeout=None, envelope=False): - - if wait_for_reply: - method = _call - else: - method = _cast - - topic = target.topic - if target.fanout: - # NOTE(ewindisch): fanout~ is used because it avoid splitting on - # and acts as a non-subtle hint to the matchmaker and ZmqProxy. - topic = 'fanout~' + topic - elif target.server: - topic = '%s.%s' % (topic, target.server) - - reply = _multi_send(self, method, ctxt, topic, message, - envelope=envelope, - allowed_remote_exmods=self._allowed_remote_exmods, - pooled=True) - - if wait_for_reply: - return reply[-1] - - @_configure_pool_manager def send(self, target, ctxt, message, wait_for_reply=None, timeout=None, retry=None): - # NOTE(sileht): retry is not implemented because this driver never - # retry anything - return self._send(target, ctxt, message, wait_for_reply, timeout) + if self.client is None: + self.client = zmq_client.ZmqClient(self.conf, self.matchmaker) + if wait_for_reply: + return self.client.call(target, ctxt, message, timeout, retry) + else: + self.client.cast(target, ctxt, message, timeout, retry) + return None - @_configure_pool_manager def send_notification(self, target, ctxt, message, version, retry=None): - # NOTE(ewindisch): dot-priority in rpc notifier does not - # work with our assumptions. 
- # NOTE(sileht): retry is not implemented because this driver never - # retry anything - target = target(topic=target.topic.replace('.', '-')) - return self._send(target, ctxt, message, envelope=(version == 2.0)) + return None - @_configure_pool_manager def listen(self, target): - conn = Connection(self.conf, self) + if self.server is None: + self.server = zmq_server.ZmqServer(self.conf, self.matchmaker) + self.server.listen(target) + return self.server - listener = ZmqListener(self) - - conn.create_consumer(target.topic, listener) - conn.create_consumer('%s.%s' % (target.topic, target.server), - listener) - conn.create_consumer(target.topic, listener, fanout=True) - - conn.consume_in_thread() - self.listeners.append(conn) - - return listener - - @_configure_pool_manager def listen_for_notifications(self, targets_and_priorities, pool): - # NOTE(sileht): this listener implementation is limited - # because zeromq doesn't support: - # * requeing message - # * pool - conn = Connection(self.conf, self) - - listener = ZmqListener(self) - for target, priority in targets_and_priorities: - # NOTE(ewindisch): dot-priority in rpc notifier does not - # work with our assumptions. - # NOTE(sileht): create_consumer doesn't support target.exchange - conn.create_consumer('%s-%s' % (target.topic, priority), - listener) - conn.consume_in_thread() - self.listeners.append(conn) - - return listener + return None def cleanup(self): - for c in self.listeners: - c.close() - self.listeners = [] - if self._pool: - self._pool.empty() - - def get_connection(self, address, pooled=False): - return ZmqClientContext(address, self._pool, pooled) + pass diff --git a/oslo_messaging/_drivers/zmq_driver/__init__.py b/oslo_messaging/_drivers/zmq_driver/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/oslo_messaging/_drivers/zmq_driver/broker/__init__.py b/oslo_messaging/_drivers/zmq_driver/broker/__init__.py new file mode 100644 index 000000000..8af3e63a7 --- /dev/null +++ b/oslo_messaging/_drivers/zmq_driver/broker/__init__.py @@ -0,0 +1 @@ +__author__ = 'ozamiatin' diff --git a/oslo_messaging/_drivers/zmq_driver/broker/zmq_base_proxy.py b/oslo_messaging/_drivers/zmq_driver/broker/zmq_base_proxy.py new file mode 100644 index 000000000..9e11a08a5 --- /dev/null +++ b/oslo_messaging/_drivers/zmq_driver/broker/zmq_base_proxy.py @@ -0,0 +1,92 @@ +# Copyright 2015 Mirantis, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
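
# Illustrative sketch (not part of this patch): how the rewritten ZmqDriver
# above is exercised through the public oslo.messaging API.  At this stage of
# the series only CALL and direct CAST are wired up (no fanout, no
# notifications).  The 'zmq://' transport scheme, the topic/server names, and
# the assumption that the local broker and matchmaker wiring added later in
# this patch are already running are all illustrative.

from oslo_config import cfg
import oslo_messaging


class PingEndpoint(object):
    def ping(self, ctxt, arg):
        return arg


transport = oslo_messaging.get_transport(cfg.CONF, 'zmq://')
target = oslo_messaging.Target(topic='testtopic', server='localhost')

server = oslo_messaging.get_rpc_server(transport, target, [PingEndpoint()],
                                       executor='eventlet')
server.start()

client = oslo_messaging.RPCClient(transport, target)
print(client.call({}, 'ping', arg='pong'))  # routed over the REQ/REP path
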
+ +import abc + +import six + +from oslo_messaging._drivers.zmq_driver import zmq_async + + +@six.add_metaclass(abc.ABCMeta) +class BaseProxy(object): + + def __init__(self, conf, context): + super(BaseProxy, self).__init__() + self.conf = conf + self.context = context + self.executor = zmq_async.get_executor(self.run) + + @abc.abstractmethod + def run(self): + "Main execution point of the proxy" + + def start(self): + self.executor.execute() + + def stop(self): + self.executor.stop() + + def wait(self): + self.executor.wait() + + +@six.add_metaclass(abc.ABCMeta) +class BaseTcpFrontend(object): + + def __init__(self, conf, poller, context): + self.conf = conf + self.poller = poller + self.context = context + + def receive_incoming(self): + message, socket = self.poller.poll(1) + return message + + +@six.add_metaclass(abc.ABCMeta) +class BaseBackendMatcher(object): + + def __init__(self, conf, poller, context): + self.conf = conf + self.context = context + self.backends = {} + self.poller = poller + + def redirect_to_backend(self, message): + backend, topic = self._match_backend(message) + self._send_message(backend, message, topic) + + def _match_backend(self, message): + topic = self._get_topic(message) + ipc_address = self._get_ipc_address(topic) + if ipc_address not in self.backends: + self._create_backend(ipc_address) + return self.backend, topic + + @abc.abstractmethod + def _get_topic(self, message): + "Extract topic from message" + + @abc.abstractmethod + def _get_ipc_address(self, topic): + "Get ipc backend address from topic" + + @abc.abstractmethod + def _send_message(self, backend, message, topic): + "Backend specific sending logic" + + @abc.abstractmethod + def _create_backend(self, ipc_address): + "Backend specific socket opening logic" diff --git a/oslo_messaging/_drivers/zmq_driver/broker/zmq_broker.py b/oslo_messaging/_drivers/zmq_driver/broker/zmq_broker.py new file mode 100644 index 000000000..a0d3f4fe2 --- /dev/null +++ b/oslo_messaging/_drivers/zmq_driver/broker/zmq_broker.py @@ -0,0 +1,71 @@ +# Copyright 2015 Mirantis, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import logging +import os + +from oslo_utils import excutils + +from oslo_messaging._drivers.zmq_driver.broker.zmq_call_proxy import CallProxy +from oslo_messaging._drivers.zmq_driver import zmq_async +from oslo_messaging._i18n import _LE, _LI + + +LOG = logging.getLogger(__name__) + +zmq = zmq_async.import_zmq() + + +class ZmqBroker(object): + """Local messaging IPC broker (nodes are still peers). + + The main purpose is to have one TCP connection + (one TCP port assigned for ZMQ messaging) per node. + There could be a number of services running on a node. + Without such broker a number of opened TCP ports used for + messaging become unpredictable for the engine. + + All messages are coming to TCP ROUTER socket and then + distributed between their targets by topic via IPC. 
+ """ + + def __init__(self, conf): + super(ZmqBroker, self).__init__() + self.conf = conf + self.context = zmq.Context() + self.proxies = [CallProxy(conf, self.context)] + self._create_ipc_dirs() + + def _create_ipc_dirs(self): + ipc_dir = self.conf.rpc_zmq_ipc_dir + try: + os.makedirs("%s/fanout" % ipc_dir) + except os.error: + if not os.path.isdir(ipc_dir): + with excutils.save_and_reraise_exception(): + LOG.error(_LE("Required IPC directory does not exist at" + " %s"), ipc_dir) + + def start(self): + for proxy in self.proxies: + proxy.start() + + def wait(self): + for proxy in self.proxies: + proxy.wait() + + def close(self): + LOG.info(_LI("Broker shutting down ...")) + for proxy in self.proxies: + proxy.stop() diff --git a/oslo_messaging/_drivers/zmq_driver/broker/zmq_call_proxy.py b/oslo_messaging/_drivers/zmq_driver/broker/zmq_call_proxy.py new file mode 100644 index 000000000..f4471b532 --- /dev/null +++ b/oslo_messaging/_drivers/zmq_driver/broker/zmq_call_proxy.py @@ -0,0 +1,110 @@ +# Copyright 2015 Mirantis, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import logging + +from oslo_messaging._drivers.common import RPCException +import oslo_messaging._drivers.zmq_driver.broker.zmq_base_proxy as base_proxy +from oslo_messaging._drivers.zmq_driver import zmq_async +from oslo_messaging._drivers.zmq_driver import zmq_serializer +from oslo_messaging._drivers.zmq_driver import zmq_topic +from oslo_messaging._i18n import _LE, _LI + +LOG = logging.getLogger(__name__) + +zmq = zmq_async.import_zmq() + + +class CallProxy(base_proxy.BaseProxy): + + def __init__(self, conf, context): + super(CallProxy, self).__init__(conf, context) + self.tcp_frontend = FrontendTcpRouter(self.conf, context) + self.backend_matcher = CallBackendMatcher(self.conf, context) + LOG.info(_LI("Starting call proxy thread")) + + def run(self): + message = self.tcp_frontend.receive_incoming() + if message is not None: + self.backend_matcher.redirect_to_backend(message) + + reply, socket = self.backend_matcher.receive_outgoing_reply() + if reply is not None: + self.tcp_frontend.redirect_outgoing_reply(reply) + + +class CallBackendMatcher(base_proxy.BaseBackendMatcher): + + def __init__(self, conf, context): + super(CallBackendMatcher, self).__init__(conf, + zmq_async.get_poller(), + context) + self.backend = self.context.socket(zmq.DEALER) + self.poller.register(self.backend) + + def receive_outgoing_reply(self): + reply_message = self.poller.poll(1) + return reply_message + + def _get_topic(self, message): + topic, server = zmq_serializer.get_topic_from_call_message(message) + return zmq_topic.Topic(self.conf, topic, server) + + def _get_ipc_address(self, topic): + return zmq_topic.get_ipc_address_call(self.conf, topic) + + def _send_message(self, backend, message, topic): + # Empty needed for awaiting REP socket to work properly + # (DEALER-REP usage specific) + backend.send(b'', zmq.SNDMORE) + backend.send_multipart(message) + + def _create_backend(self, ipc_address): + self.backend.connect(ipc_address) + 
self.backends[str(ipc_address)] = True + + +class FrontendTcpRouter(base_proxy.BaseTcpFrontend): + + def __init__(self, conf, context): + super(FrontendTcpRouter, self).__init__(conf, + zmq_async.get_poller(), + context) + + try: + self.frontend = self.context.socket(zmq.ROUTER) + bind_address = zmq_topic.get_tcp_bind_address(conf.rpc_zmq_port) + LOG.info(_LI("Binding to TCP ROUTER %s") % bind_address) + self.frontend.bind(bind_address) + self.poller.register(self.frontend) + except zmq.ZMQError: + errmsg = _LE("Could not create ZeroMQ receiver daemon. " + "Socket may already be in use.") + LOG.error(errmsg) + raise RPCException(errmsg) + + @staticmethod + def _reduce_empty(reply): + reply.pop(0) + return reply + + def redirect_outgoing_reply(self, reply): + self._reduce_empty(reply) + try: + self.frontend.send_multipart(reply) + LOG.info(_LI("Redirecting reply to client %s") % reply) + except zmq.ZMQError: + errmsg = _LE("Failed redirecting reply to client %s") % reply + LOG.error(errmsg) + raise RPCException(errmsg) diff --git a/oslo_messaging/_drivers/zmq_driver/broker/zmq_cast_proxy.py b/oslo_messaging/_drivers/zmq_driver/broker/zmq_cast_proxy.py new file mode 100644 index 000000000..8eef8befc --- /dev/null +++ b/oslo_messaging/_drivers/zmq_driver/broker/zmq_cast_proxy.py @@ -0,0 +1,79 @@ +# Copyright 2015 Mirantis, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import logging + +import oslo_messaging._drivers.zmq_driver.broker.zmq_base_proxy as base_proxy +from oslo_messaging._drivers.zmq_driver import zmq_async +from oslo_messaging._drivers.zmq_driver import zmq_serializer +from oslo_messaging._drivers.zmq_driver import zmq_topic +from oslo_messaging._i18n import _LI + +zmq = zmq_async.import_zmq() + +LOG = logging.getLogger(__name__) + + +class CastProxy(base_proxy.BaseProxy): + + def __init__(self, conf, context): + super(CastProxy, self).__init__(conf, context) + self.tcp_frontend = FrontendTcpPull(self.conf, context) + self.backend_matcher = CastPushBackendMatcher(self.conf, context) + LOG.info(_LI("Starting cast proxy thread")) + + def run(self): + message = self.tcp_frontend.receive_incoming() + if message is not None: + self.backend_matcher.redirect_to_backend(message) + + +class FrontendTcpPull(base_proxy.BaseTcpFrontend): + + def __init__(self, conf, context): + super(FrontendTcpPull, self).__init__(conf, zmq_async.get_poller(), + context) + self.frontend = self.context.socket(zmq.PULL) + address = zmq_topic.get_tcp_bind_address(conf.rpc_zmq_fanout_port) + LOG.info(_LI("Binding to TCP PULL %s") % address) + self.frontend.bind(address) + self.poller.register(self.frontend) + + def _receive_message(self): + message = self.poller.poll() + return message + + +class CastPushBackendMatcher(base_proxy.BaseBackendMatcher): + + def __init__(self, conf, context): + super(CastPushBackendMatcher, self).__init__(conf, + zmq_async.get_poller(), + context) + self.backend = self.context.socket(zmq.PUSH) + + def _get_topic(self, message): + topic, server = zmq_serializer.get_topic_from_cast_message(message) + return zmq_topic.Topic(self.conf, topic, server) + + def _get_ipc_address(self, topic): + return zmq_topic.get_ipc_address_cast(self.conf, topic) + + def _send_message(self, backend, message, topic): + backend.send_multipart(message) + + def _create_backend(self, ipc_address): + LOG.debug("[Cast Proxy] Creating PUSH backend %s", ipc_address) + self.backend.connect(ipc_address) + self.backends[str(ipc_address)] = True diff --git a/oslo_messaging/_drivers/zmq_driver/notifier/__init__.py b/oslo_messaging/_drivers/zmq_driver/notifier/__init__.py new file mode 100644 index 000000000..8af3e63a7 --- /dev/null +++ b/oslo_messaging/_drivers/zmq_driver/notifier/__init__.py @@ -0,0 +1 @@ +__author__ = 'ozamiatin' diff --git a/oslo_messaging/_drivers/zmq_driver/poller/__init__.py b/oslo_messaging/_drivers/zmq_driver/poller/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/oslo_messaging/_drivers/zmq_driver/poller/green_poller.py b/oslo_messaging/_drivers/zmq_driver/poller/green_poller.py new file mode 100644 index 000000000..b2c26c8a8 --- /dev/null +++ b/oslo_messaging/_drivers/zmq_driver/poller/green_poller.py @@ -0,0 +1,111 @@ +# Copyright 2015 Mirantis, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
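
# Illustrative sketch (not part of this patch): the PULL -> PUSH fan-in relay
# that CastProxy/FrontendTcpPull/CastPushBackendMatcher above implement per
# topic.  The TCP port and IPC path below are made up; the real values come
# from rpc_zmq_fanout_port and zmq_topic.get_ipc_address_cast().

import zmq

context = zmq.Context()

frontend = context.socket(zmq.PULL)   # clients push casts to this socket
frontend.bind("tcp://*:9503")

backend = context.socket(zmq.PUSH)    # servers consume from per-topic IPC
backend.connect("ipc:///tmp/zmq-cast-demo")

while True:
    # CastProxy additionally inspects the topic frame to pick a backend;
    # this sketch simply forwards every message to a single backend.
    backend.send_multipart(frontend.recv_multipart())
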
+ +import logging +import threading + +import eventlet +import six + +from oslo_messaging._drivers import common as rpc_common +from oslo_messaging._drivers.zmq_driver import zmq_async +from oslo_messaging._drivers.zmq_driver import zmq_poller + +LOG = logging.getLogger(__name__) + +zmq = zmq_async.import_zmq() + + +class GreenPoller(zmq_poller.ZmqPoller): + + def __init__(self): + self.incoming_queue = six.moves.queue.Queue() + self.green_pool = eventlet.GreenPool() + self.sockets = [] + + def register(self, socket, recv_method=None): + self.sockets.append(socket) + return self.green_pool.spawn(self._socket_receive, socket, + recv_method) + + def _socket_receive(self, socket, recv_method=None): + while True: + if recv_method: + incoming = recv_method(socket) + else: + incoming = socket.recv_multipart() + self.incoming_queue.put((incoming, socket)) + eventlet.sleep() + + def poll(self, timeout=None): + incoming = None + try: + with eventlet.Timeout(timeout, exception=rpc_common.Timeout): + while incoming is None: + try: + incoming = self.incoming_queue.get_nowait() + except six.moves.queue.Empty: + eventlet.sleep() + except rpc_common.Timeout: + return None, None + return incoming[0], incoming[1] + + +class HoldReplyPoller(GreenPoller): + + def __init__(self): + super(HoldReplyPoller, self).__init__() + self.event_by_socket = {} + + def register(self, socket, recv_method=None): + super(HoldReplyPoller, self).register(socket, recv_method) + self.event_by_socket[socket] = threading.Event() + + def resume_polling(self, socket): + pause = self.event_by_socket[socket] + pause.set() + + def _socket_receive(self, socket, recv_method=None): + pause = self.event_by_socket[socket] + while True: + pause.clear() + if recv_method: + incoming = recv_method(socket) + else: + incoming = socket.recv_multipart() + self.incoming_queue.put((incoming, socket)) + pause.wait() + + +class GreenExecutor(zmq_poller.Executor): + + def __init__(self, method): + self._method = method + super(GreenExecutor, self).__init__(None) + + def _loop(self): + while True: + self._method() + eventlet.sleep() + + def execute(self): + self.thread = eventlet.spawn(self._loop) + + def wait(self): + if self.thread is not None: + self.thread.wait() + + def stop(self): + if self.thread is not None: + self.thread.kill() diff --git a/oslo_messaging/_drivers/zmq_driver/poller/threading_poller.py b/oslo_messaging/_drivers/zmq_driver/poller/threading_poller.py new file mode 100644 index 000000000..e4317c487 --- /dev/null +++ b/oslo_messaging/_drivers/zmq_driver/poller/threading_poller.py @@ -0,0 +1,50 @@ +# Copyright 2015 Mirantis, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
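
# Illustrative sketch (not part of this patch): driving the GreenPoller
# defined in green_poller.py above.  One greenthread is spawned per
# registered socket, and poll() hands back (message, socket) pairs.
# The IPC endpoint is made up for the example.

import eventlet
eventlet.monkey_patch()

from eventlet.green import zmq

from oslo_messaging._drivers.zmq_driver.poller import green_poller

context = zmq.Context()
pull = context.socket(zmq.PULL)
pull.bind("ipc:///tmp/zmq-poller-demo")

poller = green_poller.GreenPoller()
poller.register(pull)                     # spawns the receiving greenthread

message, socket = poller.poll(timeout=1)  # returns (None, None) on timeout
if message is not None:
    print(message)
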
+ +import logging +import threading + +import zmq + +from oslo_messaging._drivers.zmq_driver import zmq_poller + +LOG = logging.getLogger(__name__) + + +class ThreadingPoller(zmq_poller.ZmqPoller): + + def __init__(self): + self.poller = zmq.Poller() + + def register(self, socket): + self.poller.register(socket, zmq.POLLOUT) + + def poll(self, timeout=None): + socks = dict(self.poller.poll(timeout)) + for socket in socks: + incoming = socket.recv() + return incoming + + +class ThreadingExecutor(zmq_poller.Executor): + + def __init__(self, method): + thread = threading.Thread(target=method) + super(ThreadingExecutor, self).__init__(thread) + + def execute(self): + self.thread.start() + + def wait(self): + self.thread.join() diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/__init__.py b/oslo_messaging/_drivers/zmq_driver/rpc/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/client/__init__.py b/oslo_messaging/_drivers/zmq_driver/rpc/client/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_call_request.py b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_call_request.py new file mode 100644 index 000000000..fb20efd4a --- /dev/null +++ b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_call_request.py @@ -0,0 +1,49 @@ +# Copyright 2015 Mirantis, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import logging + +from oslo_messaging._drivers.zmq_driver.rpc.client.zmq_request import Request +from oslo_messaging._drivers.zmq_driver import zmq_async +from oslo_messaging._drivers.zmq_driver import zmq_topic +from oslo_messaging._i18n import _LE, _LI + +LOG = logging.getLogger(__name__) + +zmq = zmq_async.import_zmq() + + +class CallRequest(Request): + + def __init__(self, conf, target, context, message, timeout=None, + retry=None): + try: + self.zmq_context = zmq.Context() + socket = self.zmq_context.socket(zmq.REQ) + + super(CallRequest, self).__init__(conf, target, context, + message, socket, timeout, retry) + + self.connect_address = zmq_topic.get_tcp_address_call(conf, + self.topic) + LOG.info(_LI("Connecting REQ to %s") % self.connect_address) + self.socket.connect(self.connect_address) + except zmq.ZMQError as e: + LOG.error(_LE("Error connecting to socket: %s") % str(e)) + + def receive_reply(self): + # NOTE(ozamiatin): Check for retry here (no retries now) + self.socket.setsockopt(zmq.RCVTIMEO, self.timeout) + reply = self.socket.recv_json() + return reply[u'reply'] diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_dealer.py b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_dealer.py new file mode 100644 index 000000000..40fddd97b --- /dev/null +++ b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_dealer.py @@ -0,0 +1,72 @@ +# Copyright 2015 Mirantis, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import logging + +from oslo_messaging._drivers.zmq_driver.rpc.client import zmq_cast_publisher +from oslo_messaging._drivers.zmq_driver.rpc.client.zmq_request import Request +from oslo_messaging._drivers.zmq_driver import zmq_async +from oslo_messaging._drivers.zmq_driver import zmq_topic +from oslo_messaging._i18n import _LE, _LI + +LOG = logging.getLogger(__name__) + +zmq = zmq_async.import_zmq() + + +class CastRequest(Request): + + def __init__(self, conf, target, context, + message, socket, address, timeout=None, retry=None): + self.connect_address = address + super(CastRequest, self).__init__(conf, target, context, message, + socket, timeout, retry) + + def __call__(self, *args, **kwargs): + self.send_request() + + def send_request(self): + self.socket.send(b'', zmq.SNDMORE) + super(CastRequest, self).send_request() + + def receive_reply(self): + # Ignore reply for CAST + pass + + +class DealerCastPublisher(zmq_cast_publisher.CastPublisherBase): + + def __init__(self, conf, matchmaker): + super(DealerCastPublisher, self).__init__(conf) + self.matchmaker = matchmaker + + def cast(self, target, context, + message, timeout=None, retry=None): + topic = zmq_topic.Topic.from_target(self.conf, target) + connect_address = zmq_topic.get_tcp_address_call(self.conf, topic) + dealer_socket = self._create_socket(connect_address) + request = CastRequest(self.conf, target, context, message, + dealer_socket, connect_address, timeout, retry) + request.send_request() + + def _create_socket(self, address): + if address in self.outbound_sockets: + return self.outbound_sockets[address] + try: + dealer_socket = self.zmq_context.socket(zmq.DEALER) + LOG.info(_LI("Connecting DEALER to %s") % address) + dealer_socket.connect(address) + except zmq.ZMQError: + LOG.error(_LE("Failed connecting DEALER to %s") % address) + return dealer_socket diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_publisher.py b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_publisher.py new file mode 100644 index 000000000..098454524 --- /dev/null +++ b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_publisher.py @@ -0,0 +1,40 @@ +# Copyright 2015 Mirantis, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import abc +import logging + +import six + +from oslo_messaging._drivers.zmq_driver import zmq_async + + +LOG = logging.getLogger(__name__) + +zmq = zmq_async.import_zmq() + + +@six.add_metaclass(abc.ABCMeta) +class CastPublisherBase(object): + + def __init__(self, conf): + self.conf = conf + self.zmq_context = zmq.Context() + self.outbound_sockets = {} + super(CastPublisherBase, self).__init__() + + @abc.abstractmethod + def cast(self, target, context, + message, timeout=None, retry=None): + "Send CAST to target" diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_client.py b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_client.py new file mode 100644 index 000000000..a4eed4953 --- /dev/null +++ b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_client.py @@ -0,0 +1,33 @@ +# Copyright 2015 Mirantis, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + + +from oslo_messaging._drivers.zmq_driver.rpc.client import zmq_call_request +from oslo_messaging._drivers.zmq_driver.rpc.client import zmq_cast_dealer + + +class ZmqClient(object): + + def __init__(self, conf, matchmaker=None): + self.conf = conf + self.cast_publisher = zmq_cast_dealer.DealerCastPublisher(conf, + matchmaker) + + def call(self, target, context, message, timeout=None, retry=None): + request = zmq_call_request.CallRequest(self.conf, target, context, + message, timeout, retry) + return request() + + def cast(self, target, context, message, timeout=None, retry=None): + self.cast_publisher.cast(target, context, message, timeout, retry) diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_request.py b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_request.py new file mode 100644 index 000000000..2bfe755bf --- /dev/null +++ b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_request.py @@ -0,0 +1,76 @@ +# Copyright 2015 Mirantis, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
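The client classes above (CallRequest, DealerCastPublisher, ZmqClient) are thin wrappers around plain pyzmq sockets. For orientation, a minimal stand-alone REQ round trip looks roughly like the sketch below; the endpoint and payload are illustrative placeholders, not the driver's actual wire format, and a REP peer must already be bound at the address for the call to return.

    import zmq

    context = zmq.Context()
    request_socket = context.socket(zmq.REQ)
    request_socket.connect("tcp://127.0.0.1:9501")  # hypothetical endpoint

    # REQ enforces a strict send/recv lock-step with its REP peer, which
    # is what gives CALL its blocking request/reply semantics.
    request_socket.send_json({"method": "ping", "args": {}})
    reply = request_socket.recv_json()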
+ +import abc +from abc import abstractmethod +import logging +import uuid + +import six + +from oslo_messaging._drivers.zmq_driver import zmq_async +from oslo_messaging._drivers.zmq_driver import zmq_topic +from oslo_messaging._i18n import _LE + +LOG = logging.getLogger(__name__) + +zmq = zmq_async.import_zmq() + + +@six.add_metaclass(abc.ABCMeta) +class Request(object): + + def __init__(self, conf, target, context, message, + socket, timeout=None, retry=None): + + if message['method'] is None: + errmsg = _LE("No method specified for RPC call") + LOG.error(errmsg) + raise KeyError(errmsg) + + self.msg_id = uuid.uuid4().hex + self.target = target + self.context = context + self.message = message + self.timeout = self._to_milliseconds(conf, timeout) + self.retry = retry + self.reply = None + self.socket = socket + self.topic = zmq_topic.Topic.from_target(conf, target) + + @staticmethod + def _to_milliseconds(conf, timeout): + return timeout * 1000 if timeout else conf.rpc_response_timeout * 1000 + + @property + def is_replied(self): + return self.reply is not None + + @property + def is_timed_out(self): + return False + + def send_request(self): + self.socket.send_string(str(self.topic), zmq.SNDMORE) + self.socket.send_string(self.msg_id, zmq.SNDMORE) + self.socket.send_json(self.context, zmq.SNDMORE) + self.socket.send_json(self.message) + + def __call__(self): + self.send_request() + return self.receive_reply() + + @abstractmethod + def receive_reply(self): + "Receive reply from server side" diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/server/__init__.py b/oslo_messaging/_drivers/zmq_driver/rpc/server/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_base_consumer.py b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_base_consumer.py new file mode 100644 index 000000000..2eeb55f22 --- /dev/null +++ b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_base_consumer.py @@ -0,0 +1,35 @@ +# Copyright 2015 Mirantis, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + + +class ConsumerBase(object): + + def __init__(self, listener, conf, zmq_poller, context): + self.listener = listener + self.conf = conf + self.poller = zmq_poller + self.context = context + self.sockets_per_topic = {} + + def poll(self, timeout=None): + pass + + def stop(self): + pass + + def cleanup(self): + pass + + def listen(self, target): + pass diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_call_responder.py b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_call_responder.py new file mode 100644 index 000000000..959ffd70d --- /dev/null +++ b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_call_responder.py @@ -0,0 +1,96 @@ +# Copyright 2015 Mirantis, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + + +import logging + +from oslo_messaging._drivers import base +from oslo_messaging._drivers.zmq_driver.rpc.server import zmq_base_consumer +from oslo_messaging._drivers.zmq_driver import zmq_async +from oslo_messaging._drivers.zmq_driver import zmq_topic as topic_utils +from oslo_messaging._i18n import _LE + + +LOG = logging.getLogger(__name__) + +zmq = zmq_async.import_zmq() + + +class ZmqIncomingRequest(base.IncomingMessage): + + def __init__(self, listener, context, message, socket, rep_id, poller): + super(ZmqIncomingRequest, self).__init__(listener, context, message) + self.reply_socket = socket + self.reply_id = rep_id + self.received = None + self.poller = poller + + def reply(self, reply=None, failure=None, log_failure=True): + message_reply = {u'reply': reply, + u'failure': failure, + u'log_failure': log_failure} + LOG.debug("Replying %s REP", (str(message_reply))) + self.received = True + self.reply_socket.send(self.reply_id, zmq.SNDMORE) + self.reply_socket.send(b'', zmq.SNDMORE) + self.reply_socket.send_json(message_reply) + self.poller.resume_polling(self.reply_socket) + + def acknowledge(self): + pass + + def requeue(self): + pass + + +class CallResponder(zmq_base_consumer.ConsumerBase): + + def __init__(self, listener, conf, poller, context): + super(CallResponder, self).__init__(listener, conf, poller, context) + + def poll(self, timeout=None): + try: + incoming, socket = self.poller.poll(timeout) + reply_id, context, message = incoming + LOG.debug("[Server] REP Received message %s" % str(message)) + incoming = ZmqIncomingRequest(self.listener, + context, + message, socket, + reply_id, + self.poller) + return incoming + + except zmq.ZMQError as e: + LOG.error(_LE("Receiving message failed ... {}"), e) + + def listen(self, target): + + def _receive_message(socket): + reply_id = socket.recv() + empty = socket.recv() + assert empty == b'', 'Bad format: empty separator expected' + topic = socket.recv_string() + assert topic is not None, 'Bad format: topic string expected' + msg_id = socket.recv_string() + assert msg_id is not None, 'Bad format: message ID expected' + context = socket.recv_json() + message = socket.recv_json() + return (reply_id, context, message) + + topic = topic_utils.Topic.from_target(self.conf, target) + ipc_rep_address = topic_utils.get_ipc_address_call(self.conf, topic) + rep_socket = self.context.socket(zmq.REP) + rep_socket.bind(ipc_rep_address) + self.sockets_per_topic[str(topic)] = rep_socket + self.poller.register(rep_socket, _receive_message) diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_server.py b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_server.py new file mode 100644 index 000000000..e6f67ab95 --- /dev/null +++ b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_server.py @@ -0,0 +1,49 @@ +# Copyright 2015 Mirantis, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import logging + +from oslo_messaging._drivers import base +from oslo_messaging._drivers.zmq_driver.rpc.server import zmq_call_responder +from oslo_messaging._drivers.zmq_driver import zmq_async + +LOG = logging.getLogger(__name__) + +zmq = zmq_async.import_zmq() + + +class ZmqServer(base.Listener): + + def __init__(self, conf, matchmaker=None): + LOG.info("[Server] __init__") + self.conf = conf + self.context = zmq.Context() + poller = zmq_async.get_reply_poller() + self.call_responder = zmq_call_responder.CallResponder(self, conf, + poller, + self.context) + + def poll(self, timeout=None): + incoming = self.call_responder.poll(timeout) + return incoming + + def stop(self): + LOG.info("[Server] Stop") + + def cleanup(self): + pass + + def listen(self, target): + LOG.info("[Server] Listen to Target %s" % target) + self.call_responder.listen(target) diff --git a/oslo_messaging/_drivers/zmq_driver/zmq_async.py b/oslo_messaging/_drivers/zmq_driver/zmq_async.py new file mode 100644 index 000000000..3694d0f5a --- /dev/null +++ b/oslo_messaging/_drivers/zmq_driver/zmq_async.py @@ -0,0 +1,59 @@ +# Copyright 2015 Mirantis, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
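ZmqServer above delegates CALL handling to a CallResponder that binds a REP socket per topic and polls it. Stripped of the driver's envelope (reply id, topic and message id frames), the underlying pattern is the plain REP loop sketched below; the IPC path and reply payload are placeholders, not the driver's real addresses.

    import zmq

    context = zmq.Context()
    reply_socket = context.socket(zmq.REP)
    reply_socket.bind("ipc:///tmp/zmq-example-testtopic")  # hypothetical path

    while True:
        request = reply_socket.recv_json()  # blocks until a CALL arrives
        # ... dispatch the request, then answer so the REQ side unblocks
        reply_socket.send_json({"reply": "pong", "failure": None,
                                "log_failure": True})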
+ +import logging + +from oslo_utils import importutils + +from oslo_messaging._i18n import _LE + +LOG = logging.getLogger(__name__) + +green_zmq = importutils.try_import('eventlet.green.zmq') + + +def import_zmq(): + imported_zmq = green_zmq or importutils.try_import('zmq') + if imported_zmq is None: + errmsg = _LE("ZeroMQ not found!") + LOG.error(errmsg) + raise ImportError(errmsg) + return imported_zmq + + +def get_poller(): + if green_zmq: + from oslo_messaging._drivers.zmq_driver.poller import green_poller + return green_poller.GreenPoller() + else: + from oslo_messaging._drivers.zmq_driver.poller import threading_poller + return threading_poller.ThreadingPoller() + + +def get_reply_poller(): + if green_zmq: + from oslo_messaging._drivers.zmq_driver.poller import green_poller + return green_poller.HoldReplyPoller() + else: + from oslo_messaging._drivers.zmq_driver.poller import threading_poller + return threading_poller.ThreadingPoller() + + +def get_executor(method): + if green_zmq is not None: + from oslo_messaging._drivers.zmq_driver.poller import green_poller + return green_poller.GreenExecutor(method) + else: + from oslo_messaging._drivers.zmq_driver.poller import threading_poller + return threading_poller.ThreadingExecutor() diff --git a/oslo_messaging/_drivers/zmq_driver/zmq_context.py b/oslo_messaging/_drivers/zmq_driver/zmq_context.py new file mode 100644 index 000000000..f986e41db --- /dev/null +++ b/oslo_messaging/_drivers/zmq_driver/zmq_context.py @@ -0,0 +1,33 @@ +# Copyright 2015 Mirantis, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + + +from oslo_messaging._drivers import common as rpc_common + + +class RpcContext(rpc_common.CommonRpcContext): + """Context that supports replying to a rpc.call.""" + def __init__(self, **kwargs): + self.replies = [] + super(RpcContext, self).__init__(**kwargs) + + def deepcopy(self): + values = self.to_dict() + values['replies'] = self.replies + return self.__class__(**values) + + def reply(self, reply=None, failure=None, ending=False): + if ending: + return + self.replies.append(reply) diff --git a/oslo_messaging/_drivers/zmq_driver/zmq_poller.py b/oslo_messaging/_drivers/zmq_driver/zmq_poller.py new file mode 100644 index 000000000..dcd51ad7b --- /dev/null +++ b/oslo_messaging/_drivers/zmq_driver/zmq_poller.py @@ -0,0 +1,48 @@ +# Copyright 2015 Mirantis, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
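zmq_async above chooses between the eventlet and plain-threading backends purely by whether eventlet's green zmq can be imported. A condensed illustration of that try_import fallback, using the same oslo.utils helper, is sketched here:

    from oslo_utils import importutils

    green_zmq = importutils.try_import('eventlet.green.zmq')
    zmq = green_zmq or importutils.try_import('zmq')

    if zmq is None:
        raise ImportError("ZeroMQ bindings are not installed")

    backend = "eventlet" if green_zmq else "threading"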
+ +import abc + +import six + + +@six.add_metaclass(abc.ABCMeta) +class ZmqPoller(object): + + @abc.abstractmethod + def register(self, socket, recv_method=None): + 'Register socket to poll' + + @abc.abstractmethod + def poll(self, timeout=None): + 'Poll for messages' + + +@six.add_metaclass(abc.ABCMeta) +class Executor(object): + + def __init__(self, thread): + self.thread = thread + + @abc.abstractmethod + def execute(self): + 'Run execution' + + @abc.abstractmethod + def stop(self): + 'Stop execution' + + @abc.abstractmethod + def wait(self): + 'Wait until pass' diff --git a/oslo_messaging/_drivers/zmq_driver/zmq_serializer.py b/oslo_messaging/_drivers/zmq_driver/zmq_serializer.py new file mode 100644 index 000000000..0f0733ae9 --- /dev/null +++ b/oslo_messaging/_drivers/zmq_driver/zmq_serializer.py @@ -0,0 +1,54 @@ +# Copyright 2015 Mirantis, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import logging +import os +import re + +import six + +from oslo_messaging._drivers import common as rpc_common +from oslo_messaging._i18n import _LE, _LW + +LOG = logging.getLogger(__name__) + +MESSAGE_CALL_TOPIC_POSITION = 2 + + +def _get_topic_from_msg(message, position): + pathsep = set((os.path.sep or '', os.path.altsep or '', '/', '\\')) + badchars = re.compile(r'[%s]' % re.escape(''.join(pathsep))) + topic = message[position] + topic_items = None + + if six.PY3: + topic = topic.decode('utf-8') + + try: + # The topic is received over the network, + # don't trust this input. + if badchars.search(topic) is not None: + emsg = _LW("Topic contained dangerous characters") + LOG.warn(emsg) + raise rpc_common.RPCException(emsg) + topic_items = topic.split('.', 1) + except Exception as e: + errmsg = _LE("Failed topic string parsing, %s") % str(e) + LOG.error(errmsg) + rpc_common.RPCException(errmsg) + return topic_items[0], topic_items[1] + + +def get_topic_from_call_message(message): + return _get_topic_from_msg(message, MESSAGE_CALL_TOPIC_POSITION) diff --git a/oslo_messaging/_drivers/zmq_driver/zmq_topic.py b/oslo_messaging/_drivers/zmq_driver/zmq_topic.py new file mode 100644 index 000000000..c338b69c5 --- /dev/null +++ b/oslo_messaging/_drivers/zmq_driver/zmq_topic.py @@ -0,0 +1,61 @@ +# Copyright 2015 Mirantis, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
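To make the topic parsing above concrete, here is a sketch of the multipart CALL message as it might arrive at the broker. The frame order follows Request.send_request() on the client side, preceded by the identity and empty delimiter frames that ZeroMQ prepends, which is presumably why the topic sits at MESSAGE_CALL_TOPIC_POSITION = 2; all values below are illustrative, not captured traffic.

    import json

    call_message = [
        b'\x00k\x8bEg',                        # sender identity frame (added by zmq)
        b'',                                   # empty delimiter frame
        b'testtopic.myhost',                   # "<topic>.<server>" string
        b'6dba48d7c84d4f2eafa30f9fb4f55d77',   # message id
        json.dumps({}).encode('utf-8'),        # request context
        json.dumps({'method': 'hello-world', 'tx_id': 1}).encode('utf-8'),
    ]

    # Mirrors _get_topic_from_msg(): take frame 2 and split "<topic>.<server>".
    topic, server = call_message[2].decode('utf-8').split('.', 1)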
+ + +def get_ipc_address_call(conf, topic): + return "ipc://%s/%s" % (conf.rpc_zmq_ipc_dir, str(topic)) + + +def get_tcp_bind_address(port): + return "tcp://*:%s" % port + + +def get_tcp_address_call(conf, topic): + return "tcp://%s:%s" % (topic.server, conf.rpc_zmq_port) + + +def get_ipc_address_cast(conf, topic): + return "ipc://%s/fanout/%s" % (conf.rpc_zmq_ipc_dir, str(topic)) + + +class Topic(object): + + def __init__(self, conf, topic, server=None, fanout=False): + + if server is None: + self.server = conf.rpc_zmq_host + else: + self.server = server + + self._topic = topic + self.fanout = fanout + + @staticmethod + def _extract_cinder_server(server): + return server.split('@', 1)[0] + + @staticmethod + def from_target(conf, target): + if target.server is not None: + return Topic(conf, target.topic, target.server, + fanout=target.fanout) + else: + return Topic(conf, target.topic, fanout=target.fanout) + + @property + def topic(self): + return self._topic if self._topic else "" + + def __str__(self, *args, **kwargs): + return "%s.%s" % (self.topic, self.server) diff --git a/oslo_messaging/tests/drivers/test_impl_zmq.py b/oslo_messaging/tests/drivers/test_impl_zmq.py index 85e5dd377..a6eef2f7c 100644 --- a/oslo_messaging/tests/drivers/test_impl_zmq.py +++ b/oslo_messaging/tests/drivers/test_impl_zmq.py @@ -1,5 +1,4 @@ -# Copyright 2014 Canonical, Ltd. -# All Rights Reserved. +# Copyright 2015 Mirantis, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain @@ -15,28 +14,52 @@ import logging import socket +import threading import fixtures -from oslo_utils import importutils import testtools -try: - import zmq -except ImportError: - zmq = None - import oslo_messaging -from oslo_messaging._drivers import common as rpc_common +from oslo_messaging._drivers import impl_zmq +from oslo_messaging._drivers.zmq_driver.broker.zmq_broker import ZmqBroker +from oslo_messaging._drivers.zmq_driver import zmq_async +from oslo_messaging._i18n import _ from oslo_messaging.tests import utils as test_utils -from six.moves import mock - -# eventlet is not yet py3 compatible, so skip if not installed -eventlet = importutils.try_import('eventlet') - -impl_zmq = importutils.try_import('oslo_messaging._drivers.impl_zmq') LOG = logging.getLogger(__name__) +zmq = zmq_async.import_zmq() + + +class TestRPCServerListener(object): + + def __init__(self, driver): + self.driver = driver + self.target = None + self.listener = None + self.executor = zmq_async.get_executor(self._run) + self._stop = threading.Event() + self._received = threading.Event() + self.message = None + + def listen(self, target): + self.target = target + self.listener = self.driver.listen(self.target) + self.executor.execute() + + def _run(self): + try: + message = self.listener.poll() + if message is not None: + self._received.set() + self.message = message + message.reply(reply=True) + except Exception: + LOG.exception(_("Unexpected exception occurred.")) + + def stop(self): + self.executor.stop() + def get_unused_port(): """Returns an unused port on localhost.""" @@ -70,10 +93,11 @@ class ZmqBaseTestCase(test_utils.BaseTestCase): # Start RPC LOG.info("Running internal zmq receiver.") - self.reactor = impl_zmq.ZmqProxy(self.conf) - self.reactor.consume_in_thread() + self.broker = ZmqBroker(self.conf) + self.broker.start() + + self.listener = TestRPCServerListener(self.driver) - self.matchmaker = impl_zmq._get_matchmaker(host='127.0.0.1') 
self.addCleanup(stopRpc(self.__dict__)) @@ -94,380 +118,127 @@ class stopRpc(object): self.attrs = attrs def __call__(self): - if self.attrs['reactor']: - self.attrs['reactor'].close() + if self.attrs['broker']: + self.attrs['broker'].close() if self.attrs['driver']: self.attrs['driver'].cleanup() + if self.attrs['listener']: + self.attrs['listener'].stop() class TestZmqBasics(ZmqBaseTestCase): - def test_start_stop_listener(self): - target = oslo_messaging.Target(topic='testtopic') - listener = self.driver.listen(target) - result = listener.poll(0.01) - self.assertEqual(result, None) - def test_send_receive_raises(self): """Call() without method.""" target = oslo_messaging.Target(topic='testtopic') - self.driver.listen(target) + self.listener.listen(target) self.assertRaises( KeyError, self.driver.send, target, {}, {'tx_id': 1}, wait_for_reply=True) - @mock.patch('oslo_messaging._drivers.impl_zmq.ZmqIncomingMessage') - def test_send_receive_topic(self, mock_msg): - """Call() with method.""" - mock_msg.return_value = msg = mock.MagicMock() - msg.received = received = mock.MagicMock() - received.failure = False - received.reply = True - msg.condition = condition = mock.MagicMock() - condition.wait.return_value = True + def test_send_receive_topic(self): + """Call() with topic.""" target = oslo_messaging.Target(topic='testtopic') - self.driver.listen(target) + self.listener.listen(target) result = self.driver.send( target, {}, {'method': 'hello-world', 'tx_id': 1}, wait_for_reply=True) - self.assertEqual(result, True) + self.assertIsNotNone(result) - @mock.patch('oslo_messaging._drivers.impl_zmq._call', autospec=True) - def test_send_receive_fanout(self, mock_call): + def test_send_noreply(self): + """Cast() with topic.""" + + target = oslo_messaging.Target(topic='testtopic', server="127.0.0.1") + self.listener.listen(target) + result = self.driver.send( + target, {}, + {'method': 'hello-world', 'tx_id': 1}, + wait_for_reply=False) + + self.listener._received.wait() + + self.assertIsNone(result) + self.assertEqual(True, self.listener._received.isSet()) + method = self.listener.message.message[u'method'] + self.assertEqual(u'hello-world', method) + + @testtools.skip("Not implemented feature") + def test_send_fanout(self): target = oslo_messaging.Target(topic='testtopic', fanout=True) self.driver.listen(target) - mock_call.__name__ = '_call' - mock_call.return_value = [True] - result = self.driver.send( target, {}, {'method': 'hello-world', 'tx_id': 1}, - wait_for_reply=True) + wait_for_reply=False) - self.assertEqual(result, True) - mock_call.assert_called_once_with( - self.driver, - 'tcp://127.0.0.1:%s' % self.conf['rpc_zmq_port'], - {}, 'fanout~testtopic.127.0.0.1', - {'tx_id': 1, 'method': 'hello-world'}, - None, False, [], True) + self.assertIsNone(result) + self.assertEqual(True, self.listener._received.isSet()) + msg_pattern = "{'method': 'hello-world', 'tx_id': 1}" + self.assertEqual(msg_pattern, self.listener.message) - @mock.patch('oslo_messaging._drivers.impl_zmq._call', autospec=True) - def test_send_receive_direct(self, mock_call): - # Also verifies fix for bug http://pad.lv/1301723 - target = oslo_messaging.Target(topic='testtopic', server='localhost') - self.driver.listen(target) + def test_send_receive_direct(self): + """Call() without topic.""" - mock_call.__name__ = '_call' - mock_call.return_value = [True] - - result = self.driver.send( - target, {}, - {'method': 'hello-world', 'tx_id': 1}, - wait_for_reply=True) - - self.assertEqual(result, True) - 
mock_call.assert_called_once_with( - self.driver, - 'tcp://localhost:%s' % self.conf['rpc_zmq_port'], - {}, 'testtopic.localhost', - {'tx_id': 1, 'method': 'hello-world'}, - None, False, [], True) + target = oslo_messaging.Target(server='127.0.0.1') + self.listener.listen(target) + message = {'method': 'hello-world', 'tx_id': 1} + context = {} + result = self.driver.send(target, context, message, + wait_for_reply=True) + self.assertTrue(result) -class TestZmqSocket(test_utils.BaseTestCase): +class TestPoller(test_utils.BaseTestCase): - @testtools.skipIf(zmq is None, "zmq not available") def setUp(self): - super(TestZmqSocket, self).setUp() - self.messaging_conf.transport_driver = 'zmq' - # Get driver - transport = oslo_messaging.get_transport(self.conf) - self.driver = transport._driver + super(TestPoller, self).setUp() + self.poller = zmq_async.get_poller() + self.ctx = zmq.Context() + self.ADDR_REQ = "ipc://request1" - @mock.patch('oslo_messaging._drivers.impl_zmq.ZmqSocket.subscribe') - @mock.patch('oslo_messaging._drivers.impl_zmq.zmq.Context') - def test_zmqsocket_init_type_pull(self, mock_context, mock_subscribe): - mock_ctxt = mock.Mock() - mock_context.return_value = mock_ctxt - mock_sock = mock.Mock() - mock_ctxt.socket = mock.Mock(return_value=mock_sock) - mock_sock.connect = mock.Mock() - mock_sock.bind = mock.Mock() - addr = '127.0.0.1' + def test_poll_blocking(self): - sock = impl_zmq.ZmqSocket(addr, impl_zmq.zmq.PULL, bind=False, - subscribe=None) - self.assertTrue(sock.can_recv) - self.assertFalse(sock.can_send) - self.assertFalse(sock.can_sub) - self.assertTrue(mock_sock.connect.called) - self.assertFalse(mock_sock.bind.called) + rep = self.ctx.socket(zmq.REP) + rep.bind(self.ADDR_REQ) - @mock.patch('oslo_messaging._drivers.impl_zmq.ZmqSocket.subscribe') - @mock.patch('oslo_messaging._drivers.impl_zmq.zmq.Context') - def test_zmqsocket_init_type_sub(self, mock_context, mock_subscribe): - mock_ctxt = mock.Mock() - mock_context.return_value = mock_ctxt - mock_sock = mock.Mock() - mock_ctxt.socket = mock.Mock(return_value=mock_sock) - mock_sock.connect = mock.Mock() - mock_sock.bind = mock.Mock() - addr = '127.0.0.1' + reply_poller = zmq_async.get_reply_poller() + reply_poller.register(rep) - sock = impl_zmq.ZmqSocket(addr, impl_zmq.zmq.SUB, bind=False, - subscribe=None) - self.assertTrue(sock.can_recv) - self.assertFalse(sock.can_send) - self.assertTrue(sock.can_sub) - self.assertTrue(mock_sock.connect.called) - self.assertFalse(mock_sock.bind.called) + def listener(): + incoming, socket = reply_poller.poll() + self.assertEqual(b'Hello', incoming[0]) + socket.send_string('Reply') + reply_poller.resume_polling(socket) - @mock.patch('oslo_messaging._drivers.impl_zmq.ZmqSocket.subscribe') - @mock.patch('oslo_messaging._drivers.impl_zmq.zmq.Context') - def test_zmqsocket_init_type_push(self, mock_context, mock_subscribe): - mock_ctxt = mock.Mock() - mock_context.return_value = mock_ctxt - mock_sock = mock.Mock() - mock_ctxt.socket = mock.Mock(return_value=mock_sock) - mock_sock.connect = mock.Mock() - mock_sock.bind = mock.Mock() - addr = '127.0.0.1' + executor = zmq_async.get_executor(listener) + executor.execute() - sock = impl_zmq.ZmqSocket(addr, impl_zmq.zmq.PUSH, bind=False, - subscribe=None) - self.assertFalse(sock.can_recv) - self.assertTrue(sock.can_send) - self.assertFalse(sock.can_sub) - self.assertTrue(mock_sock.connect.called) - self.assertFalse(mock_sock.bind.called) + req1 = self.ctx.socket(zmq.REQ) + req1.connect(self.ADDR_REQ) - 
@mock.patch('oslo_messaging._drivers.impl_zmq.ZmqSocket.subscribe') - @mock.patch('oslo_messaging._drivers.impl_zmq.zmq.Context') - def test_zmqsocket_init_type_pub(self, mock_context, mock_subscribe): - mock_ctxt = mock.Mock() - mock_context.return_value = mock_ctxt - mock_sock = mock.Mock() - mock_ctxt.socket = mock.Mock(return_value=mock_sock) - mock_sock.connect = mock.Mock() - mock_sock.bind = mock.Mock() - addr = '127.0.0.1' + req2 = self.ctx.socket(zmq.REQ) + req2.connect(self.ADDR_REQ) - sock = impl_zmq.ZmqSocket(addr, impl_zmq.zmq.PUB, bind=False, - subscribe=None) - self.assertFalse(sock.can_recv) - self.assertTrue(sock.can_send) - self.assertFalse(sock.can_sub) - self.assertTrue(mock_sock.connect.called) - self.assertFalse(mock_sock.bind.called) + req1.send_string('Hello') + req2.send_string('Hello') + reply = req1.recv_string() + self.assertEqual('Reply', reply) -class TestZmqIncomingMessage(test_utils.BaseTestCase): + reply = req2.recv_string() + self.assertEqual('Reply', reply) - @testtools.skipIf(zmq is None, "zmq not available") - def setUp(self): - super(TestZmqIncomingMessage, self).setUp() - self.messaging_conf.transport_driver = 'zmq' - # Get driver - transport = oslo_messaging.get_transport(self.conf) - self.driver = transport._driver + def test_poll_timeout(self): + rep = self.ctx.socket(zmq.REP) + rep.bind(self.ADDR_REQ) - def test_zmqincomingmessage(self): - msg = impl_zmq.ZmqIncomingMessage(mock.Mock(), None, 'msg.foo') - msg.reply("abc") - self.assertIsInstance( - msg.received, impl_zmq.ZmqIncomingMessage.ReceivedReply) - self.assertIsInstance( - msg.received, impl_zmq.ZmqIncomingMessage.ReceivedReply) - self.assertEqual(msg.received.reply, "abc") - msg.requeue() + reply_poller = zmq_async.get_reply_poller() + reply_poller.register(rep) - -class TestZmqConnection(ZmqBaseTestCase): - - @mock.patch('oslo_messaging._drivers.impl_zmq.ZmqReactor', autospec=True) - def test_zmqconnection_create_consumer(self, mock_reactor): - - mock_reactor.register = mock.Mock() - conn = impl_zmq.Connection(self.driver.conf, self.driver) - topic = 'topic.foo' - context = mock.Mock() - inaddr = ('ipc://%s/zmq_topic_topic.127.0.0.1' % - (self.internal_ipc_dir)) - # No Fanout - conn.create_consumer(topic, context) - conn.reactor.register.assert_called_with(context, inaddr, - impl_zmq.zmq.PULL, - subscribe=None, in_bind=False) - - # Reset for next bunch of checks - conn.reactor.register.reset_mock() - - # Fanout - inaddr = ('ipc://%s/zmq_topic_fanout~topic' % - (self.internal_ipc_dir)) - conn.create_consumer(topic, context, fanout='subscriber.foo') - conn.reactor.register.assert_called_with(context, inaddr, - impl_zmq.zmq.SUB, - subscribe='subscriber.foo', - in_bind=False) - - @mock.patch('oslo_messaging._drivers.impl_zmq.ZmqReactor', autospec=True) - def test_zmqconnection_create_consumer_topic_exists(self, mock_reactor): - mock_reactor.register = mock.Mock() - conn = impl_zmq.Connection(self.driver.conf, self.driver) - topic = 'topic.foo' - context = mock.Mock() - inaddr = ('ipc://%s/zmq_topic_topic.127.0.0.1' % - (self.internal_ipc_dir)) - - conn.create_consumer(topic, context) - conn.reactor.register.assert_called_with( - context, inaddr, impl_zmq.zmq.PULL, subscribe=None, in_bind=False) - conn.reactor.register.reset_mock() - # Call again with same topic - conn.create_consumer(topic, context) - self.assertFalse(conn.reactor.register.called) - - @mock.patch('oslo_messaging._drivers.impl_zmq._get_matchmaker', - autospec=True) - @mock.patch('oslo_messaging._drivers.impl_zmq.ZmqReactor', 
autospec=True) - def test_zmqconnection_close(self, mock_reactor, mock_getmatchmaker): - conn = impl_zmq.Connection(self.driver.conf, self.driver) - conn.reactor.close = mock.Mock() - mock_getmatchmaker.return_value.stop_heartbeat = mock.Mock() - conn.close() - self.assertTrue(mock_getmatchmaker.return_value.stop_heartbeat.called) - self.assertTrue(conn.reactor.close.called) - - @mock.patch('oslo_messaging._drivers.impl_zmq.ZmqReactor', autospec=True) - def test_zmqconnection_wait(self, mock_reactor): - conn = impl_zmq.Connection(self.driver, self.driver) - conn.reactor.wait = mock.Mock() - conn.wait() - self.assertTrue(conn.reactor.wait.called) - - @mock.patch('oslo_messaging._drivers.impl_zmq._get_matchmaker', - autospec=True) - @mock.patch('oslo_messaging._drivers.impl_zmq.ZmqReactor', autospec=True) - def test_zmqconnection_consume_in_thread(self, mock_reactor, - mock_getmatchmaker): - mock_getmatchmaker.return_value.start_heartbeat = mock.Mock() - conn = impl_zmq.Connection(self.driver, self.driver) - conn.reactor.consume_in_thread = mock.Mock() - conn.consume_in_thread() - self.assertTrue(mock_getmatchmaker.return_value.start_heartbeat.called) - self.assertTrue(conn.reactor.consume_in_thread.called) - - -class TestZmqListener(ZmqBaseTestCase): - - def test_zmqlistener_no_msg(self): - listener = impl_zmq.ZmqListener(self.driver) - # Timeout = 0 should return straight away since the queue is empty - listener.poll(timeout=0) - - def test_zmqlistener_w_msg(self): - listener = impl_zmq.ZmqListener(self.driver) - kwargs = {'a': 1, 'b': 2} - m = mock.Mock() - ctxt = mock.Mock(autospec=impl_zmq.RpcContext) - message = {'namespace': 'name.space', 'method': m.fake_method, - 'args': kwargs} - eventlet.spawn_n(listener.dispatch, ctxt, message) - resp = listener.poll(timeout=10) - msg = {'method': m.fake_method, 'namespace': 'name.space', - 'args': kwargs} - self.assertEqual(resp.message, msg) - - -class TestZmqDriver(ZmqBaseTestCase): - - @mock.patch('oslo_messaging._drivers.impl_zmq._cast', autospec=True) - @mock.patch('oslo_messaging._drivers.matchmaker.MatchMakerBase.queues', - autospec=True) - def test_zmqdriver_multi_send_cast_with_no_queues(self, - mock_queues, - mock_cast): - context = mock.Mock(autospec=impl_zmq.RpcContext) - topic = 'testtopic' - msg = 'jeronimo' - - with mock.patch.object(impl_zmq.LOG, 'warn') as flog: - mock_queues.return_value = None - impl_zmq._multi_send(self.driver, mock_cast, - context, topic, msg) - self.assertEqual(1, flog.call_count) - args, kwargs = flog.call_args - self.assertIn('No matchmaker results', args[0]) - - @mock.patch('oslo_messaging._drivers.impl_zmq._call', autospec=True) - @mock.patch('oslo_messaging._drivers.matchmaker.MatchMakerBase.queues', - autospec=True) - def test_zmqdriver_multi_send_call_with_no_queues(self, - mock_queues, - mock_call): - context = mock.Mock(autospec=impl_zmq.RpcContext) - topic = 'testtopic' - msg = 'jeronimo' - - mock_queues.return_value = None - self.assertRaises(rpc_common.Timeout, - impl_zmq._multi_send, self.driver, - mock_call, context, topic, msg) - - @mock.patch('oslo_messaging._drivers.impl_zmq._cast', autospec=True) - @mock.patch('oslo_messaging._drivers.impl_zmq._multi_send', autospec=True) - def test_zmqdriver_send(self, mock_multi_send, mock_cast): - context = mock.Mock(autospec=impl_zmq.RpcContext) - topic = 'testtopic' - msg = 'jeronimo' - self.driver.send(oslo_messaging.Target(topic=topic), context, msg, - False, 0, False) - mock_multi_send.assert_called_with(self.driver, mock_cast, context, - topic, 
msg, - allowed_remote_exmods=[], - envelope=False, pooled=True) - - @mock.patch('oslo_messaging._drivers.impl_zmq._cast', autospec=True) - @mock.patch('oslo_messaging._drivers.impl_zmq._multi_send', autospec=True) - def test_zmqdriver_send_notification(self, mock_multi_send, mock_cast): - context = mock.Mock(autospec=impl_zmq.RpcContext) - topic = 'testtopic.foo' - topic_reformat = 'testtopic-foo' - msg = 'jeronimo' - self.driver.send_notification(oslo_messaging.Target(topic=topic), - context, msg, False, False) - mock_multi_send.assert_called_with(self.driver, mock_cast, context, - topic_reformat, msg, - allowed_remote_exmods=[], - envelope=False, pooled=True) - - @mock.patch('oslo_messaging._drivers.impl_zmq.ZmqListener', autospec=True) - @mock.patch('oslo_messaging._drivers.impl_zmq.Connection', autospec=True) - def test_zmqdriver_listen(self, mock_connection, mock_listener): - mock_listener.return_value = listener = mock.Mock() - mock_connection.return_value = conn = mock.Mock() - conn.create_consumer = mock.Mock() - conn.consume_in_thread = mock.Mock() - topic = 'testtopic.foo' - self.driver.listen(oslo_messaging.Target(topic=topic)) - conn.create_consumer.assert_called_with(topic, listener, fanout=True) - - @mock.patch('oslo_messaging._drivers.impl_zmq.ZmqListener', autospec=True) - @mock.patch('oslo_messaging._drivers.impl_zmq.Connection', autospec=True) - def test_zmqdriver_listen_for_notification(self, mock_connection, - mock_listener): - mock_listener.return_value = listener = mock.Mock() - mock_connection.return_value = conn = mock.Mock() - conn.create_consumer = mock.Mock() - conn.consume_in_thread = mock.Mock() - topic = 'testtopic.foo' - data = [(oslo_messaging.Target(topic=topic), 0)] - # NOTE(jamespage): Pooling not supported, just pass None for now. - self.driver.listen_for_notifications(data, None) - conn.create_consumer.assert_called_with("%s-%s" % (topic, 0), listener) + incoming, socket = reply_poller.poll(1) + self.assertIsNone(incoming) + self.assertIsNone(socket) diff --git a/tests/drivers/test_impl_zmq.py b/tests/drivers/test_impl_zmq.py index ddc6753ea..a6eef2f7c 100644 --- a/tests/drivers/test_impl_zmq.py +++ b/tests/drivers/test_impl_zmq.py @@ -1,5 +1,4 @@ -# Copyright 2014 Canonical, Ltd. -# All Rights Reserved. +# Copyright 2015 Mirantis, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain @@ -15,28 +14,52 @@ import logging import socket +import threading import fixtures import testtools -from six.moves import mock - -try: - import zmq -except ImportError: - zmq = None - -from oslo import messaging -from oslo.utils import importutils +import oslo_messaging +from oslo_messaging._drivers import impl_zmq +from oslo_messaging._drivers.zmq_driver.broker.zmq_broker import ZmqBroker +from oslo_messaging._drivers.zmq_driver import zmq_async +from oslo_messaging._i18n import _ from oslo_messaging.tests import utils as test_utils -# eventlet is not yet py3 compatible, so skip if not installed -eventlet = importutils.try_import('eventlet') - -impl_zmq = importutils.try_import('oslo_messaging._drivers.impl_zmq') - LOG = logging.getLogger(__name__) +zmq = zmq_async.import_zmq() + + +class TestRPCServerListener(object): + + def __init__(self, driver): + self.driver = driver + self.target = None + self.listener = None + self.executor = zmq_async.get_executor(self._run) + self._stop = threading.Event() + self._received = threading.Event() + self.message = None + + def listen(self, target): + self.target = target + self.listener = self.driver.listen(self.target) + self.executor.execute() + + def _run(self): + try: + message = self.listener.poll() + if message is not None: + self._received.set() + self.message = message + message.reply(reply=True) + except Exception: + LOG.exception(_("Unexpected exception occurred.")) + + def stop(self): + self.executor.stop() + def get_unused_port(): """Returns an unused port on localhost.""" @@ -56,7 +79,7 @@ class ZmqBaseTestCase(test_utils.BaseTestCase): super(ZmqBaseTestCase, self).setUp() self.messaging_conf.transport_driver = 'zmq' # Get driver - transport = messaging.get_transport(self.conf) + transport = oslo_messaging.get_transport(self.conf) self.driver = transport._driver # Set config values @@ -70,10 +93,11 @@ class ZmqBaseTestCase(test_utils.BaseTestCase): # Start RPC LOG.info("Running internal zmq receiver.") - self.reactor = impl_zmq.ZmqProxy(self.conf) - self.reactor.consume_in_thread() + self.broker = ZmqBroker(self.conf) + self.broker.start() + + self.listener = TestRPCServerListener(self.driver) - self.matchmaker = impl_zmq._get_matchmaker(host='127.0.0.1') self.addCleanup(stopRpc(self.__dict__)) @@ -85,7 +109,7 @@ class TestConfZmqDriverLoad(test_utils.BaseTestCase): self.messaging_conf.transport_driver = 'zmq' def test_driver_load(self): - transport = messaging.get_transport(self.conf) + transport = oslo_messaging.get_transport(self.conf) self.assertIsInstance(transport._driver, impl_zmq.ZmqDriver) @@ -94,347 +118,127 @@ class stopRpc(object): self.attrs = attrs def __call__(self): - if self.attrs['reactor']: - self.attrs['reactor'].close() + if self.attrs['broker']: + self.attrs['broker'].close() if self.attrs['driver']: self.attrs['driver'].cleanup() + if self.attrs['listener']: + self.attrs['listener'].stop() class TestZmqBasics(ZmqBaseTestCase): - def test_start_stop_listener(self): - target = messaging.Target(topic='testtopic') - listener = self.driver.listen(target) - result = listener.poll(0.01) - self.assertEqual(result, None) - def test_send_receive_raises(self): """Call() without method.""" - target = messaging.Target(topic='testtopic') - self.driver.listen(target) + target = oslo_messaging.Target(topic='testtopic') + self.listener.listen(target) self.assertRaises( KeyError, self.driver.send, target, {}, {'tx_id': 1}, wait_for_reply=True) - @mock.patch('oslo_messaging._drivers.impl_zmq.ZmqIncomingMessage') - def 
test_send_receive_topic(self, mock_msg): - """Call() with method.""" - mock_msg.return_value = msg = mock.MagicMock() - msg.received = received = mock.MagicMock() - received.failure = False - received.reply = True - msg.condition = condition = mock.MagicMock() - condition.wait.return_value = True + def test_send_receive_topic(self): + """Call() with topic.""" - target = messaging.Target(topic='testtopic') - self.driver.listen(target) + target = oslo_messaging.Target(topic='testtopic') + self.listener.listen(target) result = self.driver.send( target, {}, {'method': 'hello-world', 'tx_id': 1}, wait_for_reply=True) - self.assertEqual(result, True) + self.assertIsNotNone(result) - @mock.patch('oslo_messaging._drivers.impl_zmq._call', autospec=True) - def test_send_receive_fanout(self, mock_call): - target = messaging.Target(topic='testtopic', fanout=True) + def test_send_noreply(self): + """Cast() with topic.""" + + target = oslo_messaging.Target(topic='testtopic', server="127.0.0.1") + self.listener.listen(target) + result = self.driver.send( + target, {}, + {'method': 'hello-world', 'tx_id': 1}, + wait_for_reply=False) + + self.listener._received.wait() + + self.assertIsNone(result) + self.assertEqual(True, self.listener._received.isSet()) + method = self.listener.message.message[u'method'] + self.assertEqual(u'hello-world', method) + + @testtools.skip("Not implemented feature") + def test_send_fanout(self): + target = oslo_messaging.Target(topic='testtopic', fanout=True) self.driver.listen(target) - mock_call.__name__ = '_call' - mock_call.return_value = [True] - result = self.driver.send( target, {}, {'method': 'hello-world', 'tx_id': 1}, - wait_for_reply=True) + wait_for_reply=False) - self.assertEqual(result, True) - mock_call.assert_called_once_with( - self.driver, - 'tcp://127.0.0.1:%s' % self.conf['rpc_zmq_port'], - {}, 'fanout~testtopic.127.0.0.1', - {'tx_id': 1, 'method': 'hello-world'}, - None, False, [], True) + self.assertIsNone(result) + self.assertEqual(True, self.listener._received.isSet()) + msg_pattern = "{'method': 'hello-world', 'tx_id': 1}" + self.assertEqual(msg_pattern, self.listener.message) - @mock.patch('oslo_messaging._drivers.impl_zmq._call', autospec=True) - def test_send_receive_direct(self, mock_call): - # Also verifies fix for bug http://pad.lv/1301723 - target = messaging.Target(topic='testtopic', server='localhost') - self.driver.listen(target) + def test_send_receive_direct(self): + """Call() without topic.""" - mock_call.__name__ = '_call' - mock_call.return_value = [True] - - result = self.driver.send( - target, {}, - {'method': 'hello-world', 'tx_id': 1}, - wait_for_reply=True) - - self.assertEqual(result, True) - mock_call.assert_called_once_with( - self.driver, - 'tcp://localhost:%s' % self.conf['rpc_zmq_port'], - {}, 'testtopic.localhost', - {'tx_id': 1, 'method': 'hello-world'}, - None, False, [], True) + target = oslo_messaging.Target(server='127.0.0.1') + self.listener.listen(target) + message = {'method': 'hello-world', 'tx_id': 1} + context = {} + result = self.driver.send(target, context, message, + wait_for_reply=True) + self.assertTrue(result) -class TestZmqSocket(test_utils.BaseTestCase): +class TestPoller(test_utils.BaseTestCase): - @testtools.skipIf(zmq is None, "zmq not available") def setUp(self): - super(TestZmqSocket, self).setUp() - self.messaging_conf.transport_driver = 'zmq' - # Get driver - transport = messaging.get_transport(self.conf) - self.driver = transport._driver + super(TestPoller, self).setUp() + self.poller = 
zmq_async.get_poller() + self.ctx = zmq.Context() + self.ADDR_REQ = "ipc://request1" - @mock.patch('oslo_messaging._drivers.impl_zmq.ZmqSocket.subscribe') - @mock.patch('oslo_messaging._drivers.impl_zmq.zmq.Context') - def test_zmqsocket_init_type_pull(self, mock_context, mock_subscribe): - mock_ctxt = mock.Mock() - mock_context.return_value = mock_ctxt - mock_sock = mock.Mock() - mock_ctxt.socket = mock.Mock(return_value=mock_sock) - mock_sock.connect = mock.Mock() - mock_sock.bind = mock.Mock() - addr = '127.0.0.1' + def test_poll_blocking(self): - sock = impl_zmq.ZmqSocket(addr, impl_zmq.zmq.PULL, bind=False, - subscribe=None) - self.assertTrue(sock.can_recv) - self.assertFalse(sock.can_send) - self.assertFalse(sock.can_sub) - self.assertTrue(mock_sock.connect.called) - self.assertFalse(mock_sock.bind.called) + rep = self.ctx.socket(zmq.REP) + rep.bind(self.ADDR_REQ) - @mock.patch('oslo_messaging._drivers.impl_zmq.ZmqSocket.subscribe') - @mock.patch('oslo_messaging._drivers.impl_zmq.zmq.Context') - def test_zmqsocket_init_type_sub(self, mock_context, mock_subscribe): - mock_ctxt = mock.Mock() - mock_context.return_value = mock_ctxt - mock_sock = mock.Mock() - mock_ctxt.socket = mock.Mock(return_value=mock_sock) - mock_sock.connect = mock.Mock() - mock_sock.bind = mock.Mock() - addr = '127.0.0.1' + reply_poller = zmq_async.get_reply_poller() + reply_poller.register(rep) - sock = impl_zmq.ZmqSocket(addr, impl_zmq.zmq.SUB, bind=False, - subscribe=None) - self.assertTrue(sock.can_recv) - self.assertFalse(sock.can_send) - self.assertTrue(sock.can_sub) - self.assertTrue(mock_sock.connect.called) - self.assertFalse(mock_sock.bind.called) + def listener(): + incoming, socket = reply_poller.poll() + self.assertEqual(b'Hello', incoming[0]) + socket.send_string('Reply') + reply_poller.resume_polling(socket) - @mock.patch('oslo_messaging._drivers.impl_zmq.ZmqSocket.subscribe') - @mock.patch('oslo_messaging._drivers.impl_zmq.zmq.Context') - def test_zmqsocket_init_type_push(self, mock_context, mock_subscribe): - mock_ctxt = mock.Mock() - mock_context.return_value = mock_ctxt - mock_sock = mock.Mock() - mock_ctxt.socket = mock.Mock(return_value=mock_sock) - mock_sock.connect = mock.Mock() - mock_sock.bind = mock.Mock() - addr = '127.0.0.1' + executor = zmq_async.get_executor(listener) + executor.execute() - sock = impl_zmq.ZmqSocket(addr, impl_zmq.zmq.PUSH, bind=False, - subscribe=None) - self.assertFalse(sock.can_recv) - self.assertTrue(sock.can_send) - self.assertFalse(sock.can_sub) - self.assertTrue(mock_sock.connect.called) - self.assertFalse(mock_sock.bind.called) + req1 = self.ctx.socket(zmq.REQ) + req1.connect(self.ADDR_REQ) - @mock.patch('oslo_messaging._drivers.impl_zmq.ZmqSocket.subscribe') - @mock.patch('oslo_messaging._drivers.impl_zmq.zmq.Context') - def test_zmqsocket_init_type_pub(self, mock_context, mock_subscribe): - mock_ctxt = mock.Mock() - mock_context.return_value = mock_ctxt - mock_sock = mock.Mock() - mock_ctxt.socket = mock.Mock(return_value=mock_sock) - mock_sock.connect = mock.Mock() - mock_sock.bind = mock.Mock() - addr = '127.0.0.1' + req2 = self.ctx.socket(zmq.REQ) + req2.connect(self.ADDR_REQ) - sock = impl_zmq.ZmqSocket(addr, impl_zmq.zmq.PUB, bind=False, - subscribe=None) - self.assertFalse(sock.can_recv) - self.assertTrue(sock.can_send) - self.assertFalse(sock.can_sub) - self.assertTrue(mock_sock.connect.called) - self.assertFalse(mock_sock.bind.called) + req1.send_string('Hello') + req2.send_string('Hello') + reply = req1.recv_string() + self.assertEqual('Reply', 
reply) -class TestZmqIncomingMessage(test_utils.BaseTestCase): + reply = req2.recv_string() + self.assertEqual('Reply', reply) - @testtools.skipIf(zmq is None, "zmq not available") - def setUp(self): - super(TestZmqIncomingMessage, self).setUp() - self.messaging_conf.transport_driver = 'zmq' - # Get driver - transport = messaging.get_transport(self.conf) - self.driver = transport._driver + def test_poll_timeout(self): + rep = self.ctx.socket(zmq.REP) + rep.bind(self.ADDR_REQ) - def test_zmqincomingmessage(self): - msg = impl_zmq.ZmqIncomingMessage(mock.Mock(), None, 'msg.foo') - msg.reply("abc") - self.assertIsInstance( - msg.received, impl_zmq.ZmqIncomingMessage.ReceivedReply) - self.assertIsInstance( - msg.received, impl_zmq.ZmqIncomingMessage.ReceivedReply) - self.assertEqual(msg.received.reply, "abc") - msg.requeue() + reply_poller = zmq_async.get_reply_poller() + reply_poller.register(rep) - -class TestZmqConnection(ZmqBaseTestCase): - - @mock.patch('oslo_messaging._drivers.impl_zmq.ZmqReactor', autospec=True) - def test_zmqconnection_create_consumer(self, mock_reactor): - - mock_reactor.register = mock.Mock() - conn = impl_zmq.Connection(self.driver.conf, self.driver) - topic = 'topic.foo' - context = mock.Mock() - inaddr = ('ipc://%s/zmq_topic_topic.127.0.0.1' % - (self.internal_ipc_dir)) - # No Fanout - conn.create_consumer(topic, context) - conn.reactor.register.assert_called_with(context, inaddr, - impl_zmq.zmq.PULL, - subscribe=None, in_bind=False) - - # Reset for next bunch of checks - conn.reactor.register.reset_mock() - - # Fanout - inaddr = ('ipc://%s/zmq_topic_fanout~topic' % - (self.internal_ipc_dir)) - conn.create_consumer(topic, context, fanout='subscriber.foo') - conn.reactor.register.assert_called_with(context, inaddr, - impl_zmq.zmq.SUB, - subscribe='subscriber.foo', - in_bind=False) - - @mock.patch('oslo_messaging._drivers.impl_zmq.ZmqReactor', autospec=True) - def test_zmqconnection_create_consumer_topic_exists(self, mock_reactor): - mock_reactor.register = mock.Mock() - conn = impl_zmq.Connection(self.driver.conf, self.driver) - topic = 'topic.foo' - context = mock.Mock() - inaddr = ('ipc://%s/zmq_topic_topic.127.0.0.1' % - (self.internal_ipc_dir)) - - conn.create_consumer(topic, context) - conn.reactor.register.assert_called_with( - context, inaddr, impl_zmq.zmq.PULL, subscribe=None, in_bind=False) - conn.reactor.register.reset_mock() - # Call again with same topic - conn.create_consumer(topic, context) - self.assertFalse(conn.reactor.register.called) - - @mock.patch('oslo_messaging._drivers.impl_zmq._get_matchmaker', - autospec=True) - @mock.patch('oslo_messaging._drivers.impl_zmq.ZmqReactor', autospec=True) - def test_zmqconnection_close(self, mock_reactor, mock_getmatchmaker): - conn = impl_zmq.Connection(self.driver.conf, self.driver) - conn.reactor.close = mock.Mock() - mock_getmatchmaker.return_value.stop_heartbeat = mock.Mock() - conn.close() - self.assertTrue(mock_getmatchmaker.return_value.stop_heartbeat.called) - self.assertTrue(conn.reactor.close.called) - - @mock.patch('oslo_messaging._drivers.impl_zmq.ZmqReactor', autospec=True) - def test_zmqconnection_wait(self, mock_reactor): - conn = impl_zmq.Connection(self.driver.conf, self.driver) - conn.reactor.wait = mock.Mock() - conn.wait() - self.assertTrue(conn.reactor.wait.called) - - @mock.patch('oslo_messaging._drivers.impl_zmq._get_matchmaker', - autospec=True) - @mock.patch('oslo_messaging._drivers.impl_zmq.ZmqReactor', autospec=True) - def test_zmqconnection_consume_in_thread(self, mock_reactor, - 
mock_getmatchmaker): - mock_getmatchmaker.return_value.start_heartbeat = mock.Mock() - conn = impl_zmq.Connection(self.driver.conf, self.driver) - conn.reactor.consume_in_thread = mock.Mock() - conn.consume_in_thread() - self.assertTrue(mock_getmatchmaker.return_value.start_heartbeat.called) - self.assertTrue(conn.reactor.consume_in_thread.called) - - -class TestZmqListener(ZmqBaseTestCase): - - def test_zmqlistener_no_msg(self): - listener = impl_zmq.ZmqListener(self.driver) - # Timeout = 0 should return straight away since the queue is empty - listener.poll(timeout=0) - - def test_zmqlistener_w_msg(self): - listener = impl_zmq.ZmqListener(self.driver) - kwargs = {'a': 1, 'b': 2} - m = mock.Mock() - ctxt = mock.Mock(autospec=impl_zmq.RpcContext) - message = {'namespace': 'name.space', 'method': m.fake_method, - 'args': kwargs} - eventlet.spawn_n(listener.dispatch, ctxt, message) - resp = listener.poll(timeout=10) - msg = {'method': m.fake_method, 'namespace': 'name.space', - 'args': kwargs} - self.assertEqual(resp.message, msg) - - -class TestZmqDriver(ZmqBaseTestCase): - - @mock.patch('oslo_messaging._drivers.impl_zmq._cast', autospec=True) - @mock.patch('oslo_messaging._drivers.impl_zmq._multi_send', autospec=True) - def test_zmqdriver_send(self, mock_multi_send, mock_cast): - context = mock.Mock(autospec=impl_zmq.RpcContext) - topic = 'testtopic' - msg = 'jeronimo' - self.driver.send(messaging.Target(topic=topic), context, msg, - False, 0, False) - mock_multi_send.assert_called_with(self.driver, mock_cast, context, - topic, msg, - allowed_remote_exmods=[], - envelope=False, pooled=True) - - @mock.patch('oslo_messaging._drivers.impl_zmq._cast', autospec=True) - @mock.patch('oslo_messaging._drivers.impl_zmq._multi_send', autospec=True) - def test_zmqdriver_send_notification(self, mock_multi_send, mock_cast): - context = mock.Mock(autospec=impl_zmq.RpcContext) - topic = 'testtopic.foo' - topic_reformat = 'testtopic-foo' - msg = 'jeronimo' - self.driver.send_notification(messaging.Target(topic=topic), context, - msg, False, False) - mock_multi_send.assert_called_with(self.driver, mock_cast, context, - topic_reformat, msg, - allowed_remote_exmods=[], - envelope=False, pooled=True) - - @mock.patch('oslo_messaging._drivers.impl_zmq.ZmqListener', autospec=True) - @mock.patch('oslo_messaging._drivers.impl_zmq.Connection', autospec=True) - def test_zmqdriver_listen(self, mock_connection, mock_listener): - mock_listener.return_value = listener = mock.Mock() - mock_connection.return_value = conn = mock.Mock() - conn.create_consumer = mock.Mock() - conn.consume_in_thread = mock.Mock() - topic = 'testtopic.foo' - self.driver.listen(messaging.Target(topic=topic)) - conn.create_consumer.assert_called_with(topic, listener, fanout=True) - - @mock.patch('oslo_messaging._drivers.impl_zmq.ZmqListener', autospec=True) - @mock.patch('oslo_messaging._drivers.impl_zmq.Connection', autospec=True) - def test_zmqdriver_listen_for_notification(self, mock_connection, - mock_listener): - mock_listener.return_value = listener = mock.Mock() - mock_connection.return_value = conn = mock.Mock() - conn.create_consumer = mock.Mock() - conn.consume_in_thread = mock.Mock() - topic = 'testtopic.foo' - data = [(messaging.Target(topic=topic), 0)] - # NOTE(jamespage): Pooling not supported, just pass None for now. 
-        self.driver.listen_for_notifications(data, None)
-        conn.create_consumer.assert_called_with("%s-%s" % (topic, 0), listener)
+        incoming, socket = reply_poller.poll(1)
+        self.assertIsNone(incoming)
+        self.assertIsNone(socket)

From 4073851bf83d75bb1060be9ed1d7f3895470b9f5 Mon Sep 17 00:00:00 2001
From: Flavio Percoco
Date: Tue, 23 Jun 2015 16:57:13 +0200
Subject: [PATCH 04/28] Fix qpid's functional gate

The logging verbosity changed in 0.32, which our gate is pulling from
the updates repository. This patch updates the script to make it work
again.

Closes-bug: #1468917

Change-Id: I88a1bc794246beb156d4301bd503fa51a7cd2cce
(cherry picked from commit 079c04f0d06539f3507c772ccc68807a6d01a73a)
---
 setup-test-env-qpid.sh | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/setup-test-env-qpid.sh b/setup-test-env-qpid.sh
index caf9c3884..5972718c1 100755
--- a/setup-test-env-qpid.sh
+++ b/setup-test-env-qpid.sh
@@ -14,10 +14,10 @@ cat > ${DATADIR}/qpidd.conf </dev/null)
-[ ! -x $QPIDD ] && /usr/sbin/qpidd
 mkfifo ${DATADIR}/out
-$QPIDD --config ${DATADIR}/qpidd.conf &> ${DATADIR}/out &
+$QPIDD --log-enable info+ --log-to-file ${DATADIR}/out --config ${DATADIR}/qpidd.conf &
 wait_for_line "Broker .*running" "error" ${DATADIR}/out
 # Fail earlier if qpid-config is available
-[ -x "$(which qpid-config)" ] && qpid-config -b stackqpid/secretqpid@localhost:65123
+[ -x "$(which qpid-config)" ] && qpid-config -b stackqpid/secretqpid@localhost:65123 &>/dev/null

 $*

From 76f44879e166143bc557a74b7912a24ea197ad85 Mon Sep 17 00:00:00 2001
From: Doug Hellmann
Date: Thu, 18 Jun 2015 22:22:37 +0000
Subject: [PATCH 05/28] Drop use of 'oslo' namespace package

The Oslo libraries have moved all of their code out of the 'oslo'
namespace package into per-library packages. The namespace package was
retained during kilo for backwards compatibility, but will be removed by
the liberty-2 milestone. This change removes the use of the namespace
package, replacing it with the new package names.

The patches in the libraries will be put on hold until application
patches have landed, or L2, whichever comes first. At that point, new
versions of the libraries without namespace packages will be released as
a major version update.

Please merge this patch, or an equivalent, before L2 to avoid problems
with those library releases.
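In practice the change is a mechanical rewrite of the import lines, as the hunks below show; for example:

    # before: 'oslo' namespace package (being removed)
    # from oslo import messaging
    # from oslo.config import cfg

    # after: per-library packages
    import oslo_messaging
    from oslo_config import cfg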
Blueprint: remove-namespace-packages https://blueprints.launchpad.net/oslo-incubator/+spec/remove-namespace-packages Cherry-picked from 3b6ca5b6de9e7964e1a3c41b5b0146e116b248ba Change-Id: I05a408b76d4f31b803769a27759e91df770511bb --- tests/drivers/test_impl_rabbit.py | 4 ++-- tests/drivers/test_matchmaker.py | 2 +- tests/drivers/test_matchmaker_redis.py | 2 +- tests/drivers/test_matchmaker_ring.py | 2 +- tests/notify/test_listener.py | 2 +- tests/rpc/test_client.py | 2 +- tests/rpc/test_server.py | 2 +- tests/test_exception_serialization.py | 2 +- tests/test_transport.py | 2 +- 9 files changed, 10 insertions(+), 10 deletions(-) diff --git a/tests/drivers/test_impl_rabbit.py b/tests/drivers/test_impl_rabbit.py index 515e49bec..b2da4a828 100644 --- a/tests/drivers/test_impl_rabbit.py +++ b/tests/drivers/test_impl_rabbit.py @@ -23,14 +23,14 @@ import kombu from oslotest import mockpatch import testscenarios -from oslo.config import cfg from oslo import messaging -from oslo.serialization import jsonutils +from oslo_config import cfg from oslo_messaging._drivers import amqp from oslo_messaging._drivers import amqpdriver from oslo_messaging._drivers import common as driver_common from oslo_messaging._drivers import impl_rabbit as rabbit_driver from oslo_messaging.tests import utils as test_utils +from oslo_serialization import jsonutils from six.moves import mock load_tests = testscenarios.load_tests_apply_scenarios diff --git a/tests/drivers/test_matchmaker.py b/tests/drivers/test_matchmaker.py index 767414509..fe59fef15 100644 --- a/tests/drivers/test_matchmaker.py +++ b/tests/drivers/test_matchmaker.py @@ -14,8 +14,8 @@ import testtools -from oslo.utils import importutils from oslo_messaging.tests import utils as test_utils +from oslo_utils import importutils # NOTE(jamespage) matchmaker tied directly to eventlet # which is not yet py3 compatible - skip if import fails diff --git a/tests/drivers/test_matchmaker_redis.py b/tests/drivers/test_matchmaker_redis.py index a36e14af6..35a8c1464 100644 --- a/tests/drivers/test_matchmaker_redis.py +++ b/tests/drivers/test_matchmaker_redis.py @@ -14,8 +14,8 @@ import testtools -from oslo.utils import importutils from oslo_messaging.tests import utils as test_utils +from oslo_utils import importutils redis = importutils.try_import('redis') matchmaker_redis = ( diff --git a/tests/drivers/test_matchmaker_ring.py b/tests/drivers/test_matchmaker_ring.py index c3bc52493..010746472 100644 --- a/tests/drivers/test_matchmaker_ring.py +++ b/tests/drivers/test_matchmaker_ring.py @@ -14,8 +14,8 @@ import testtools -from oslo.utils import importutils from oslo_messaging.tests import utils as test_utils +from oslo_utils import importutils # NOTE(jamespage) matchmaker tied directly to eventlet # which is not yet py3 compatible - skip if import fails diff --git a/tests/notify/test_listener.py b/tests/notify/test_listener.py index 80c26a62b..84e257d16 100644 --- a/tests/notify/test_listener.py +++ b/tests/notify/test_listener.py @@ -18,9 +18,9 @@ import time import testscenarios -from oslo.config import cfg from oslo import messaging from oslo.messaging.notify import dispatcher +from oslo_config import cfg from oslo_messaging.tests import utils as test_utils from six.moves import mock diff --git a/tests/rpc/test_client.py b/tests/rpc/test_client.py index 5f138925e..65c4f6752 100644 --- a/tests/rpc/test_client.py +++ b/tests/rpc/test_client.py @@ -15,10 +15,10 @@ import testscenarios -from oslo.config import cfg from oslo import messaging from oslo.messaging import 
exceptions from oslo.messaging import serializer as msg_serializer +from oslo_config import cfg from oslo_messaging.tests import utils as test_utils load_tests = testscenarios.load_tests_apply_scenarios diff --git a/tests/rpc/test_server.py b/tests/rpc/test_server.py index b429191e2..6e1ae1603 100644 --- a/tests/rpc/test_server.py +++ b/tests/rpc/test_server.py @@ -17,8 +17,8 @@ import threading import testscenarios -from oslo.config import cfg from oslo import messaging +from oslo_config import cfg from oslo_messaging.tests import utils as test_utils from six.moves import mock diff --git a/tests/test_exception_serialization.py b/tests/test_exception_serialization.py index 74d808f05..17e8ff1f1 100644 --- a/tests/test_exception_serialization.py +++ b/tests/test_exception_serialization.py @@ -19,9 +19,9 @@ import six import testscenarios from oslo import messaging -from oslo.serialization import jsonutils from oslo_messaging._drivers import common as exceptions from oslo_messaging.tests import utils as test_utils +from oslo_serialization import jsonutils load_tests = testscenarios.load_tests_apply_scenarios diff --git a/tests/test_transport.py b/tests/test_transport.py index 36f8db8ea..a3b5b9128 100644 --- a/tests/test_transport.py +++ b/tests/test_transport.py @@ -19,9 +19,9 @@ import six from stevedore import driver import testscenarios -from oslo.config import cfg from oslo import messaging from oslo.messaging import transport +from oslo_config import cfg from oslo_messaging.tests import utils as test_utils from oslo_messaging import transport as private_transport From 7df65f2937597a259ddebaded9743d9957c77740 Mon Sep 17 00:00:00 2001 From: Oleksii Zamiatin Date: Thu, 2 Jul 2015 18:11:42 +0300 Subject: [PATCH 06/28] Local Fanout implementation Fanout unit-test passes now No matchmaker used yet (multi-host fanout wouldn't work) Change-Id: I9362adab4f7c7eba8120b51efe1b8c2056df3bbe --- .../zmq_driver/broker/zmq_base_proxy.py | 72 ++++++++++++++++-- .../_drivers/zmq_driver/broker/zmq_broker.py | 5 +- .../zmq_driver/broker/zmq_call_proxy.py | 26 ++----- .../zmq_driver/broker/zmq_fanout_proxy.py | 35 +++++++++ .../zmq_driver/broker/zmq_universal_proxy.py | 58 +++++++++++++++ .../zmq_driver/rpc/client/zmq_call_request.py | 5 +- .../zmq_driver/rpc/client/zmq_cast_dealer.py | 6 +- .../zmq_driver/rpc/client/zmq_request.py | 7 +- .../rpc/server/zmq_call_responder.py | 34 ++++----- .../rpc/server/zmq_fanout_consumer.py | 74 +++++++++++++++++++ .../zmq_driver/rpc/server/zmq_server.py | 21 ++++-- .../_drivers/zmq_driver/zmq_serializer.py | 23 +++++- .../_drivers/zmq_driver/zmq_topic.py | 6 +- oslo_messaging/tests/drivers/test_impl_zmq.py | 9 ++- tests/test_exception_serialization.py | 1 + 15 files changed, 318 insertions(+), 64 deletions(-) create mode 100644 oslo_messaging/_drivers/zmq_driver/broker/zmq_fanout_proxy.py create mode 100644 oslo_messaging/_drivers/zmq_driver/broker/zmq_universal_proxy.py create mode 100644 oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_fanout_consumer.py diff --git a/oslo_messaging/_drivers/zmq_driver/broker/zmq_base_proxy.py b/oslo_messaging/_drivers/zmq_driver/broker/zmq_base_proxy.py index 9e11a08a5..d591d94eb 100644 --- a/oslo_messaging/_drivers/zmq_driver/broker/zmq_base_proxy.py +++ b/oslo_messaging/_drivers/zmq_driver/broker/zmq_base_proxy.py @@ -13,15 +13,30 @@ # under the License. 
import abc +import logging import six +from oslo_messaging._drivers.common import RPCException from oslo_messaging._drivers.zmq_driver import zmq_async +from oslo_messaging._drivers.zmq_driver import zmq_topic +from oslo_messaging._i18n import _LE, _LI + +LOG = logging.getLogger(__name__) + +zmq = zmq_async.import_zmq() @six.add_metaclass(abc.ABCMeta) class BaseProxy(object): + """Base TCP-proxy. + + TCP-proxy redirects messages received by TCP from clients to servers + over IPC. Consists of TCP-frontend and IPC-backend objects. Runs + in async executor. + """ + def __init__(self, conf, context): super(BaseProxy, self).__init__() self.conf = conf @@ -30,7 +45,7 @@ class BaseProxy(object): @abc.abstractmethod def run(self): - "Main execution point of the proxy" + """Main execution point of the proxy""" def start(self): self.executor.execute() @@ -45,10 +60,47 @@ class BaseProxy(object): @six.add_metaclass(abc.ABCMeta) class BaseTcpFrontend(object): - def __init__(self, conf, poller, context): + """Base frontend clause. + + TCP-frontend is a part of TCP-proxy which receives incoming + messages from clients. + """ + + def __init__(self, conf, poller, context, + socket_type=None, + port_number=None, + receive_meth=None): + + """Construct a TCP-frontend. + + Its attributes are: + + :param conf: Driver configuration object. + :type conf: ConfigOpts + :param poller: Messages poller-object green or threading. + :type poller: ZmqPoller + :param context: ZeroMQ context object. + :type context: zmq.Context + :param socket_type: ZeroMQ socket type. + :type socket_type: int + :param port_number: Current messaging pipeline port. + :type port_number: int + """ + self.conf = conf self.poller = poller self.context = context + try: + self.frontend = self.context.socket(socket_type) + bind_address = zmq_topic.get_tcp_bind_address(port_number) + LOG.info(_LI("Binding to TCP %s") % bind_address) + self.frontend.bind(bind_address) + self.poller.register(self.frontend, receive_meth) + except zmq.ZMQError as e: + errmsg = _LE("Could not create ZeroMQ receiver daemon. 
" + "Socket may already be in use: %s") % str(e) + LOG.error(errmsg) + raise RPCException(errmsg) def receive_incoming(self): message, socket = self.poller.poll(1) @@ -64,6 +116,14 @@ class BaseBackendMatcher(object): self.backends = {} self.poller = poller + @abc.abstractmethod + def redirect_to_backend(self, message): + """Redirect message""" + + +@six.add_metaclass(abc.ABCMeta) +class DirectBackendMatcher(BaseBackendMatcher): + def redirect_to_backend(self, message): backend, topic = self._match_backend(message) self._send_message(backend, message, topic) @@ -77,16 +137,16 @@ class BaseBackendMatcher(object): @abc.abstractmethod def _get_topic(self, message): - "Extract topic from message" + """Extract topic from message""" @abc.abstractmethod def _get_ipc_address(self, topic): - "Get ipc backend address from topic" + """Get ipc backend address from topic""" @abc.abstractmethod def _send_message(self, backend, message, topic): - "Backend specific sending logic" + """Backend specific sending logic""" @abc.abstractmethod def _create_backend(self, ipc_address): - "Backend specific socket opening logic" + """Backend specific socket opening logic""" diff --git a/oslo_messaging/_drivers/zmq_driver/broker/zmq_broker.py b/oslo_messaging/_drivers/zmq_driver/broker/zmq_broker.py index a0d3f4fe2..08c5d7f79 100644 --- a/oslo_messaging/_drivers/zmq_driver/broker/zmq_broker.py +++ b/oslo_messaging/_drivers/zmq_driver/broker/zmq_broker.py @@ -17,7 +17,7 @@ import os from oslo_utils import excutils -from oslo_messaging._drivers.zmq_driver.broker.zmq_call_proxy import CallProxy +from oslo_messaging._drivers.zmq_driver.broker import zmq_universal_proxy from oslo_messaging._drivers.zmq_driver import zmq_async from oslo_messaging._i18n import _LE, _LI @@ -44,7 +44,8 @@ class ZmqBroker(object): super(ZmqBroker, self).__init__() self.conf = conf self.context = zmq.Context() - self.proxies = [CallProxy(conf, self.context)] + proxy = zmq_universal_proxy.UniversalProxy(conf, self.context) + self.proxies = [proxy] self._create_ipc_dirs() def _create_ipc_dirs(self): diff --git a/oslo_messaging/_drivers/zmq_driver/broker/zmq_call_proxy.py b/oslo_messaging/_drivers/zmq_driver/broker/zmq_call_proxy.py index f4471b532..57c7d80e8 100644 --- a/oslo_messaging/_drivers/zmq_driver/broker/zmq_call_proxy.py +++ b/oslo_messaging/_drivers/zmq_driver/broker/zmq_call_proxy.py @@ -31,7 +31,7 @@ class CallProxy(base_proxy.BaseProxy): def __init__(self, conf, context): super(CallProxy, self).__init__(conf, context) self.tcp_frontend = FrontendTcpRouter(self.conf, context) - self.backend_matcher = CallBackendMatcher(self.conf, context) + self.backend_matcher = DealerBackend(self.conf, context) LOG.info(_LI("Starting call proxy thread")) def run(self): @@ -44,12 +44,12 @@ class CallProxy(base_proxy.BaseProxy): self.tcp_frontend.redirect_outgoing_reply(reply) -class CallBackendMatcher(base_proxy.BaseBackendMatcher): +class DealerBackend(base_proxy.DirectBackendMatcher): def __init__(self, conf, context): - super(CallBackendMatcher, self).__init__(conf, - zmq_async.get_poller(), - context) + super(DealerBackend, self).__init__(conf, + zmq_async.get_poller(), + context) self.backend = self.context.socket(zmq.DEALER) self.poller.register(self.backend) @@ -80,19 +80,9 @@ class FrontendTcpRouter(base_proxy.BaseTcpFrontend): def __init__(self, conf, context): super(FrontendTcpRouter, self).__init__(conf, zmq_async.get_poller(), - context) - - try: - self.frontend = self.context.socket(zmq.ROUTER) - bind_address = 
zmq_topic.get_tcp_bind_address(conf.rpc_zmq_port) - LOG.info(_LI("Binding to TCP ROUTER %s") % bind_address) - self.frontend.bind(bind_address) - self.poller.register(self.frontend) - except zmq.ZMQError: - errmsg = _LE("Could not create ZeroMQ receiver daemon. " - "Socket may already be in use.") - LOG.error(errmsg) - raise RPCException(errmsg) + context, + socket_type=zmq.ROUTER, + port_number=conf.rpc_zmq_port) @staticmethod def _reduce_empty(reply): diff --git a/oslo_messaging/_drivers/zmq_driver/broker/zmq_fanout_proxy.py b/oslo_messaging/_drivers/zmq_driver/broker/zmq_fanout_proxy.py new file mode 100644 index 000000000..131101661 --- /dev/null +++ b/oslo_messaging/_drivers/zmq_driver/broker/zmq_fanout_proxy.py @@ -0,0 +1,35 @@ +# Copyright 2015 Mirantis, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + + +import oslo_messaging._drivers.zmq_driver.broker.zmq_base_proxy as base_proxy +from oslo_messaging._drivers.zmq_driver import zmq_async +from oslo_messaging._drivers.zmq_driver import zmq_serializer +from oslo_messaging._drivers.zmq_driver import zmq_topic + +zmq = zmq_async.import_zmq() + + +class PublisherBackend(base_proxy.BaseBackendMatcher): + + def __init__(self, conf, context): + super(PublisherBackend, self).__init__(conf, + zmq_async.get_poller(), + context) + self.backend = self.context.socket(zmq.PUB) + self.backend.bind(zmq_topic.get_ipc_address_fanout(conf)) + + def redirect_to_backend(self, message): + topic_pos = zmq_serializer.MESSAGE_CALL_TOPIC_POSITION + self.backend.send_multipart(message[topic_pos:]) diff --git a/oslo_messaging/_drivers/zmq_driver/broker/zmq_universal_proxy.py b/oslo_messaging/_drivers/zmq_driver/broker/zmq_universal_proxy.py new file mode 100644 index 000000000..1d8982d41 --- /dev/null +++ b/oslo_messaging/_drivers/zmq_driver/broker/zmq_universal_proxy.py @@ -0,0 +1,58 @@ +# Copyright 2015 Mirantis, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import logging + +import oslo_messaging._drivers.zmq_driver.broker.zmq_base_proxy as base_proxy +from oslo_messaging._drivers.zmq_driver.broker import zmq_call_proxy +from oslo_messaging._drivers.zmq_driver.broker import zmq_fanout_proxy +from oslo_messaging._drivers.zmq_driver import zmq_serializer +from oslo_messaging._i18n import _LI + +LOG = logging.getLogger(__name__) + + +class UniversalProxy(base_proxy.BaseProxy): + + def __init__(self, conf, context): + super(UniversalProxy, self).__init__(conf, context) + self.tcp_frontend = zmq_call_proxy.FrontendTcpRouter(conf, context) + self.backend_matcher = BackendMatcher(conf, context) + call = zmq_serializer.CALL_TYPE + self.call_backend = self.backend_matcher.backends[call] + LOG.info(_LI("Starting universal-proxy thread")) + + def run(self): + message = self.tcp_frontend.receive_incoming() + if message is not None: + self.backend_matcher.redirect_to_backend(message) + + reply, socket = self.call_backend.receive_outgoing_reply() + if reply is not None: + self.tcp_frontend.redirect_outgoing_reply(reply) + + +class BackendMatcher(base_proxy.BaseBackendMatcher): + + def __init__(self, conf, context): + super(BackendMatcher, self).__init__(conf, None, context) + direct_backend = zmq_call_proxy.DealerBackend(conf, context) + self.backends[zmq_serializer.CALL_TYPE] = direct_backend + self.backends[zmq_serializer.CAST_TYPE] = direct_backend + fanout_backend = zmq_fanout_proxy.PublisherBackend(conf, context) + self.backends[zmq_serializer.FANOUT_TYPE] = fanout_backend + + def redirect_to_backend(self, message): + message_type = zmq_serializer.get_msg_type(message) + self.backends[message_type].redirect_to_backend(message) diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_call_request.py b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_call_request.py index fb20efd4a..682b46fb9 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_call_request.py +++ b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_call_request.py @@ -16,6 +16,7 @@ import logging from oslo_messaging._drivers.zmq_driver.rpc.client.zmq_request import Request from oslo_messaging._drivers.zmq_driver import zmq_async +from oslo_messaging._drivers.zmq_driver import zmq_serializer from oslo_messaging._drivers.zmq_driver import zmq_topic from oslo_messaging._i18n import _LE, _LI @@ -33,7 +34,9 @@ class CallRequest(Request): socket = self.zmq_context.socket(zmq.REQ) super(CallRequest, self).__init__(conf, target, context, - message, socket, timeout, retry) + message, socket, + zmq_serializer.CALL_TYPE, + timeout, retry) self.connect_address = zmq_topic.get_tcp_address_call(conf, self.topic) diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_dealer.py b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_dealer.py index 40fddd97b..30a117c1e 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_dealer.py +++ b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_dealer.py @@ -17,6 +17,7 @@ import logging from oslo_messaging._drivers.zmq_driver.rpc.client import zmq_cast_publisher from oslo_messaging._drivers.zmq_driver.rpc.client.zmq_request import Request from oslo_messaging._drivers.zmq_driver import zmq_async +from oslo_messaging._drivers.zmq_driver import zmq_serializer from oslo_messaging._drivers.zmq_driver import zmq_topic from oslo_messaging._i18n import _LE, _LI @@ -30,8 +31,11 @@ class CastRequest(Request): def __init__(self, conf, target, context, message, socket, address, timeout=None, retry=None): 
self.connect_address = address + fanout_type = zmq_serializer.FANOUT_TYPE + cast_type = zmq_serializer.CAST_TYPE + msg_type = fanout_type if target.fanout else cast_type super(CastRequest, self).__init__(conf, target, context, message, - socket, timeout, retry) + socket, msg_type, timeout, retry) def __call__(self, *args, **kwargs): self.send_request() diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_request.py b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_request.py index 2bfe755bf..9575bdc0b 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_request.py +++ b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_request.py @@ -20,6 +20,7 @@ import uuid import six from oslo_messaging._drivers.zmq_driver import zmq_async +from oslo_messaging._drivers.zmq_driver import zmq_serializer from oslo_messaging._drivers.zmq_driver import zmq_topic from oslo_messaging._i18n import _LE @@ -32,7 +33,9 @@ zmq = zmq_async.import_zmq() class Request(object): def __init__(self, conf, target, context, message, - socket, timeout=None, retry=None): + socket, msg_type, timeout=None, retry=None): + + assert msg_type in zmq_serializer.MESSAGE_TYPES, "Unknown msg type!" if message['method'] is None: errmsg = _LE("No method specified for RPC call") @@ -40,6 +43,7 @@ class Request(object): raise KeyError(errmsg) self.msg_id = uuid.uuid4().hex + self.msg_type = msg_type self.target = target self.context = context self.message = message @@ -62,6 +66,7 @@ class Request(object): return False def send_request(self): + self.socket.send_string(self.msg_type, zmq.SNDMORE) self.socket.send_string(str(self.topic), zmq.SNDMORE) self.socket.send_string(self.msg_id, zmq.SNDMORE) self.socket.send_json(self.context, zmq.SNDMORE) diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_call_responder.py b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_call_responder.py index 959ffd70d..9431b8f67 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_call_responder.py +++ b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_call_responder.py @@ -56,13 +56,19 @@ class ZmqIncomingRequest(base.IncomingMessage): class CallResponder(zmq_base_consumer.ConsumerBase): - def __init__(self, listener, conf, poller, context): - super(CallResponder, self).__init__(listener, conf, poller, context) - - def poll(self, timeout=None): + def _receive_message(self, socket): try: - incoming, socket = self.poller.poll(timeout) - reply_id, context, message = incoming + reply_id = socket.recv() + empty = socket.recv() + assert empty == b'', 'Bad format: empty separator expected' + msg_type = socket.recv_string() + assert msg_type is not None, 'Bad format: msg type expected' + topic = socket.recv_string() + assert topic is not None, 'Bad format: topic string expected' + msg_id = socket.recv_string() + assert msg_id is not None, 'Bad format: message ID expected' + context = socket.recv_json() + message = socket.recv_json() LOG.debug("[Server] REP Received message %s" % str(message)) incoming = ZmqIncomingRequest(self.listener, context, @@ -70,27 +76,13 @@ class CallResponder(zmq_base_consumer.ConsumerBase): reply_id, self.poller) return incoming - except zmq.ZMQError as e: LOG.error(_LE("Receiving message failed ... 
{}"), e) def listen(self, target): - - def _receive_message(socket): - reply_id = socket.recv() - empty = socket.recv() - assert empty == b'', 'Bad format: empty separator expected' - topic = socket.recv_string() - assert topic is not None, 'Bad format: topic string expected' - msg_id = socket.recv_string() - assert msg_id is not None, 'Bad format: message ID expected' - context = socket.recv_json() - message = socket.recv_json() - return (reply_id, context, message) - topic = topic_utils.Topic.from_target(self.conf, target) ipc_rep_address = topic_utils.get_ipc_address_call(self.conf, topic) rep_socket = self.context.socket(zmq.REP) rep_socket.bind(ipc_rep_address) self.sockets_per_topic[str(topic)] = rep_socket - self.poller.register(rep_socket, _receive_message) + self.poller.register(rep_socket, self._receive_message) diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_fanout_consumer.py b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_fanout_consumer.py new file mode 100644 index 000000000..3ca78cf05 --- /dev/null +++ b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_fanout_consumer.py @@ -0,0 +1,74 @@ +# Copyright 2015 Mirantis, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + + +import logging + +import six + +from oslo_messaging._drivers import base +from oslo_messaging._drivers.zmq_driver.rpc.server import zmq_base_consumer +from oslo_messaging._drivers.zmq_driver import zmq_async +from oslo_messaging._drivers.zmq_driver import zmq_topic as topic_utils +from oslo_messaging._i18n import _LE + + +LOG = logging.getLogger(__name__) + +zmq = zmq_async.import_zmq() + + +class ZmqFanoutMessage(base.IncomingMessage): + + def __init__(self, listener, context, message, socket, poller): + super(ZmqFanoutMessage, self).__init__(listener, context, message) + poller.resume_polling(socket) + + def reply(self, reply=None, failure=None, log_failure=True): + """Reply is not needed for fanout(cast) messages""" + + def acknowledge(self): + pass + + def requeue(self): + pass + + +class FanoutConsumer(zmq_base_consumer.ConsumerBase): + + def _receive_message(self, socket): + try: + topic = socket.recv_string() + assert topic is not None, 'Bad format: Topic is expected' + msg_id = socket.recv_string() + assert msg_id is not None, 'Bad format: message ID expected' + context = socket.recv_json() + message = socket.recv_json() + LOG.debug("[Server] REP Received message %s" % str(message)) + incoming = ZmqFanoutMessage(self.listener, context, message, + socket, self.poller) + return incoming + except zmq.ZMQError as e: + LOG.error(_LE("Receiving message failed ... 
{}"), e) + + def listen(self, target): + topic = topic_utils.Topic.from_target(self.conf, target) + ipc_address = topic_utils.get_ipc_address_fanout(self.conf) + sub_socket = self.context.socket(zmq.SUB) + sub_socket.connect(ipc_address) + if six.PY3: + sub_socket.setsockopt_string(zmq.SUBSCRIBE, str(topic)) + else: + sub_socket.setsockopt(zmq.SUBSCRIBE, str(topic)) + self.poller.register(sub_socket, self._receive_message) diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_server.py b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_server.py index e6f67ab95..a5540ec59 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_server.py +++ b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_server.py @@ -16,6 +16,7 @@ import logging from oslo_messaging._drivers import base from oslo_messaging._drivers.zmq_driver.rpc.server import zmq_call_responder +from oslo_messaging._drivers.zmq_driver.rpc.server import zmq_fanout_consumer from oslo_messaging._drivers.zmq_driver import zmq_async LOG = logging.getLogger(__name__) @@ -29,14 +30,17 @@ class ZmqServer(base.Listener): LOG.info("[Server] __init__") self.conf = conf self.context = zmq.Context() - poller = zmq_async.get_reply_poller() - self.call_responder = zmq_call_responder.CallResponder(self, conf, - poller, - self.context) + self.poller = zmq_async.get_reply_poller() + self.call_resp = zmq_call_responder.CallResponder(self, conf, + self.poller, + self.context) + self.fanout_resp = zmq_fanout_consumer.FanoutConsumer(self, conf, + self.poller, + self.context) def poll(self, timeout=None): - incoming = self.call_responder.poll(timeout) - return incoming + incoming = self.poller.poll(timeout) + return incoming[0] def stop(self): LOG.info("[Server] Stop") @@ -46,4 +50,7 @@ class ZmqServer(base.Listener): def listen(self, target): LOG.info("[Server] Listen to Target %s" % target) - self.call_responder.listen(target) + if target.fanout: + self.fanout_resp.listen(target) + else: + self.call_resp.listen(target) diff --git a/oslo_messaging/_drivers/zmq_driver/zmq_serializer.py b/oslo_messaging/_drivers/zmq_driver/zmq_serializer.py index 0f0733ae9..81259beaf 100644 --- a/oslo_messaging/_drivers/zmq_driver/zmq_serializer.py +++ b/oslo_messaging/_drivers/zmq_driver/zmq_serializer.py @@ -23,7 +23,26 @@ from oslo_messaging._i18n import _LE, _LW LOG = logging.getLogger(__name__) -MESSAGE_CALL_TOPIC_POSITION = 2 +MESSAGE_CALL_TYPE_POSITION = 2 +MESSAGE_CALL_TOPIC_POSITION = 3 + +CALL_TYPE = 'call' +CAST_TYPE = 'cast' +FANOUT_TYPE = 'fanout' +NOTIFY_TYPE = 'notify' + +MESSAGE_TYPES = (CALL_TYPE, CAST_TYPE, FANOUT_TYPE, NOTIFY_TYPE) + + +def get_msg_type(message): + type = message[MESSAGE_CALL_TYPE_POSITION] + if six.PY3: + type = type.decode('utf-8') + if type not in MESSAGE_TYPES: + errmsg = _LE("Unknown message type: %s") % str(type) + LOG.error(errmsg) + rpc_common.RPCException(errmsg) + return type def _get_topic_from_msg(message, position): @@ -46,7 +65,7 @@ def _get_topic_from_msg(message, position): except Exception as e: errmsg = _LE("Failed topic string parsing, %s") % str(e) LOG.error(errmsg) - rpc_common.RPCException(errmsg) + raise rpc_common.RPCException(errmsg) return topic_items[0], topic_items[1] diff --git a/oslo_messaging/_drivers/zmq_driver/zmq_topic.py b/oslo_messaging/_drivers/zmq_driver/zmq_topic.py index c338b69c5..332c81912 100644 --- a/oslo_messaging/_drivers/zmq_driver/zmq_topic.py +++ b/oslo_messaging/_drivers/zmq_driver/zmq_topic.py @@ -29,6 +29,10 @@ def get_ipc_address_cast(conf, topic): return 
"ipc://%s/fanout/%s" % (conf.rpc_zmq_ipc_dir, str(topic)) +def get_ipc_address_fanout(conf): + return "ipc://%s/fanout_general" % conf.rpc_zmq_ipc_dir + + class Topic(object): def __init__(self, conf, topic, server=None, fanout=False): @@ -58,4 +62,4 @@ class Topic(object): return self._topic if self._topic else "" def __str__(self, *args, **kwargs): - return "%s.%s" % (self.topic, self.server) + return u"%s.%s" % (self.topic, self.server) diff --git a/oslo_messaging/tests/drivers/test_impl_zmq.py b/oslo_messaging/tests/drivers/test_impl_zmq.py index a6eef2f7c..79b797cdb 100644 --- a/oslo_messaging/tests/drivers/test_impl_zmq.py +++ b/oslo_messaging/tests/drivers/test_impl_zmq.py @@ -165,20 +165,21 @@ class TestZmqBasics(ZmqBaseTestCase): method = self.listener.message.message[u'method'] self.assertEqual(u'hello-world', method) - @testtools.skip("Not implemented feature") def test_send_fanout(self): target = oslo_messaging.Target(topic='testtopic', fanout=True) - self.driver.listen(target) + self.listener.listen(target) result = self.driver.send( target, {}, {'method': 'hello-world', 'tx_id': 1}, wait_for_reply=False) + self.listener._received.wait() + self.assertIsNone(result) self.assertEqual(True, self.listener._received.isSet()) - msg_pattern = "{'method': 'hello-world', 'tx_id': 1}" - self.assertEqual(msg_pattern, self.listener.message) + method = self.listener.message.message[u'method'] + self.assertEqual(u'hello-world', method) def test_send_receive_direct(self): """Call() without topic.""" diff --git a/tests/test_exception_serialization.py b/tests/test_exception_serialization.py index 17e8ff1f1..baa2b79c3 100644 --- a/tests/test_exception_serialization.py +++ b/tests/test_exception_serialization.py @@ -19,6 +19,7 @@ import six import testscenarios from oslo import messaging + from oslo_messaging._drivers import common as exceptions from oslo_messaging.tests import utils as test_utils from oslo_serialization import jsonutils From bcdc0e88ecdb285dc3b21ce6ed858ddc1ac2c628 Mon Sep 17 00:00:00 2001 From: Victor Sergeyev Date: Thu, 9 Jul 2015 11:39:31 +0300 Subject: [PATCH 07/28] ZMQ: Allow to raise remote exception This patch adds possibility to re-raise on client's side exception, that was raised on server side - serialize it on server side, restore and re-raise on client. Allowed to pass `allowed_remote_exmods` parameter from impl_zmq to CallRequest class Functional test CallTestCase.test_exception() passes now, so added it to tox.ini. Modified zmq_receiver to be able run functional tests. 
Change-Id: Ic055f3574962f3e80a0528d5d99320386303634e --- oslo_messaging/_cmd/zmq_receiver.py | 8 ++++++-- oslo_messaging/_drivers/impl_zmq.py | 8 +++++++- .../_drivers/zmq_driver/broker/zmq_base_proxy.py | 1 + .../_drivers/zmq_driver/poller/green_poller.py | 3 --- .../zmq_driver/rpc/client/zmq_call_request.py | 11 +++++++++-- .../_drivers/zmq_driver/rpc/client/zmq_client.py | 8 +++++--- .../zmq_driver/rpc/server/zmq_call_responder.py | 4 ++++ .../_drivers/zmq_driver/rpc/server/zmq_server.py | 2 +- oslo_messaging/tests/functional/test_functional.py | 3 +-- oslo_messaging/tests/functional/utils.py | 9 +++++++-- tox.ini | 3 ++- 11 files changed, 43 insertions(+), 17 deletions(-) diff --git a/oslo_messaging/_cmd/zmq_receiver.py b/oslo_messaging/_cmd/zmq_receiver.py index cbcdfe88d..f259299f9 100644 --- a/oslo_messaging/_cmd/zmq_receiver.py +++ b/oslo_messaging/_cmd/zmq_receiver.py @@ -24,6 +24,7 @@ import sys from oslo_config import cfg from oslo_messaging._drivers import impl_zmq +from oslo_messaging._drivers.zmq_driver.broker import zmq_broker from oslo_messaging._executors import base # FIXME(markmc) CONF = cfg.CONF @@ -35,6 +36,9 @@ def main(): CONF(sys.argv[1:], project='oslo') logging.basicConfig(level=logging.DEBUG) - with contextlib.closing(impl_zmq.ZmqProxy(CONF)) as reactor: - reactor.consume_in_thread() + with contextlib.closing(zmq_broker.ZmqBroker(CONF)) as reactor: + reactor.start() reactor.wait() + +if __name__ == "__main__": + main() diff --git a/oslo_messaging/_drivers/impl_zmq.py b/oslo_messaging/_drivers/impl_zmq.py index 7357aa3e3..b75bf8f9c 100644 --- a/oslo_messaging/_drivers/impl_zmq.py +++ b/oslo_messaging/_drivers/impl_zmq.py @@ -71,6 +71,11 @@ zmq_opts = [ default=30, help='Seconds to wait before a cast expires (TTL). ' 'Only supported by impl_zmq.'), + + cfg.IntOpt('rpc_poll_timeout', + default=1, + help='The default number of seconds that poll should wait. 
' + 'Poll raises timeout exception when timeout expired.'), ] @@ -95,7 +100,8 @@ class ZmqDriver(base.BaseDriver): def send(self, target, ctxt, message, wait_for_reply=None, timeout=None, retry=None): if self.client is None: - self.client = zmq_client.ZmqClient(self.conf, self.matchmaker) + self.client = zmq_client.ZmqClient(self.conf, self.matchmaker, + self._allowed_remote_exmods) if wait_for_reply: return self.client.call(target, ctxt, message, timeout, retry) else: diff --git a/oslo_messaging/_drivers/zmq_driver/broker/zmq_base_proxy.py b/oslo_messaging/_drivers/zmq_driver/broker/zmq_base_proxy.py index d591d94eb..59cd42a79 100644 --- a/oslo_messaging/_drivers/zmq_driver/broker/zmq_base_proxy.py +++ b/oslo_messaging/_drivers/zmq_driver/broker/zmq_base_proxy.py @@ -104,6 +104,7 @@ class BaseTcpFrontend(object): def receive_incoming(self): message, socket = self.poller.poll(1) + LOG.info(_LI("Message %s received."), message) return message diff --git a/oslo_messaging/_drivers/zmq_driver/poller/green_poller.py b/oslo_messaging/_drivers/zmq_driver/poller/green_poller.py index b2c26c8a8..f09bb016a 100644 --- a/oslo_messaging/_drivers/zmq_driver/poller/green_poller.py +++ b/oslo_messaging/_drivers/zmq_driver/poller/green_poller.py @@ -19,13 +19,10 @@ import eventlet import six from oslo_messaging._drivers import common as rpc_common -from oslo_messaging._drivers.zmq_driver import zmq_async from oslo_messaging._drivers.zmq_driver import zmq_poller LOG = logging.getLogger(__name__) -zmq = zmq_async.import_zmq() - class GreenPoller(zmq_poller.ZmqPoller): diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_call_request.py b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_call_request.py index 682b46fb9..460275378 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_call_request.py +++ b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_call_request.py @@ -14,6 +14,7 @@ import logging +from oslo_messaging._drivers import common as rpc_common from oslo_messaging._drivers.zmq_driver.rpc.client.zmq_request import Request from oslo_messaging._drivers.zmq_driver import zmq_async from oslo_messaging._drivers.zmq_driver import zmq_serializer @@ -28,7 +29,8 @@ zmq = zmq_async.import_zmq() class CallRequest(Request): def __init__(self, conf, target, context, message, timeout=None, - retry=None): + retry=None, allowed_remote_exmods=None): + self.allowed_remote_exmods = allowed_remote_exmods or [] try: self.zmq_context = zmq.Context() socket = self.zmq_context.socket(zmq.REQ) @@ -44,9 +46,14 @@ class CallRequest(Request): self.socket.connect(self.connect_address) except zmq.ZMQError as e: LOG.error(_LE("Error connecting to socket: %s") % str(e)) + raise def receive_reply(self): # NOTE(ozamiatin): Check for retry here (no retries now) self.socket.setsockopt(zmq.RCVTIMEO, self.timeout) reply = self.socket.recv_json() - return reply[u'reply'] + if reply['failure']: + raise rpc_common.deserialize_remote_exception( + reply['failure'], self.allowed_remote_exmods) + else: + return reply['reply'] diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_client.py b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_client.py index a4eed4953..ec00cb912 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_client.py +++ b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_client.py @@ -19,14 +19,16 @@ from oslo_messaging._drivers.zmq_driver.rpc.client import zmq_cast_dealer class ZmqClient(object): - def __init__(self, conf, matchmaker=None): + def __init__(self, conf, 
matchmaker=None, allowed_remote_exmods=None): self.conf = conf + self.allowed_remote_exmods = allowed_remote_exmods or [] self.cast_publisher = zmq_cast_dealer.DealerCastPublisher(conf, matchmaker) def call(self, target, context, message, timeout=None, retry=None): - request = zmq_call_request.CallRequest(self.conf, target, context, - message, timeout, retry) + request = zmq_call_request.CallRequest( + self.conf, target, context, message, timeout, retry, + self.allowed_remote_exmods) return request() def cast(self, target, context, message, timeout=None, retry=None): diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_call_responder.py b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_call_responder.py index 9431b8f67..59b46e535 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_call_responder.py +++ b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_call_responder.py @@ -16,6 +16,7 @@ import logging from oslo_messaging._drivers import base +from oslo_messaging._drivers import common as rpc_common from oslo_messaging._drivers.zmq_driver.rpc.server import zmq_base_consumer from oslo_messaging._drivers.zmq_driver import zmq_async from oslo_messaging._drivers.zmq_driver import zmq_topic as topic_utils @@ -37,6 +38,9 @@ class ZmqIncomingRequest(base.IncomingMessage): self.poller = poller def reply(self, reply=None, failure=None, log_failure=True): + if failure is not None: + failure = rpc_common.serialize_remote_exception(failure, + log_failure) message_reply = {u'reply': reply, u'failure': failure, u'log_failure': log_failure} diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_server.py b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_server.py index a5540ec59..b51ff0187 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_server.py +++ b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_server.py @@ -39,7 +39,7 @@ class ZmqServer(base.Listener): self.context) def poll(self, timeout=None): - incoming = self.poller.poll(timeout) + incoming = self.poller.poll(timeout or self.conf.rpc_poll_timeout) return incoming[0] def stop(self): diff --git a/oslo_messaging/tests/functional/test_functional.py b/oslo_messaging/tests/functional/test_functional.py index 32cc0190c..962d473fe 100644 --- a/oslo_messaging/tests/functional/test_functional.py +++ b/oslo_messaging/tests/functional/test_functional.py @@ -103,8 +103,7 @@ class CallTestCase(utils.SkipIfNoTransportURL): group = self.useFixture(utils.RpcServerGroupFixture(self.url)) client = group.client(1) client.add(increment=2) - f = lambda: client.subtract(increment=3) - self.assertThat(f, matchers.raises(ValueError)) + self.assertRaises(ValueError, client.subtract, increment=3) def test_timeout_with_concurrently_queues(self): transport = self.useFixture(utils.TransportFixture(self.url)) diff --git a/oslo_messaging/tests/functional/utils.py b/oslo_messaging/tests/functional/utils.py index 8ac087bc4..de1673839 100644 --- a/oslo_messaging/tests/functional/utils.py +++ b/oslo_messaging/tests/functional/utils.py @@ -125,8 +125,13 @@ class RpcServerGroupFixture(fixtures.Fixture): # NOTE(sileht): topic and servier_name must be uniq # to be able to run all tests in parallel self.topic = topic or str(uuid.uuid4()) - self.names = names or ["server_%i_%s" % (i, uuid.uuid4()) - for i in range(3)] + if self.url.startswith('zmq'): + # NOTE(viktors): We need to pass correct hots name to the to + # get_tcp_.*() methods. Should we use nameserver here? 
+ self.names = names or [cfg.CONF.rpc_zmq_host for i in range(3)] + else: + self.names = names or ["server_%i_%s" % (i, uuid.uuid4()) + for i in range(3)] self.exchange = exchange self.targets = [self._target(server=n) for n in self.names] self.use_fanout_ctrl = use_fanout_ctrl diff --git a/tox.ini b/tox.ini index 6c86be990..7d0d665cb 100644 --- a/tox.ini +++ b/tox.ini @@ -41,7 +41,8 @@ setenv = TRANSPORT_URL=amqp://stackqpid:secretqpid@127.0.0.1:65123// commands = {toxinidir}/setup-test-env-qpid.sh python setup.py testr --slowest --testr-args='oslo_messaging.tests.functional' [testenv:py27-func-zeromq] -commands = {toxinidir}/setup-test-env-zmq.sh python setup.py testr --slowest --testr-args='oslo_messaging.tests.functional' +commands = {toxinidir}/setup-test-env-zmq.sh python -m testtools.run oslo_messaging.tests.functional.test_functional.CallTestCase.test_exception +# commands = {toxinidir}/setup-test-env-zmq.sh python setup.py testr --slowest --testr-args='oslo_messaging.tests.functional' [flake8] show-source = True From 48f2a87a273a43ca9006f9bdf3315bda338bd118 Mon Sep 17 00:00:00 2001 From: Victor Sergeyev Date: Thu, 9 Jul 2015 16:04:01 +0300 Subject: [PATCH 08/28] Fix work with timeout in CallRequest.receive_reply() Refactored CallRequest.receive_reply() method to raise MessagingTimeout exception, when timeout is reached. Removed unused _to_milliseconds() method Functional test CallTestCase.test_timeout() passes now Change-Id: Idc3224646c3626a56606d019ff7ff155d3e3201a --- .../zmq_driver/rpc/client/zmq_call_request.py | 14 +++++++++++--- .../_drivers/zmq_driver/rpc/client/zmq_request.py | 6 +----- oslo_messaging/tests/test_utils.py | 2 +- tox.ini | 4 +++- 4 files changed, 16 insertions(+), 10 deletions(-) diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_call_request.py b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_call_request.py index 460275378..d66e5d4de 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_call_request.py +++ b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_call_request.py @@ -14,6 +14,7 @@ import logging +import oslo_messaging from oslo_messaging._drivers import common as rpc_common from oslo_messaging._drivers.zmq_driver.rpc.client.zmq_request import Request from oslo_messaging._drivers.zmq_driver import zmq_async @@ -31,10 +32,10 @@ class CallRequest(Request): def __init__(self, conf, target, context, message, timeout=None, retry=None, allowed_remote_exmods=None): self.allowed_remote_exmods = allowed_remote_exmods or [] + try: self.zmq_context = zmq.Context() socket = self.zmq_context.socket(zmq.REQ) - super(CallRequest, self).__init__(conf, target, context, message, socket, zmq_serializer.CALL_TYPE, @@ -50,8 +51,15 @@ class CallRequest(Request): def receive_reply(self): # NOTE(ozamiatin): Check for retry here (no retries now) - self.socket.setsockopt(zmq.RCVTIMEO, self.timeout) - reply = self.socket.recv_json() + poller = zmq_async.get_reply_poller() + poller.register(self.socket, + recv_method=lambda socket: socket.recv_json()) + + reply, socket = poller.poll(timeout=self.timeout) + if reply is None: + raise oslo_messaging.MessagingTimeout( + "Timeout %s seconds was reached" % self.timeout) + if reply['failure']: raise rpc_common.deserialize_remote_exception( reply['failure'], self.allowed_remote_exmods) diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_request.py b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_request.py index 9575bdc0b..badb04066 100644 --- 
a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_request.py +++ b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_request.py @@ -47,16 +47,12 @@ class Request(object): self.target = target self.context = context self.message = message - self.timeout = self._to_milliseconds(conf, timeout) + self.timeout = timeout or conf.rpc_response_timeout self.retry = retry self.reply = None self.socket = socket self.topic = zmq_topic.Topic.from_target(conf, target) - @staticmethod - def _to_milliseconds(conf, timeout): - return timeout * 1000 if timeout else conf.rpc_response_timeout * 1000 - @property def is_replied(self): return self.reply is not None diff --git a/oslo_messaging/tests/test_utils.py b/oslo_messaging/tests/test_utils.py index 22178d8ea..d8a2912b1 100644 --- a/oslo_messaging/tests/test_utils.py +++ b/oslo_messaging/tests/test_utils.py @@ -84,4 +84,4 @@ class TimerTestCase(test_utils.BaseTestCase): callback = mock.Mock() remaining = t.check_return(callback) self.assertEqual(0, remaining) - callback.assert_called_once + self.assertEqual(1, callback.call_count) diff --git a/tox.ini b/tox.ini index 7d0d665cb..d91508bcf 100644 --- a/tox.ini +++ b/tox.ini @@ -41,7 +41,9 @@ setenv = TRANSPORT_URL=amqp://stackqpid:secretqpid@127.0.0.1:65123// commands = {toxinidir}/setup-test-env-qpid.sh python setup.py testr --slowest --testr-args='oslo_messaging.tests.functional' [testenv:py27-func-zeromq] -commands = {toxinidir}/setup-test-env-zmq.sh python -m testtools.run oslo_messaging.tests.functional.test_functional.CallTestCase.test_exception +commands = {toxinidir}/setup-test-env-zmq.sh python -m testtools.run \ + oslo_messaging.tests.functional.test_functional.CallTestCase.test_exception \ + oslo_messaging.tests.functional.test_functional.CallTestCase.test_timeout # commands = {toxinidir}/setup-test-env-zmq.sh python setup.py testr --slowest --testr-args='oslo_messaging.tests.functional' [flake8] From 12aff74f53a7defa732257a50198815088975b50 Mon Sep 17 00:00:00 2001 From: Doug Royal Date: Fri, 10 Jul 2015 15:26:20 -0500 Subject: [PATCH 09/28] Add unit tests for zmq_serializer Change-Id: I428f2f80ca10bc888c809e4d6e7862e2ee5d442c --- .../_drivers/zmq_driver/zmq_serializer.py | 39 ++++++++----- oslo_messaging/tests/drivers/test_impl_zmq.py | 56 ++++++++++++++++++- tests/drivers/test_impl_zmq.py | 3 +- 3 files changed, 83 insertions(+), 15 deletions(-) diff --git a/oslo_messaging/_drivers/zmq_driver/zmq_serializer.py b/oslo_messaging/_drivers/zmq_driver/zmq_serializer.py index 81259beaf..64145ab4f 100644 --- a/oslo_messaging/_drivers/zmq_driver/zmq_serializer.py +++ b/oslo_messaging/_drivers/zmq_driver/zmq_serializer.py @@ -48,26 +48,39 @@ def get_msg_type(message): def _get_topic_from_msg(message, position): pathsep = set((os.path.sep or '', os.path.altsep or '', '/', '\\')) badchars = re.compile(r'[%s]' % re.escape(''.join(pathsep))) + + if len(message) < position + 1: + errmsg = _LE("Message did not contain a topic") + LOG.error("%s: %s" % (errmsg, message)) + raise rpc_common.RPCException("%s: %s" % (errmsg, message)) + topic = message[position] - topic_items = None if six.PY3: topic = topic.decode('utf-8') - try: - # The topic is received over the network, - # don't trust this input. 
- if badchars.search(topic) is not None: - emsg = _LW("Topic contained dangerous characters") - LOG.warn(emsg) - raise rpc_common.RPCException(emsg) - topic_items = topic.split('.', 1) - except Exception as e: - errmsg = _LE("Failed topic string parsing, %s") % str(e) - LOG.error(errmsg) - raise rpc_common.RPCException(errmsg) + # The topic is received over the network, don't trust this input. + if badchars.search(topic) is not None: + errmsg = _LW("Topic contained dangerous characters") + LOG.warn("%s: %s" % (errmsg, topic)) + raise rpc_common.RPCException("%s: %s" % (errmsg, topic)) + + topic_items = topic.split('.', 1) + + if len(topic_items) != 2: + errmsg = _LE("Topic was not formatted correctly") + LOG.error("%s: %s" % (errmsg, topic)) + raise rpc_common.RPCException("%s: %s" % (errmsg, topic)) + return topic_items[0], topic_items[1] def get_topic_from_call_message(message): + """Extract topic and server from message. + + :param message: A message + :type message: list + + :returns: (topic: str, server: str) + """ return _get_topic_from_msg(message, MESSAGE_CALL_TOPIC_POSITION) diff --git a/oslo_messaging/tests/drivers/test_impl_zmq.py b/oslo_messaging/tests/drivers/test_impl_zmq.py index 79b797cdb..a2499ce10 100644 --- a/oslo_messaging/tests/drivers/test_impl_zmq.py +++ b/oslo_messaging/tests/drivers/test_impl_zmq.py @@ -13,6 +13,8 @@ # under the License. import logging +import os +import re import socket import threading @@ -20,9 +22,11 @@ import fixtures import testtools import oslo_messaging +from oslo_messaging._drivers.common import RPCException from oslo_messaging._drivers import impl_zmq from oslo_messaging._drivers.zmq_driver.broker.zmq_broker import ZmqBroker from oslo_messaging._drivers.zmq_driver import zmq_async +from oslo_messaging._drivers.zmq_driver import zmq_serializer from oslo_messaging._i18n import _ from oslo_messaging.tests import utils as test_utils @@ -199,7 +203,8 @@ class TestPoller(test_utils.BaseTestCase): super(TestPoller, self).setUp() self.poller = zmq_async.get_poller() self.ctx = zmq.Context() - self.ADDR_REQ = "ipc://request1" + self.internal_ipc_dir = self.useFixture(fixtures.TempDir()).path + self.ADDR_REQ = "ipc://%s/request1" % self.internal_ipc_dir def test_poll_blocking(self): @@ -243,3 +248,52 @@ class TestPoller(test_utils.BaseTestCase): incoming, socket = reply_poller.poll(1) self.assertIsNone(incoming) self.assertIsNone(socket) + + +class TestZmqSerializer(test_utils.BaseTestCase): + + def test_message_without_topic_raises_RPCException(self): + # The topic is the 4th element of the message. 
+ msg_without_topic = ['only', 'three', 'parts'] + + expected = "Message did not contain a topic: %s" % msg_without_topic + with self.assertRaisesRegexp(RPCException, re.escape(expected)): + zmq_serializer.get_topic_from_call_message(msg_without_topic) + + def test_invalid_topic_format_raises_RPCException(self): + invalid_topic = "no dots to split on, so not index-able".encode('utf8') + bad_message = ['', '', '', invalid_topic] + + expected_msg = "Topic was not formatted correctly: %s" + expected_msg = expected_msg % invalid_topic.decode('utf8') + with self.assertRaisesRegexp(RPCException, expected_msg): + zmq_serializer.get_topic_from_call_message(bad_message) + + def test_py3_decodes_bytes_correctly(self): + message = ['', '', '', b'topic.ipaddress'] + + actual, _ = zmq_serializer.get_topic_from_call_message(message) + + self.assertEqual('topic', actual) + + def test_bad_characters_in_topic_raise_RPCException(self): + # handle unexpected os path separators: + unexpected_evil = '<' + os.path.sep = unexpected_evil + + unexpected_alt_evil = '>' + os.path.altsep = unexpected_alt_evil + + evil_chars = [unexpected_evil, unexpected_alt_evil, '\\', '/'] + + for evil_char in evil_chars: + evil_topic = '%s%s%s' % ('trust.me', evil_char, 'please') + evil_topic = evil_topic.encode('utf8') + evil_message = ['', '', '', evil_topic] + + expected_msg = "Topic contained dangerous characters: %s" + expected_msg = expected_msg % evil_topic.decode('utf8') + expected_msg = re.escape(expected_msg) + + with self.assertRaisesRegexp(RPCException, expected_msg): + zmq_serializer.get_topic_from_call_message(evil_message) diff --git a/tests/drivers/test_impl_zmq.py b/tests/drivers/test_impl_zmq.py index a6eef2f7c..fb12ac760 100644 --- a/tests/drivers/test_impl_zmq.py +++ b/tests/drivers/test_impl_zmq.py @@ -198,7 +198,8 @@ class TestPoller(test_utils.BaseTestCase): super(TestPoller, self).setUp() self.poller = zmq_async.get_poller() self.ctx = zmq.Context() - self.ADDR_REQ = "ipc://request1" + self.internal_ipc_dir = self.useFixture(fixtures.TempDir()).path + self.ADDR_REQ = "ipc://%s/request1" % self.internal_ipc_dir def test_poll_blocking(self): From ebcadf3d5e6c095c2c6e996c338fcacd1f27af25 Mon Sep 17 00:00:00 2001 From: Oleksii Zamiatin Date: Tue, 14 Jul 2015 23:03:22 +0300 Subject: [PATCH 10/28] Fix threading zmq poller and proxy - Fixed universal proxy to not get stuck with multiple backends - Fixed threading pollers/executors (proxy side) - Driver option to switch green/no-green impl. 
- Swtiched to no-green in real-world proxy (green left for unit tests) - Minor names fixes in serializer Change-Id: Id6508101521d8914228c639ed58ecd29db0ef456 --- oslo_messaging/_cmd/zmq_receiver.py | 6 +-- oslo_messaging/_drivers/base.py | 3 +- oslo_messaging/_drivers/impl_zmq.py | 6 ++- .../zmq_driver/broker/zmq_base_proxy.py | 8 ++-- .../_drivers/zmq_driver/broker/zmq_broker.py | 3 +- .../zmq_driver/broker/zmq_call_proxy.py | 29 ++++++++------ .../zmq_driver/broker/zmq_cast_proxy.py | 9 ++--- .../zmq_driver/broker/zmq_fanout_proxy.py | 5 +-- .../zmq_driver/broker/zmq_universal_proxy.py | 30 +++++++++----- .../zmq_driver/poller/threading_poller.py | 40 +++++++++++++++---- .../zmq_driver/rpc/client/zmq_call_request.py | 7 ++-- .../rpc/server/zmq_call_responder.py | 7 ++-- .../_drivers/zmq_driver/zmq_async.py | 40 ++++++++++--------- .../_drivers/zmq_driver/zmq_serializer.py | 4 ++ oslo_messaging/tests/drivers/test_impl_zmq.py | 2 +- setup-test-env-zmq.sh | 3 -- 16 files changed, 123 insertions(+), 79 deletions(-) diff --git a/oslo_messaging/_cmd/zmq_receiver.py b/oslo_messaging/_cmd/zmq_receiver.py index f259299f9..abd24e8d4 100644 --- a/oslo_messaging/_cmd/zmq_receiver.py +++ b/oslo_messaging/_cmd/zmq_receiver.py @@ -14,9 +14,6 @@ # License for the specific language governing permissions and limitations # under the License. -import eventlet -eventlet.monkey_patch() - import contextlib import logging import sys @@ -30,6 +27,9 @@ from oslo_messaging._executors import base # FIXME(markmc) CONF = cfg.CONF CONF.register_opts(impl_zmq.zmq_opts) CONF.register_opts(base._pool_opts) +# TODO(ozamiatin): Move this option assignment to an external config file +# Use efficient zmq poller in real-world deployment +CONF.rpc_zmq_native = True def main(): diff --git a/oslo_messaging/_drivers/base.py b/oslo_messaging/_drivers/base.py index 2051e9a30..1d2620825 100644 --- a/oslo_messaging/_drivers/base.py +++ b/oslo_messaging/_drivers/base.py @@ -75,8 +75,7 @@ class Listener(object): def cleanup(self): """Cleanup listener. - Close connection used by listener if any. For some listeners like - zmq there is no connection so no need to close connection. + Close connection (socket) used by listener if any. As this is listener specific method, overwrite it in to derived class if cleanup of listener required. """ diff --git a/oslo_messaging/_drivers/impl_zmq.py b/oslo_messaging/_drivers/impl_zmq.py index b75bf8f9c..fbd9f081f 100644 --- a/oslo_messaging/_drivers/impl_zmq.py +++ b/oslo_messaging/_drivers/impl_zmq.py @@ -45,9 +45,13 @@ zmq_opts = [ cfg.BoolOpt('rpc_zmq_all_req_rep', default=True, - deprecated_group='DEFAULT', help='Use REQ/REP pattern for all methods CALL/CAST/FANOUT.'), + cfg.BoolOpt('rpc_zmq_native', + default=False, + help='Switches ZeroMQ eventlet/threading way of usage.' 
+ 'Affects pollers, executors etc.'), + # The following port is unassigned by IANA as of 2012-05-21 cfg.IntOpt('rpc_zmq_port', default=9501, help='ZeroMQ receiver listening port.'), diff --git a/oslo_messaging/_drivers/zmq_driver/broker/zmq_base_proxy.py b/oslo_messaging/_drivers/zmq_driver/broker/zmq_base_proxy.py index 59cd42a79..5443e6af0 100644 --- a/oslo_messaging/_drivers/zmq_driver/broker/zmq_base_proxy.py +++ b/oslo_messaging/_drivers/zmq_driver/broker/zmq_base_proxy.py @@ -41,7 +41,8 @@ class BaseProxy(object): super(BaseProxy, self).__init__() self.conf = conf self.context = context - self.executor = zmq_async.get_executor(self.run) + self.executor = zmq_async.get_executor( + self.run, native_zmq=conf.rpc_zmq_native) @abc.abstractmethod def run(self): @@ -132,9 +133,8 @@ class DirectBackendMatcher(BaseBackendMatcher): def _match_backend(self, message): topic = self._get_topic(message) ipc_address = self._get_ipc_address(topic) - if ipc_address not in self.backends: - self._create_backend(ipc_address) - return self.backend, topic + backend = self._create_backend(ipc_address) + return backend, topic @abc.abstractmethod def _get_topic(self, message): diff --git a/oslo_messaging/_drivers/zmq_driver/broker/zmq_broker.py b/oslo_messaging/_drivers/zmq_driver/broker/zmq_broker.py index 08c5d7f79..e3835bae6 100644 --- a/oslo_messaging/_drivers/zmq_driver/broker/zmq_broker.py +++ b/oslo_messaging/_drivers/zmq_driver/broker/zmq_broker.py @@ -24,8 +24,6 @@ from oslo_messaging._i18n import _LE, _LI LOG = logging.getLogger(__name__) -zmq = zmq_async.import_zmq() - class ZmqBroker(object): """Local messaging IPC broker (nodes are still peers). @@ -42,6 +40,7 @@ class ZmqBroker(object): def __init__(self, conf): super(ZmqBroker, self).__init__() + zmq = zmq_async.import_zmq(native_zmq=conf.rpc_zmq_native) self.conf = conf self.context = zmq.Context() proxy = zmq_universal_proxy.UniversalProxy(conf, self.context) diff --git a/oslo_messaging/_drivers/zmq_driver/broker/zmq_call_proxy.py b/oslo_messaging/_drivers/zmq_driver/broker/zmq_call_proxy.py index 57c7d80e8..a2150fa30 100644 --- a/oslo_messaging/_drivers/zmq_driver/broker/zmq_call_proxy.py +++ b/oslo_messaging/_drivers/zmq_driver/broker/zmq_call_proxy.py @@ -46,12 +46,11 @@ class CallProxy(base_proxy.BaseProxy): class DealerBackend(base_proxy.DirectBackendMatcher): - def __init__(self, conf, context): - super(DealerBackend, self).__init__(conf, - zmq_async.get_poller(), - context) - self.backend = self.context.socket(zmq.DEALER) - self.poller.register(self.backend) + def __init__(self, conf, context, poller=None): + if poller is None: + poller = zmq_async.get_poller( + native_zmq=conf.rpc_zmq_native) + super(DealerBackend, self).__init__(conf, poller, context) def receive_outgoing_reply(self): reply_message = self.poller.poll(1) @@ -71,16 +70,22 @@ class DealerBackend(base_proxy.DirectBackendMatcher): backend.send_multipart(message) def _create_backend(self, ipc_address): - self.backend.connect(ipc_address) - self.backends[str(ipc_address)] = True + if ipc_address in self.backends: + return self.backends[ipc_address] + backend = self.context.socket(zmq.DEALER) + backend.connect(ipc_address) + self.poller.register(backend) + self.backends[ipc_address] = backend + return backend class FrontendTcpRouter(base_proxy.BaseTcpFrontend): - def __init__(self, conf, context): - super(FrontendTcpRouter, self).__init__(conf, - zmq_async.get_poller(), - context, + def __init__(self, conf, context, poller=None): + if poller is None: + poller = 
zmq_async.get_poller( + native_zmq=conf.rpc_zmq_native) + super(FrontendTcpRouter, self).__init__(conf, poller, context, socket_type=zmq.ROUTER, port_number=conf.rpc_zmq_port) diff --git a/oslo_messaging/_drivers/zmq_driver/broker/zmq_cast_proxy.py b/oslo_messaging/_drivers/zmq_driver/broker/zmq_cast_proxy.py index 8eef8befc..9779779df 100644 --- a/oslo_messaging/_drivers/zmq_driver/broker/zmq_cast_proxy.py +++ b/oslo_messaging/_drivers/zmq_driver/broker/zmq_cast_proxy.py @@ -42,8 +42,8 @@ class CastProxy(base_proxy.BaseProxy): class FrontendTcpPull(base_proxy.BaseTcpFrontend): def __init__(self, conf, context): - super(FrontendTcpPull, self).__init__(conf, zmq_async.get_poller(), - context) + poller = zmq_async.get_poller(native_zmq=conf.rpc_zmq_native) + super(FrontendTcpPull, self).__init__(conf, poller, context) self.frontend = self.context.socket(zmq.PULL) address = zmq_topic.get_tcp_bind_address(conf.rpc_zmq_fanout_port) LOG.info(_LI("Binding to TCP PULL %s") % address) @@ -58,9 +58,8 @@ class FrontendTcpPull(base_proxy.BaseTcpFrontend): class CastPushBackendMatcher(base_proxy.BaseBackendMatcher): def __init__(self, conf, context): - super(CastPushBackendMatcher, self).__init__(conf, - zmq_async.get_poller(), - context) + poller = zmq_async.get_poller(native_zmq=conf.rpc_zmq_native) + super(CastPushBackendMatcher, self).__init__(conf, poller, context) self.backend = self.context.socket(zmq.PUSH) def _get_topic(self, message): diff --git a/oslo_messaging/_drivers/zmq_driver/broker/zmq_fanout_proxy.py b/oslo_messaging/_drivers/zmq_driver/broker/zmq_fanout_proxy.py index 131101661..8d1f8b185 100644 --- a/oslo_messaging/_drivers/zmq_driver/broker/zmq_fanout_proxy.py +++ b/oslo_messaging/_drivers/zmq_driver/broker/zmq_fanout_proxy.py @@ -24,9 +24,8 @@ zmq = zmq_async.import_zmq() class PublisherBackend(base_proxy.BaseBackendMatcher): def __init__(self, conf, context): - super(PublisherBackend, self).__init__(conf, - zmq_async.get_poller(), - context) + poller = zmq_async.get_poller(native_zmq=conf.rpc_zmq_native) + super(PublisherBackend, self).__init__(conf, poller, context) self.backend = self.context.socket(zmq.PUB) self.backend.bind(zmq_topic.get_ipc_address_fanout(conf)) diff --git a/oslo_messaging/_drivers/zmq_driver/broker/zmq_universal_proxy.py b/oslo_messaging/_drivers/zmq_driver/broker/zmq_universal_proxy.py index 1d8982d41..c57a60f9f 100644 --- a/oslo_messaging/_drivers/zmq_driver/broker/zmq_universal_proxy.py +++ b/oslo_messaging/_drivers/zmq_driver/broker/zmq_universal_proxy.py @@ -17,6 +17,7 @@ import logging import oslo_messaging._drivers.zmq_driver.broker.zmq_base_proxy as base_proxy from oslo_messaging._drivers.zmq_driver.broker import zmq_call_proxy from oslo_messaging._drivers.zmq_driver.broker import zmq_fanout_proxy +from oslo_messaging._drivers.zmq_driver import zmq_async from oslo_messaging._drivers.zmq_driver import zmq_serializer from oslo_messaging._i18n import _LI @@ -27,27 +28,34 @@ class UniversalProxy(base_proxy.BaseProxy): def __init__(self, conf, context): super(UniversalProxy, self).__init__(conf, context) - self.tcp_frontend = zmq_call_proxy.FrontendTcpRouter(conf, context) - self.backend_matcher = BackendMatcher(conf, context) + self.poller = zmq_async.get_poller( + native_zmq=conf.rpc_zmq_native) + self.tcp_frontend = zmq_call_proxy.FrontendTcpRouter( + conf, context, poller=self.poller) + self.backend_matcher = BackendMatcher( + conf, context, poller=self.poller) call = zmq_serializer.CALL_TYPE self.call_backend = self.backend_matcher.backends[call] 
LOG.info(_LI("Starting universal-proxy thread")) def run(self): - message = self.tcp_frontend.receive_incoming() - if message is not None: - self.backend_matcher.redirect_to_backend(message) + message, socket = self.poller.poll(self.conf.rpc_poll_timeout) + if message is None: + return - reply, socket = self.call_backend.receive_outgoing_reply() - if reply is not None: - self.tcp_frontend.redirect_outgoing_reply(reply) + LOG.info(_LI("Received message at universal proxy: %s") % str(message)) + + if socket == self.tcp_frontend.frontend: + self.backend_matcher.redirect_to_backend(message) + else: + self.tcp_frontend.redirect_outgoing_reply(message) class BackendMatcher(base_proxy.BaseBackendMatcher): - def __init__(self, conf, context): - super(BackendMatcher, self).__init__(conf, None, context) - direct_backend = zmq_call_proxy.DealerBackend(conf, context) + def __init__(self, conf, context, poller=None): + super(BackendMatcher, self).__init__(conf, poller, context) + direct_backend = zmq_call_proxy.DealerBackend(conf, context, poller) self.backends[zmq_serializer.CALL_TYPE] = direct_backend self.backends[zmq_serializer.CAST_TYPE] = direct_backend fanout_backend = zmq_fanout_proxy.PublisherBackend(conf, context) diff --git a/oslo_messaging/_drivers/zmq_driver/poller/threading_poller.py b/oslo_messaging/_drivers/zmq_driver/poller/threading_poller.py index e4317c487..db9c1463a 100644 --- a/oslo_messaging/_drivers/zmq_driver/poller/threading_poller.py +++ b/oslo_messaging/_drivers/zmq_driver/poller/threading_poller.py @@ -15,36 +15,60 @@ import logging import threading +from oslo_utils import eventletutils import zmq from oslo_messaging._drivers.zmq_driver import zmq_poller LOG = logging.getLogger(__name__) +_threading = threading + +if eventletutils.EVENTLET_AVAILABLE: + import eventlet + _threading = eventlet.patcher.original('threading') + class ThreadingPoller(zmq_poller.ZmqPoller): def __init__(self): self.poller = zmq.Poller() + self.recv_methods = {} - def register(self, socket): - self.poller.register(socket, zmq.POLLOUT) + def register(self, socket, recv_method=None): + if recv_method is not None: + self.recv_methods[socket] = recv_method + self.poller.register(socket, zmq.POLLIN) def poll(self, timeout=None): - socks = dict(self.poller.poll(timeout)) - for socket in socks: - incoming = socket.recv() - return incoming + timeout = timeout * 1000 # zmq poller waits milliseconds + sockets = dict(self.poller.poll(timeout=timeout)) + if not sockets: + return None, None + for socket in sockets: + if socket in self.recv_methods: + return self.recv_methods[socket](socket) + else: + return socket.recv_multipart(), socket class ThreadingExecutor(zmq_poller.Executor): def __init__(self, method): - thread = threading.Thread(target=method) - super(ThreadingExecutor, self).__init__(thread) + self._method = method + super(ThreadingExecutor, self).__init__( + _threading.Thread(target=self._loop)) + self._stop = _threading.Event() + + def _loop(self): + while not self._stop.is_set(): + self._method() def execute(self): self.thread.start() + def stop(self): + self._stop.set() + def wait(self): self.thread.join() diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_call_request.py b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_call_request.py index d66e5d4de..47a87d1d2 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_call_request.py +++ b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_call_request.py @@ -60,8 +60,9 @@ class CallRequest(Request): raise 
oslo_messaging.MessagingTimeout( "Timeout %s seconds was reached" % self.timeout) - if reply['failure']: + if reply[zmq_serializer.FIELD_FAILURE]: raise rpc_common.deserialize_remote_exception( - reply['failure'], self.allowed_remote_exmods) + reply[zmq_serializer.FIELD_FAILURE], + self.allowed_remote_exmods) else: - return reply['reply'] + return reply[zmq_serializer.FIELD_REPLY] diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_call_responder.py b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_call_responder.py index 59b46e535..7f7ec57a3 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_call_responder.py +++ b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_call_responder.py @@ -19,6 +19,7 @@ from oslo_messaging._drivers import base from oslo_messaging._drivers import common as rpc_common from oslo_messaging._drivers.zmq_driver.rpc.server import zmq_base_consumer from oslo_messaging._drivers.zmq_driver import zmq_async +from oslo_messaging._drivers.zmq_driver import zmq_serializer from oslo_messaging._drivers.zmq_driver import zmq_topic as topic_utils from oslo_messaging._i18n import _LE @@ -41,9 +42,9 @@ class ZmqIncomingRequest(base.IncomingMessage): if failure is not None: failure = rpc_common.serialize_remote_exception(failure, log_failure) - message_reply = {u'reply': reply, - u'failure': failure, - u'log_failure': log_failure} + message_reply = {zmq_serializer.FIELD_REPLY: reply, + zmq_serializer.FIELD_FAILURE: failure, + zmq_serializer.FIELD_LOG_FAILURE: log_failure} LOG.debug("Replying %s REP", (str(message_reply))) self.received = True self.reply_socket.send(self.reply_id, zmq.SNDMORE) diff --git a/oslo_messaging/_drivers/zmq_driver/zmq_async.py b/oslo_messaging/_drivers/zmq_driver/zmq_async.py index 3694d0f5a..261746392 100644 --- a/oslo_messaging/_drivers/zmq_driver/zmq_async.py +++ b/oslo_messaging/_drivers/zmq_driver/zmq_async.py @@ -23,8 +23,12 @@ LOG = logging.getLogger(__name__) green_zmq = importutils.try_import('eventlet.green.zmq') -def import_zmq(): - imported_zmq = green_zmq or importutils.try_import('zmq') +def import_zmq(native_zmq=False): + if native_zmq: + imported_zmq = importutils.try_import('zmq') + else: + imported_zmq = green_zmq or importutils.try_import('zmq') + if imported_zmq is None: errmsg = _LE("ZeroMQ not found!") LOG.error(errmsg) @@ -32,28 +36,28 @@ def import_zmq(): return imported_zmq -def get_poller(): - if green_zmq: +def get_poller(native_zmq=False): + if native_zmq or green_zmq is None: + from oslo_messaging._drivers.zmq_driver.poller import threading_poller + return threading_poller.ThreadingPoller() + else: from oslo_messaging._drivers.zmq_driver.poller import green_poller return green_poller.GreenPoller() - else: + + +def get_reply_poller(native_zmq=False): + if native_zmq or green_zmq is None: from oslo_messaging._drivers.zmq_driver.poller import threading_poller return threading_poller.ThreadingPoller() - - -def get_reply_poller(): - if green_zmq: + else: from oslo_messaging._drivers.zmq_driver.poller import green_poller return green_poller.HoldReplyPoller() - else: + + +def get_executor(method, native_zmq=False): + if native_zmq or green_zmq is None: from oslo_messaging._drivers.zmq_driver.poller import threading_poller - return threading_poller.ThreadingPoller() - - -def get_executor(method): - if green_zmq is not None: + return threading_poller.ThreadingExecutor(method) + else: from oslo_messaging._drivers.zmq_driver.poller import green_poller return green_poller.GreenExecutor(method) - else: - from 
oslo_messaging._drivers.zmq_driver.poller import threading_poller - return threading_poller.ThreadingExecutor() diff --git a/oslo_messaging/_drivers/zmq_driver/zmq_serializer.py b/oslo_messaging/_drivers/zmq_driver/zmq_serializer.py index 64145ab4f..ef422ff8d 100644 --- a/oslo_messaging/_drivers/zmq_driver/zmq_serializer.py +++ b/oslo_messaging/_drivers/zmq_driver/zmq_serializer.py @@ -26,6 +26,10 @@ LOG = logging.getLogger(__name__) MESSAGE_CALL_TYPE_POSITION = 2 MESSAGE_CALL_TOPIC_POSITION = 3 +FIELD_FAILURE = 'failure' +FIELD_REPLY = 'reply' +FIELD_LOG_FAILURE = 'log_failure' + CALL_TYPE = 'call' CAST_TYPE = 'cast' FANOUT_TYPE = 'fanout' diff --git a/oslo_messaging/tests/drivers/test_impl_zmq.py b/oslo_messaging/tests/drivers/test_impl_zmq.py index a2499ce10..dd6df4448 100644 --- a/oslo_messaging/tests/drivers/test_impl_zmq.py +++ b/oslo_messaging/tests/drivers/test_impl_zmq.py @@ -150,7 +150,7 @@ class TestZmqBasics(ZmqBaseTestCase): target, {}, {'method': 'hello-world', 'tx_id': 1}, wait_for_reply=True) - self.assertIsNotNone(result) + self.assertTrue(result) def test_send_noreply(self): """Cast() with topic.""" diff --git a/setup-test-env-zmq.sh b/setup-test-env-zmq.sh index c3c8e33c1..b27ee9d3f 100755 --- a/setup-test-env-zmq.sh +++ b/setup-test-env-zmq.sh @@ -24,7 +24,4 @@ redis-server --port $ZMQ_REDIS_PORT & oslo-messaging-zmq-receiver --config-file ${DATADIR}/zmq.conf > ${DATADIR}/receiver.log 2>&1 & -# FIXME(sileht): This does the same kind of setup that devstack does -# But this doesn't work yet, a zeromq maintener should take a look on that - $* From 9e4831c02233a1d50ca78877e28389f5c214c562 Mon Sep 17 00:00:00 2001 From: Victor Sergeyev Date: Fri, 10 Jul 2015 16:45:20 +0300 Subject: [PATCH 11/28] ZMQ: Initial matchmaker implementation This patch replaces the old outdated matchmakers and replace it into the new ones. Call/Cast test_specific_server() functional tests passes now. 
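For reference, the new matchmaker is resolved through the
'oslo.messaging.zmq.matchmaker' entry point and exposes a small
register/lookup interface. A rough usage sketch (illustrative only,
mirroring the code added in this patch; the topic name is arbitrary):

    from oslo_config import cfg
    from stevedore import driver

    from oslo_messaging._drivers import impl_zmq

    conf = cfg.CONF
    conf.register_opts(impl_zmq.zmq_opts)

    # Load the configured matchmaker ("dummy" by default, "redis" when a
    # Redis server is available), the same way ZmqDriver now does.
    matchmaker = driver.DriverManager(
        'oslo.messaging.zmq.matchmaker',
        conf.rpc_zmq_matchmaker,
    ).driver(conf)

    # A server registers the topic it listens on ...
    matchmaker.register(topic='test_topic', hostname=conf.rpc_zmq_host)

    # ... and a client resolves a host for that topic before connecting.
    host = matchmaker.get_single_host('test_topic')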
Change-Id: I8635396110d30d26812f39b242fbbabd1a0feaaa --- oslo_messaging/_drivers/impl_zmq.py | 9 +- oslo_messaging/_drivers/matchmaker.py | 321 ------------------ oslo_messaging/_drivers/matchmaker_redis.py | 145 -------- oslo_messaging/_drivers/matchmaker_ring.py | 105 ------ .../zmq_driver/matchmaker/__init__.py | 0 .../_drivers/zmq_driver/matchmaker/base.py | 70 ++++ .../zmq_driver/matchmaker/matchmaker_redis.py | 55 +++ .../zmq_driver/rpc/client/zmq_call_request.py | 6 +- .../zmq_driver/rpc/client/zmq_cast_dealer.py | 6 +- .../zmq_driver/rpc/client/zmq_client.py | 3 +- .../rpc/server/zmq_call_responder.py | 2 +- .../zmq_driver/rpc/server/zmq_server.py | 4 + .../_drivers/zmq_driver/zmq_topic.py | 4 +- oslo_messaging/conffixture.py | 3 +- oslo_messaging/opts.py | 6 +- .../tests/drivers/test_impl_matchmaker.py | 75 ++++ .../tests/drivers/test_matchmaker.py | 69 ---- .../tests/drivers/test_matchmaker_ring.py | 73 ---- oslo_messaging/tests/functional/utils.py | 9 +- oslo_messaging/tests/test_opts.py | 3 +- setup.cfg | 5 +- tox.ini | 5 +- 22 files changed, 235 insertions(+), 743 deletions(-) delete mode 100644 oslo_messaging/_drivers/matchmaker.py delete mode 100644 oslo_messaging/_drivers/matchmaker_redis.py delete mode 100644 oslo_messaging/_drivers/matchmaker_ring.py create mode 100644 oslo_messaging/_drivers/zmq_driver/matchmaker/__init__.py create mode 100644 oslo_messaging/_drivers/zmq_driver/matchmaker/base.py create mode 100644 oslo_messaging/_drivers/zmq_driver/matchmaker/matchmaker_redis.py create mode 100644 oslo_messaging/tests/drivers/test_impl_matchmaker.py delete mode 100644 oslo_messaging/tests/drivers/test_matchmaker.py delete mode 100644 oslo_messaging/tests/drivers/test_matchmaker_ring.py diff --git a/oslo_messaging/_drivers/impl_zmq.py b/oslo_messaging/_drivers/impl_zmq.py index fbd9f081f..69d2bf5c8 100644 --- a/oslo_messaging/_drivers/impl_zmq.py +++ b/oslo_messaging/_drivers/impl_zmq.py @@ -17,6 +17,7 @@ import pprint import socket from oslo_config import cfg +from stevedore import driver from oslo_messaging._drivers import base from oslo_messaging._drivers import common as rpc_common @@ -39,7 +40,7 @@ zmq_opts = [ # The module.Class to use for matchmaking. cfg.StrOpt( 'rpc_zmq_matchmaker', - default='local', + default='dummy', help='MatchMaker driver.', ), @@ -97,7 +98,11 @@ class ZmqDriver(base.BaseDriver): self.conf = conf self.server = None self.client = None - self.matchmaker = None + self.matchmaker = driver.DriverManager( + 'oslo.messaging.zmq.matchmaker', + self.conf.rpc_zmq_matchmaker, + ).driver(self.conf) + super(ZmqDriver, self).__init__(conf, url, default_exchange, allowed_remote_exmods) diff --git a/oslo_messaging/_drivers/matchmaker.py b/oslo_messaging/_drivers/matchmaker.py deleted file mode 100644 index 82b0fbd2b..000000000 --- a/oslo_messaging/_drivers/matchmaker.py +++ /dev/null @@ -1,321 +0,0 @@ -# Copyright 2011 Cloudscaling Group, Inc -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-""" -The MatchMaker classes should except a Topic or Fanout exchange key and -return keys for direct exchanges, per (approximate) AMQP parlance. -""" - -import contextlib -import logging - -import eventlet -from oslo_config import cfg - -from oslo_messaging._i18n import _ - -matchmaker_opts = [ - cfg.IntOpt('matchmaker_heartbeat_freq', - default=300, - help='Heartbeat frequency.'), - cfg.IntOpt('matchmaker_heartbeat_ttl', - default=600, - help='Heartbeat time-to-live.'), -] - -CONF = cfg.CONF -CONF.register_opts(matchmaker_opts) -LOG = logging.getLogger(__name__) -contextmanager = contextlib.contextmanager - - -class MatchMakerException(Exception): - """Signified a match could not be found.""" - message = _("Match not found by MatchMaker.") - - -class Exchange(object): - """Implements lookups. - - Subclass this to support hashtables, dns, etc. - """ - def __init__(self): - pass - - def run(self, key): - raise NotImplementedError() - - -class Binding(object): - """A binding on which to perform a lookup.""" - def __init__(self): - pass - - def test(self, key): - raise NotImplementedError() - - -class MatchMakerBase(object): - """Match Maker Base Class. - - Build off HeartbeatMatchMakerBase if building a heartbeat-capable - MatchMaker. - """ - def __init__(self): - # Array of tuples. Index [2] toggles negation, [3] is last-if-true - self.bindings = [] - - self.no_heartbeat_msg = _('Matchmaker does not implement ' - 'registration or heartbeat.') - - def register(self, key, host): - """Register a host on a backend. - - Heartbeats, if applicable, may keepalive registration. - """ - pass - - def ack_alive(self, key, host): - """Acknowledge that a key.host is alive. - - Used internally for updating heartbeats, but may also be used - publicly to acknowledge a system is alive (i.e. rpc message - successfully sent to host) - """ - pass - - def is_alive(self, topic, host): - """Checks if a host is alive.""" - pass - - def expire(self, topic, host): - """Explicitly expire a host's registration.""" - pass - - def send_heartbeats(self): - """Send all heartbeats. - - Use start_heartbeat to spawn a heartbeat greenthread, - which loops this method. - """ - pass - - def unregister(self, key, host): - """Unregister a topic.""" - pass - - def start_heartbeat(self): - """Spawn heartbeat greenthread.""" - pass - - def stop_heartbeat(self): - """Destroys the heartbeat greenthread.""" - pass - - def add_binding(self, binding, rule, last=True): - self.bindings.append((binding, rule, False, last)) - - # NOTE(ewindisch): kept the following method in case we implement the - # underlying support. - # def add_negate_binding(self, binding, rule, last=True): - # self.bindings.append((binding, rule, True, last)) - - def queues(self, key): - workers = [] - - # bit is for negate bindings - if we choose to implement it. - # last stops processing rules if this matches. - for (binding, exchange, bit, last) in self.bindings: - if binding.test(key): - workers.extend(exchange.run(key)) - - # Support last. - if last: - return workers - return workers - - -class HeartbeatMatchMakerBase(MatchMakerBase): - """Base for a heart-beat capable MatchMaker. - - Provides common methods for registering, unregistering, and maintaining - heartbeats. - """ - def __init__(self): - self.hosts = set() - self._heart = None - self.host_topic = {} - - super(HeartbeatMatchMakerBase, self).__init__() - - def send_heartbeats(self): - """Send all heartbeats. - - Use start_heartbeat to spawn a heartbeat greenthread, - which loops this method. 
- """ - for key, host in self.host_topic.keys(): - self.ack_alive(key, host) - - def ack_alive(self, key, host): - """Acknowledge that a host.topic is alive. - - Used internally for updating heartbeats, but may also be used - publicly to acknowledge a system is alive (i.e. rpc message - successfully sent to host) - """ - raise NotImplementedError("Must implement ack_alive") - - def backend_register(self, key, host): - """Implements registration logic. - - Called by register(self,key,host) - """ - raise NotImplementedError("Must implement backend_register") - - def backend_unregister(self, key, key_host): - """Implements de-registration logic. - - Called by unregister(self,key,host) - """ - raise NotImplementedError("Must implement backend_unregister") - - def register(self, key, host): - """Register a host on a backend. - - Heartbeats, if applicable, may keepalive registration. - """ - self.hosts.add(host) - self.host_topic[(key, host)] = host - key_host = '.'.join((key, host)) - - self.backend_register(key, key_host) - - self.ack_alive(key, host) - - def unregister(self, key, host): - """Unregister a topic.""" - if (key, host) in self.host_topic: - del self.host_topic[(key, host)] - - self.hosts.discard(host) - self.backend_unregister(key, '.'.join((key, host))) - - LOG.info(_("Matchmaker unregistered: %(key)s, %(host)s"), - {'key': key, 'host': host}) - - def start_heartbeat(self): - """Implementation of MatchMakerBase.start_heartbeat. - - Launches greenthread looping send_heartbeats(), - yielding for CONF.matchmaker_heartbeat_freq seconds - between iterations. - """ - if not self.hosts: - raise MatchMakerException( - _("Register before starting heartbeat.")) - - def do_heartbeat(): - while True: - self.send_heartbeats() - eventlet.sleep(CONF.matchmaker_heartbeat_freq) - - self._heart = eventlet.spawn(do_heartbeat) - - def stop_heartbeat(self): - """Destroys the heartbeat greenthread.""" - if self._heart: - self._heart.kill() - - -class DirectBinding(Binding): - """Specifies a host in the key via a '.' character. - - Although dots are used in the key, the behavior here is - that it maps directly to a host, thus direct. - """ - def test(self, key): - return '.' in key - - -class TopicBinding(Binding): - """Where a 'bare' key without dots. - - AMQP generally considers topic exchanges to be those *with* dots, - but we deviate here in terminology as the behavior here matches - that of a topic exchange (whereas where there are dots, behavior - matches that of a direct exchange. - """ - def test(self, key): - return '.' not in key - - -class FanoutBinding(Binding): - """Match on fanout keys, where key starts with 'fanout.' string.""" - def test(self, key): - return key.startswith('fanout~') - - -class StubExchange(Exchange): - """Exchange that does nothing.""" - def run(self, key): - return [(key, None)] - - -class LocalhostExchange(Exchange): - """Exchange where all direct topics are local.""" - def __init__(self, host='localhost'): - self.host = host - super(Exchange, self).__init__() - - def run(self, key): - return [('.'.join((key.split('.')[0], self.host)), self.host)] - - -class DirectExchange(Exchange): - """Exchange where all topic keys are split, sending to second half. - - i.e. "compute.host" sends a message to "compute.host" running on "host" - """ - def __init__(self): - super(Exchange, self).__init__() - - def run(self, key): - e = key.split('.', 1)[1] - return [(key, e)] - - -class MatchMakerLocalhost(MatchMakerBase): - """Match Maker where all bare topics resolve to localhost. 
- - Useful for testing. - """ - def __init__(self, host='localhost'): - super(MatchMakerLocalhost, self).__init__() - self.add_binding(FanoutBinding(), LocalhostExchange(host)) - self.add_binding(DirectBinding(), DirectExchange()) - self.add_binding(TopicBinding(), LocalhostExchange(host)) - - -class MatchMakerStub(MatchMakerBase): - """Match Maker where topics are untouched. - - Useful for testing, or for AMQP/brokered queues. - Will not work where knowledge of hosts is known (i.e. zeromq) - """ - def __init__(self): - super(MatchMakerStub, self).__init__() - - self.add_binding(FanoutBinding(), StubExchange()) - self.add_binding(DirectBinding(), StubExchange()) - self.add_binding(TopicBinding(), StubExchange()) diff --git a/oslo_messaging/_drivers/matchmaker_redis.py b/oslo_messaging/_drivers/matchmaker_redis.py deleted file mode 100644 index 290b60351..000000000 --- a/oslo_messaging/_drivers/matchmaker_redis.py +++ /dev/null @@ -1,145 +0,0 @@ -# Copyright 2013 Cloudscaling Group, Inc -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -""" -The MatchMaker classes should accept a Topic or Fanout exchange key and -return keys for direct exchanges, per (approximate) AMQP parlance. -""" - -from oslo_config import cfg -from oslo_utils import importutils - -from oslo_messaging._drivers import matchmaker as mm_common - -redis = importutils.try_import('redis') - - -matchmaker_redis_opts = [ - cfg.StrOpt('host', - default='127.0.0.1', - help='Host to locate redis.'), - cfg.IntOpt('port', - default=6379, - help='Use this port to connect to redis host.'), - cfg.StrOpt('password', - help='Password for Redis server (optional).'), -] - -CONF = cfg.CONF -opt_group = cfg.OptGroup(name='matchmaker_redis', - title='Options for Redis-based MatchMaker') -CONF.register_group(opt_group) -CONF.register_opts(matchmaker_redis_opts, opt_group) - - -class RedisExchange(mm_common.Exchange): - def __init__(self, matchmaker): - self.matchmaker = matchmaker - self.redis = matchmaker.redis - super(RedisExchange, self).__init__() - - -class RedisTopicExchange(RedisExchange): - """Exchange where all topic keys are split, sending to second half. - - i.e. "compute.host" sends a message to "compute" running on "host" - """ - def run(self, topic): - while True: - member_name = self.redis.srandmember(topic) - - if not member_name: - # If this happens, there are no - # longer any members. 
- break - - if not self.matchmaker.is_alive(topic, member_name): - continue - - host = member_name.split('.', 1)[1] - return [(member_name, host)] - return [] - - -class RedisFanoutExchange(RedisExchange): - """Return a list of all hosts.""" - def run(self, topic): - topic = topic.split('~', 1)[1] - hosts = self.redis.smembers(topic) - good_hosts = filter( - lambda host: self.matchmaker.is_alive(topic, host), hosts) - - return [(x, x.split('.', 1)[1]) for x in good_hosts] - - -class MatchMakerRedis(mm_common.HeartbeatMatchMakerBase): - """MatchMaker registering and looking-up hosts with a Redis server.""" - def __init__(self): - super(MatchMakerRedis, self).__init__() - - if not redis: - raise ImportError("Failed to import module redis.") - - self.redis = redis.StrictRedis( - host=CONF.matchmaker_redis.host, - port=CONF.matchmaker_redis.port, - password=CONF.matchmaker_redis.password) - - self.add_binding(mm_common.FanoutBinding(), RedisFanoutExchange(self)) - self.add_binding(mm_common.DirectBinding(), mm_common.DirectExchange()) - self.add_binding(mm_common.TopicBinding(), RedisTopicExchange(self)) - - def ack_alive(self, key, host): - topic = "%s.%s" % (key, host) - if not self.redis.expire(topic, CONF.matchmaker_heartbeat_ttl): - # If we could not update the expiration, the key - # might have been pruned. Re-register, creating a new - # key in Redis. - self.register(key, host) - - def is_alive(self, topic, host): - # After redis 2.8, if the specialized key doesn't exist, - # TTL fuction would return -2. If key exists, - # but doesn't have expiration associated, - # TTL func would return -1. For more information, - # please visit http://redis.io/commands/ttl - if self.redis.ttl(host) == -2: - self.expire(topic, host) - return False - return True - - def expire(self, topic, host): - with self.redis.pipeline() as pipe: - pipe.multi() - pipe.delete(host) - pipe.srem(topic, host) - pipe.execute() - - def backend_register(self, key, key_host): - with self.redis.pipeline() as pipe: - pipe.multi() - pipe.sadd(key, key_host) - - # No value is needed, we just - # care if it exists. Sets aren't viable - # because only keys can expire. - pipe.sadd(key_host, '') - - pipe.execute() - - def backend_unregister(self, key, key_host): - with self.redis.pipeline() as pipe: - pipe.multi() - pipe.srem(key, key_host) - pipe.delete(key_host) - pipe.execute() diff --git a/oslo_messaging/_drivers/matchmaker_ring.py b/oslo_messaging/_drivers/matchmaker_ring.py deleted file mode 100644 index 0fd918cb5..000000000 --- a/oslo_messaging/_drivers/matchmaker_ring.py +++ /dev/null @@ -1,105 +0,0 @@ -# Copyright 2011-2013 Cloudscaling Group, Inc -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -""" -The MatchMaker classes should except a Topic or Fanout exchange key and -return keys for direct exchanges, per (approximate) AMQP parlance. 
-""" - -import itertools -import json -import logging - -from oslo_config import cfg - -from oslo_messaging._drivers import matchmaker as mm -from oslo_messaging._i18n import _ - -matchmaker_opts = [ - # Matchmaker ring file - cfg.StrOpt('ringfile', - deprecated_name='matchmaker_ringfile', - deprecated_group='DEFAULT', - default='/etc/oslo/matchmaker_ring.json', - help='Matchmaker ring file (JSON).'), -] - -CONF = cfg.CONF -CONF.register_opts(matchmaker_opts, 'matchmaker_ring') -LOG = logging.getLogger(__name__) - - -class RingExchange(mm.Exchange): - """Match Maker where hosts are loaded from a static JSON formatted file. - - __init__ takes optional ring dictionary argument, otherwise - loads the ringfile from CONF.mathcmaker_ringfile. - """ - def __init__(self, ring=None): - super(RingExchange, self).__init__() - - if ring: - self.ring = ring - else: - fh = open(CONF.matchmaker_ring.ringfile, 'r') - self.ring = json.load(fh) - fh.close() - - self.ring0 = {} - for k in self.ring.keys(): - self.ring0[k] = itertools.cycle(self.ring[k]) - - def _ring_has(self, key): - return key in self.ring0 - - -class RoundRobinRingExchange(RingExchange): - """A Topic Exchange based on a hashmap.""" - def __init__(self, ring=None): - super(RoundRobinRingExchange, self).__init__(ring) - - def run(self, key): - if not self._ring_has(key): - LOG.warn( - _("No key defining hosts for topic '%s', " - "see ringfile"), key - ) - return [] - host = next(self.ring0[key]) - return [(key + '.' + host, host)] - - -class FanoutRingExchange(RingExchange): - """Fanout Exchange based on a hashmap.""" - def __init__(self, ring=None): - super(FanoutRingExchange, self).__init__(ring) - - def run(self, key): - # Assume starts with "fanout~", strip it for lookup. - nkey = key.split('fanout~')[1:][0] - if not self._ring_has(nkey): - LOG.warn( - _("No key defining hosts for topic '%s', " - "see ringfile"), nkey - ) - return [] - return map(lambda x: (key + '.' + x, x), self.ring[nkey]) - - -class MatchMakerRing(mm.MatchMakerBase): - """Match Maker where hosts are loaded from a static hashmap.""" - def __init__(self, ring=None): - super(MatchMakerRing, self).__init__() - self.add_binding(mm.FanoutBinding(), FanoutRingExchange(ring)) - self.add_binding(mm.DirectBinding(), mm.DirectExchange()) - self.add_binding(mm.TopicBinding(), RoundRobinRingExchange(ring)) diff --git a/oslo_messaging/_drivers/zmq_driver/matchmaker/__init__.py b/oslo_messaging/_drivers/zmq_driver/matchmaker/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/oslo_messaging/_drivers/zmq_driver/matchmaker/base.py b/oslo_messaging/_drivers/zmq_driver/matchmaker/base.py new file mode 100644 index 000000000..29e9d52a2 --- /dev/null +++ b/oslo_messaging/_drivers/zmq_driver/matchmaker/base.py @@ -0,0 +1,70 @@ +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import abc +import collections +import logging + +import six + +from oslo_messaging._i18n import _LI, _LW + + +LOG = logging.getLogger(__name__) + + +@six.add_metaclass(abc.ABCMeta) +class MatchMakerBase(object): + + def __init__(self, conf, *args, **kwargs): + super(MatchMakerBase, self).__init__(*args, **kwargs) + + self.conf = conf + + @abc.abstractmethod + def register(self, topic, hostname): + """Register topic on nameserver""" + + @abc.abstractmethod + def get_hosts(self, topic): + """Get hosts from nameserver by topic""" + + def get_single_host(self, topic): + """Get a single host by topic""" + hosts = self.get_hosts(topic) + if len(hosts) == 0: + LOG.warning(_LW("No hosts were found for topic %s. Using " + "localhost") % topic) + return "localhost" + elif len(hosts) == 1: + LOG.info(_LI("A single host found for topic %s.") % topic) + return hosts[0] + else: + LOG.warning(_LW("Multiple hosts were found for topic %s. Using " + "the first one.") % topic) + return hosts[0] + + +class DummyMatchMaker(MatchMakerBase): + + def __init__(self, conf, *args, **kwargs): + super(DummyMatchMaker, self).__init__(conf, *args, **kwargs) + + self._cache = collections.defaultdict(list) + + def register(self, topic, hostname): + if hostname not in self._cache[topic]: + self._cache[topic].append(hostname) + + def get_hosts(self, topic): + return self._cache[topic] diff --git a/oslo_messaging/_drivers/zmq_driver/matchmaker/matchmaker_redis.py b/oslo_messaging/_drivers/zmq_driver/matchmaker/matchmaker_redis.py new file mode 100644 index 000000000..f1a6f3827 --- /dev/null +++ b/oslo_messaging/_drivers/zmq_driver/matchmaker/matchmaker_redis.py @@ -0,0 +1,55 @@ +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import logging + +from oslo_config import cfg +import redis + +from oslo_messaging._drivers.zmq_driver.matchmaker import base + + +LOG = logging.getLogger(__name__) + + +matchmaker_redis_opts = [ + cfg.StrOpt('host', + default='127.0.0.1', + help='Host to locate redis.'), + cfg.IntOpt('port', + default=6379, + help='Use this port to connect to redis host.'), + cfg.StrOpt('password', + default='', + secret=True, + help='Password for Redis server (optional).'), +] + + +class RedisMatchMaker(base.MatchMakerBase): + + def __init__(self, conf, *args, **kwargs): + super(RedisMatchMaker, self).__init__(conf, *args, **kwargs) + + self._redis = redis.StrictRedis( + host=self.conf.matchmaker_redis.host, + port=self.conf.matchmaker_redis.port, + password=self.conf.matchmaker_redis.password, + ) + + def register(self, topic, hostname): + if hostname not in self.get_hosts(topic): + self._redis.lpush(topic, hostname) + + def get_hosts(self, topic): + return self._redis.lrange(topic, 0, -1)[::-1] diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_call_request.py b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_call_request.py index 47a87d1d2..11c190bf9 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_call_request.py +++ b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_call_request.py @@ -30,8 +30,9 @@ zmq = zmq_async.import_zmq() class CallRequest(Request): def __init__(self, conf, target, context, message, timeout=None, - retry=None, allowed_remote_exmods=None): + retry=None, allowed_remote_exmods=None, matchmaker=None): self.allowed_remote_exmods = allowed_remote_exmods or [] + self.matchmaker = matchmaker try: self.zmq_context = zmq.Context() @@ -41,8 +42,9 @@ class CallRequest(Request): zmq_serializer.CALL_TYPE, timeout, retry) + self.host = self.matchmaker.get_single_host(self.topic.topic) self.connect_address = zmq_topic.get_tcp_address_call(conf, - self.topic) + self.host) LOG.info(_LI("Connecting REQ to %s") % self.connect_address) self.socket.connect(self.connect_address) except zmq.ZMQError as e: diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_dealer.py b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_dealer.py index 30a117c1e..6f6640575 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_dealer.py +++ b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_dealer.py @@ -58,7 +58,8 @@ class DealerCastPublisher(zmq_cast_publisher.CastPublisherBase): def cast(self, target, context, message, timeout=None, retry=None): topic = zmq_topic.Topic.from_target(self.conf, target) - connect_address = zmq_topic.get_tcp_address_call(self.conf, topic) + host = self.matchmaker.get_single_host(topic.topic) + connect_address = zmq_topic.get_tcp_address_call(self.conf, host) dealer_socket = self._create_socket(connect_address) request = CastRequest(self.conf, target, context, message, dealer_socket, connect_address, timeout, retry) @@ -71,6 +72,7 @@ class DealerCastPublisher(zmq_cast_publisher.CastPublisherBase): dealer_socket = self.zmq_context.socket(zmq.DEALER) LOG.info(_LI("Connecting DEALER to %s") % address) dealer_socket.connect(address) + return dealer_socket except zmq.ZMQError: LOG.error(_LE("Failed connecting DEALER to %s") % address) - return dealer_socket + raise diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_client.py b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_client.py index ec00cb912..cdd291b1f 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_client.py +++ 
b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_client.py @@ -21,6 +21,7 @@ class ZmqClient(object): def __init__(self, conf, matchmaker=None, allowed_remote_exmods=None): self.conf = conf + self.matchmaker = matchmaker self.allowed_remote_exmods = allowed_remote_exmods or [] self.cast_publisher = zmq_cast_dealer.DealerCastPublisher(conf, matchmaker) @@ -28,7 +29,7 @@ class ZmqClient(object): def call(self, target, context, message, timeout=None, retry=None): request = zmq_call_request.CallRequest( self.conf, target, context, message, timeout, retry, - self.allowed_remote_exmods) + self.allowed_remote_exmods, self.matchmaker) return request() def cast(self, target, context, message, timeout=None, retry=None): diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_call_responder.py b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_call_responder.py index 7f7ec57a3..38bc43207 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_call_responder.py +++ b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_call_responder.py @@ -82,7 +82,7 @@ class CallResponder(zmq_base_consumer.ConsumerBase): self.poller) return incoming except zmq.ZMQError as e: - LOG.error(_LE("Receiving message failed ... {}"), e) + LOG.error(_LE("Receiving message failed ... %s") % str(e)) def listen(self, target): topic = topic_utils.Topic.from_target(self.conf, target) diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_server.py b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_server.py index b51ff0187..778f3b273 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_server.py +++ b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_server.py @@ -31,6 +31,7 @@ class ZmqServer(base.Listener): self.conf = conf self.context = zmq.Context() self.poller = zmq_async.get_reply_poller() + self.matchmaker = matchmaker self.call_resp = zmq_call_responder.CallResponder(self, conf, self.poller, self.context) @@ -50,6 +51,9 @@ class ZmqServer(base.Listener): def listen(self, target): LOG.info("[Server] Listen to Target %s" % target) + + self.matchmaker.register(topic=target.topic, + hostname=self.conf.rpc_zmq_host) if target.fanout: self.fanout_resp.listen(target) else: diff --git a/oslo_messaging/_drivers/zmq_driver/zmq_topic.py b/oslo_messaging/_drivers/zmq_driver/zmq_topic.py index 332c81912..f89f5073b 100644 --- a/oslo_messaging/_drivers/zmq_driver/zmq_topic.py +++ b/oslo_messaging/_drivers/zmq_driver/zmq_topic.py @@ -21,8 +21,8 @@ def get_tcp_bind_address(port): return "tcp://*:%s" % port -def get_tcp_address_call(conf, topic): - return "tcp://%s:%s" % (topic.server, conf.rpc_zmq_port) +def get_tcp_address_call(conf, host): + return "tcp://%s:%s" % (host, conf.rpc_zmq_port) def get_ipc_address_cast(conf, topic): diff --git a/oslo_messaging/conffixture.py b/oslo_messaging/conffixture.py index 8a44f9e62..bbf92caf0 100644 --- a/oslo_messaging/conffixture.py +++ b/oslo_messaging/conffixture.py @@ -59,7 +59,8 @@ class ConfFixture(fixtures.Fixture): _import_opts(self.conf, 'oslo_messaging._drivers.impl_zmq', 'zmq_opts') _import_opts(self.conf, - 'oslo_messaging._drivers.matchmaker_redis', + 'oslo_messaging._drivers.zmq_driver.' 
+ 'matchmaker.matchmaker_redis', 'matchmaker_redis_opts', 'matchmaker_redis') _import_opts(self.conf, 'oslo_messaging.rpc.client', '_client_opts') diff --git a/oslo_messaging/opts.py b/oslo_messaging/opts.py index 5911b69d7..0accf30a0 100644 --- a/oslo_messaging/opts.py +++ b/oslo_messaging/opts.py @@ -25,10 +25,8 @@ from oslo_messaging._drivers import base as drivers_base from oslo_messaging._drivers import impl_qpid from oslo_messaging._drivers import impl_rabbit from oslo_messaging._drivers import impl_zmq -from oslo_messaging._drivers import matchmaker -from oslo_messaging._drivers import matchmaker_redis -from oslo_messaging._drivers import matchmaker_ring from oslo_messaging._drivers.protocols.amqp import opts as amqp_opts +from oslo_messaging._drivers.zmq_driver.matchmaker import matchmaker_redis from oslo_messaging._executors import base from oslo_messaging.notify import notifier from oslo_messaging.rpc import client @@ -37,7 +35,6 @@ from oslo_messaging import transport _global_opt_lists = [ drivers_base.base_opts, impl_zmq.zmq_opts, - matchmaker.matchmaker_opts, base._pool_opts, notifier._notifier_opts, client._client_opts, @@ -47,7 +44,6 @@ _global_opt_lists = [ _opts = [ (None, list(itertools.chain(*_global_opt_lists))), ('matchmaker_redis', matchmaker_redis.matchmaker_redis_opts), - ('matchmaker_ring', matchmaker_ring.matchmaker_opts), ('oslo_messaging_amqp', amqp_opts.amqp1_opts), ('oslo_messaging_rabbit', list(itertools.chain(amqp.amqp_opts, impl_rabbit.rabbit_opts))), diff --git a/oslo_messaging/tests/drivers/test_impl_matchmaker.py b/oslo_messaging/tests/drivers/test_impl_matchmaker.py new file mode 100644 index 000000000..8fa82c2a4 --- /dev/null +++ b/oslo_messaging/tests/drivers/test_impl_matchmaker.py @@ -0,0 +1,75 @@ +# Copyright 2014 Canonical, Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +from stevedore import driver +import testscenarios + +from oslo_messaging.tests import utils as test_utils + + +load_tests = testscenarios.load_tests_apply_scenarios + + +class TestImplMatchmaker(test_utils.BaseTestCase): + + scenarios = [ + ("dummy", {"rpc_zmq_matchmaker": "dummy"}), + ("redis", {"rpc_zmq_matchmaker": "redis"}), + ] + + def setUp(self): + super(TestImplMatchmaker, self).setUp() + + self.test_matcher = driver.DriverManager( + 'oslo.messaging.zmq.matchmaker', + self.rpc_zmq_matchmaker, + ).driver(self.conf) + + if self.rpc_zmq_matchmaker == "redis": + self.addCleanup(self.test_matcher._redis.flushdb) + + self.topic = "test_topic" + self.host1 = b"test_host1" + self.host2 = b"test_host2" + + def test_register(self): + self.test_matcher.register(self.topic, self.host1) + + self.assertEqual(self.test_matcher.get_hosts(self.topic), [self.host1]) + self.assertEqual(self.test_matcher.get_single_host(self.topic), + self.host1) + + def test_register_two_hosts(self): + self.test_matcher.register(self.topic, self.host1) + self.test_matcher.register(self.topic, self.host2) + + self.assertEqual(self.test_matcher.get_hosts(self.topic), + [self.host1, self.host2]) + self.assertIn(self.test_matcher.get_single_host(self.topic), + [self.host1, self.host2]) + + def test_register_two_same_hosts(self): + self.test_matcher.register(self.topic, self.host1) + self.test_matcher.register(self.topic, self.host1) + + self.assertEqual(self.test_matcher.get_hosts(self.topic), [self.host1]) + self.assertEqual(self.test_matcher.get_single_host(self.topic), + self.host1) + + def test_get_hosts_wrong_topic(self): + self.assertEqual(self.test_matcher.get_hosts("no_such_topic"), []) + + def test_get_single_host_wrong_topic(self): + self.assertEqual(self.test_matcher.get_single_host("no_such_topic"), + "localhost") diff --git a/oslo_messaging/tests/drivers/test_matchmaker.py b/oslo_messaging/tests/drivers/test_matchmaker.py deleted file mode 100644 index 61c37a92b..000000000 --- a/oslo_messaging/tests/drivers/test_matchmaker.py +++ /dev/null @@ -1,69 +0,0 @@ -# Copyright 2014 Canonical, Ltd. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from oslo_utils import importutils -import testtools - -from oslo_messaging.tests import utils as test_utils - -# NOTE(jamespage) matchmaker tied directly to eventlet -# which is not yet py3 compatible - skip if import fails -matchmaker = ( - importutils.try_import('oslo_messaging._drivers.matchmaker')) - - -@testtools.skipIf(not matchmaker, "matchmaker/eventlet unavailable") -class MatchmakerTest(test_utils.BaseTestCase): - - def test_fanout_binding(self): - matcher = matchmaker.MatchMakerBase() - matcher.add_binding( - matchmaker.FanoutBinding(), matchmaker.DirectExchange()) - self.assertEqual(matcher.queues('hello.world'), []) - self.assertEqual( - matcher.queues('fanout~fantasy.unicorn'), - [('fanout~fantasy.unicorn', 'unicorn')]) - self.assertEqual( - matcher.queues('fanout~fantasy.pony'), - [('fanout~fantasy.pony', 'pony')]) - - def test_topic_binding(self): - matcher = matchmaker.MatchMakerBase() - matcher.add_binding( - matchmaker.TopicBinding(), matchmaker.StubExchange()) - self.assertEqual( - matcher.queues('hello-world'), [('hello-world', None)]) - - def test_direct_binding(self): - matcher = matchmaker.MatchMakerBase() - matcher.add_binding( - matchmaker.DirectBinding(), matchmaker.StubExchange()) - self.assertEqual( - matcher.queues('hello.server'), [('hello.server', None)]) - self.assertEqual(matcher.queues('hello-world'), []) - - def test_localhost_match(self): - matcher = matchmaker.MatchMakerLocalhost() - self.assertEqual( - matcher.queues('hello.server'), [('hello.server', 'server')]) - - # Gets remapped due to localhost exchange - # all bindings default to first match. - self.assertEqual( - matcher.queues('fanout~testing.server'), - [('fanout~testing.localhost', 'localhost')]) - - self.assertEqual( - matcher.queues('hello-world'), - [('hello-world.localhost', 'localhost')]) diff --git a/oslo_messaging/tests/drivers/test_matchmaker_ring.py b/oslo_messaging/tests/drivers/test_matchmaker_ring.py deleted file mode 100644 index 5f156007a..000000000 --- a/oslo_messaging/tests/drivers/test_matchmaker_ring.py +++ /dev/null @@ -1,73 +0,0 @@ -# Copyright 2014 Canonical, Ltd. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from oslo_utils import importutils -import testtools - -from oslo_messaging.tests import utils as test_utils - -# NOTE(jamespage) matchmaker tied directly to eventlet -# which is not yet py3 compatible - skip if import fails -matchmaker_ring = ( - importutils.try_import('oslo_messaging._drivers.matchmaker_ring')) - - -@testtools.skipIf(not matchmaker_ring, "matchmaker/eventlet unavailable") -class MatchmakerRingTest(test_utils.BaseTestCase): - - def setUp(self): - super(MatchmakerRingTest, self).setUp() - self.ring_data = { - "conductor": ["controller1", "node1", "node2", "node3"], - "scheduler": ["controller1", "node1", "node2", "node3"], - "network": ["controller1", "node1", "node2", "node3"], - "cert": ["controller1"], - "console": ["controller1"], - "consoleauth": ["controller1"]} - self.matcher = matchmaker_ring.MatchMakerRing(self.ring_data) - - def test_direct(self): - self.assertEqual( - self.matcher.queues('cert.controller1'), - [('cert.controller1', 'controller1')]) - self.assertEqual( - self.matcher.queues('conductor.node1'), - [('conductor.node1', 'node1')]) - - def test_fanout(self): - self.assertEqual( - self.matcher.queues('fanout~conductor'), - [('fanout~conductor.controller1', 'controller1'), - ('fanout~conductor.node1', 'node1'), - ('fanout~conductor.node2', 'node2'), - ('fanout~conductor.node3', 'node3')]) - - def test_bare_topic(self): - # Round robins through the hosts on the topic - self.assertEqual( - self.matcher.queues('scheduler'), - [('scheduler.controller1', 'controller1')]) - self.assertEqual( - self.matcher.queues('scheduler'), - [('scheduler.node1', 'node1')]) - self.assertEqual( - self.matcher.queues('scheduler'), - [('scheduler.node2', 'node2')]) - self.assertEqual( - self.matcher.queues('scheduler'), - [('scheduler.node3', 'node3')]) - # Cycles loop - self.assertEqual( - self.matcher.queues('scheduler'), - [('scheduler.controller1', 'controller1')]) diff --git a/oslo_messaging/tests/functional/utils.py b/oslo_messaging/tests/functional/utils.py index de1673839..cc54ee5a7 100644 --- a/oslo_messaging/tests/functional/utils.py +++ b/oslo_messaging/tests/functional/utils.py @@ -125,13 +125,8 @@ class RpcServerGroupFixture(fixtures.Fixture): # NOTE(sileht): topic and servier_name must be uniq # to be able to run all tests in parallel self.topic = topic or str(uuid.uuid4()) - if self.url.startswith('zmq'): - # NOTE(viktors): We need to pass correct hots name to the to - # get_tcp_.*() methods. Should we use nameserver here? 
- self.names = names or [cfg.CONF.rpc_zmq_host for i in range(3)] - else: - self.names = names or ["server_%i_%s" % (i, uuid.uuid4()) - for i in range(3)] + self.names = names or ["server_%i_%s" % (i, str(uuid.uuid4())[:8]) + for i in range(3)] self.exchange = exchange self.targets = [self._target(server=n) for n in self.names] self.use_fanout_ctrl = use_fanout_ctrl diff --git a/oslo_messaging/tests/test_opts.py b/oslo_messaging/tests/test_opts.py index d1c75a0bc..e170eff9e 100644 --- a/oslo_messaging/tests/test_opts.py +++ b/oslo_messaging/tests/test_opts.py @@ -29,11 +29,10 @@ class OptsTestCase(test_utils.BaseTestCase): super(OptsTestCase, self).setUp() def _test_list_opts(self, result): - self.assertEqual(6, len(result)) + self.assertEqual(5, len(result)) groups = [g for (g, l) in result] self.assertIn(None, groups) - self.assertIn('matchmaker_ring', groups) self.assertIn('matchmaker_redis', groups) self.assertIn('oslo_messaging_amqp', groups) self.assertIn('oslo_messaging_rabbit', groups) diff --git a/setup.cfg b/setup.cfg index 17c228982..ce73f1a0b 100644 --- a/setup.cfg +++ b/setup.cfg @@ -57,9 +57,8 @@ oslo.messaging.notify.drivers = oslo.messaging.zmq.matchmaker = # Matchmakers for ZeroMQ - redis = oslo_messaging._drivers.matchmaker_redis:MatchMakerRedis - ring = oslo_messaging._drivers.matchmaker_ring:MatchMakerRing - local = oslo_messaging._drivers.matchmaker:MatchMakerLocalhost + dummy = oslo_messaging._drivers.zmq_driver.matchmaker.base:DummyMatchMaker + redis = oslo_messaging._drivers.zmq_driver.matchmaker.matchmaker_redis:RedisMatchMaker oslo.config.opts = oslo.messaging = oslo_messaging.opts:list_opts diff --git a/tox.ini b/tox.ini index d91508bcf..7bc79ce52 100644 --- a/tox.ini +++ b/tox.ini @@ -43,8 +43,9 @@ commands = {toxinidir}/setup-test-env-qpid.sh python setup.py testr --slowest -- [testenv:py27-func-zeromq] commands = {toxinidir}/setup-test-env-zmq.sh python -m testtools.run \ oslo_messaging.tests.functional.test_functional.CallTestCase.test_exception \ - oslo_messaging.tests.functional.test_functional.CallTestCase.test_timeout -# commands = {toxinidir}/setup-test-env-zmq.sh python setup.py testr --slowest --testr-args='oslo_messaging.tests.functional' + oslo_messaging.tests.functional.test_functional.CallTestCase.test_timeout \ + oslo_messaging.tests.functional.test_functional.CallTestCase.test_specific_server \ + oslo_messaging.tests.functional.test_functional.CastTestCase.test_specific_server [flake8] show-source = True From 9c06fa84a07c21807bb8c81a41e4e26e0c0e5105 Mon Sep 17 00:00:00 2001 From: Doug Royal Date: Thu, 16 Jul 2015 21:04:47 -0500 Subject: [PATCH 12/28] Move zmq tests into a subdirectory There are no code changes in this commit. The zmq tests are being organized to match the development of the new zmq driver. 
Change-Id: Id79a2ade3874c56d8d5c3eca4689d49ba68d4418 --- oslo_messaging/tests/drivers/zmq/__init__.py | 0 .../tests/drivers/zmq/matchmaker/__init__.py | 1 + .../matchmaker}/test_impl_matchmaker.py | 0 .../matchmaker}/test_matchmaker_redis.py | 0 .../tests/drivers/{ => zmq}/test_impl_zmq.py | 53 --------------- .../tests/drivers/zmq/test_zmq_serializer.py | 67 +++++++++++++++++++ 6 files changed, 68 insertions(+), 53 deletions(-) create mode 100644 oslo_messaging/tests/drivers/zmq/__init__.py create mode 100644 oslo_messaging/tests/drivers/zmq/matchmaker/__init__.py rename oslo_messaging/tests/drivers/{ => zmq/matchmaker}/test_impl_matchmaker.py (100%) rename oslo_messaging/tests/drivers/{ => zmq/matchmaker}/test_matchmaker_redis.py (100%) rename oslo_messaging/tests/drivers/{ => zmq}/test_impl_zmq.py (78%) create mode 100644 oslo_messaging/tests/drivers/zmq/test_zmq_serializer.py diff --git a/oslo_messaging/tests/drivers/zmq/__init__.py b/oslo_messaging/tests/drivers/zmq/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/oslo_messaging/tests/drivers/zmq/matchmaker/__init__.py b/oslo_messaging/tests/drivers/zmq/matchmaker/__init__.py new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/oslo_messaging/tests/drivers/zmq/matchmaker/__init__.py @@ -0,0 +1 @@ + diff --git a/oslo_messaging/tests/drivers/test_impl_matchmaker.py b/oslo_messaging/tests/drivers/zmq/matchmaker/test_impl_matchmaker.py similarity index 100% rename from oslo_messaging/tests/drivers/test_impl_matchmaker.py rename to oslo_messaging/tests/drivers/zmq/matchmaker/test_impl_matchmaker.py diff --git a/oslo_messaging/tests/drivers/test_matchmaker_redis.py b/oslo_messaging/tests/drivers/zmq/matchmaker/test_matchmaker_redis.py similarity index 100% rename from oslo_messaging/tests/drivers/test_matchmaker_redis.py rename to oslo_messaging/tests/drivers/zmq/matchmaker/test_matchmaker_redis.py diff --git a/oslo_messaging/tests/drivers/test_impl_zmq.py b/oslo_messaging/tests/drivers/zmq/test_impl_zmq.py similarity index 78% rename from oslo_messaging/tests/drivers/test_impl_zmq.py rename to oslo_messaging/tests/drivers/zmq/test_impl_zmq.py index dd6df4448..563483a25 100644 --- a/oslo_messaging/tests/drivers/test_impl_zmq.py +++ b/oslo_messaging/tests/drivers/zmq/test_impl_zmq.py @@ -13,8 +13,6 @@ # under the License. import logging -import os -import re import socket import threading @@ -22,11 +20,9 @@ import fixtures import testtools import oslo_messaging -from oslo_messaging._drivers.common import RPCException from oslo_messaging._drivers import impl_zmq from oslo_messaging._drivers.zmq_driver.broker.zmq_broker import ZmqBroker from oslo_messaging._drivers.zmq_driver import zmq_async -from oslo_messaging._drivers.zmq_driver import zmq_serializer from oslo_messaging._i18n import _ from oslo_messaging.tests import utils as test_utils @@ -248,52 +244,3 @@ class TestPoller(test_utils.BaseTestCase): incoming, socket = reply_poller.poll(1) self.assertIsNone(incoming) self.assertIsNone(socket) - - -class TestZmqSerializer(test_utils.BaseTestCase): - - def test_message_without_topic_raises_RPCException(self): - # The topic is the 4th element of the message. 
- msg_without_topic = ['only', 'three', 'parts'] - - expected = "Message did not contain a topic: %s" % msg_without_topic - with self.assertRaisesRegexp(RPCException, re.escape(expected)): - zmq_serializer.get_topic_from_call_message(msg_without_topic) - - def test_invalid_topic_format_raises_RPCException(self): - invalid_topic = "no dots to split on, so not index-able".encode('utf8') - bad_message = ['', '', '', invalid_topic] - - expected_msg = "Topic was not formatted correctly: %s" - expected_msg = expected_msg % invalid_topic.decode('utf8') - with self.assertRaisesRegexp(RPCException, expected_msg): - zmq_serializer.get_topic_from_call_message(bad_message) - - def test_py3_decodes_bytes_correctly(self): - message = ['', '', '', b'topic.ipaddress'] - - actual, _ = zmq_serializer.get_topic_from_call_message(message) - - self.assertEqual('topic', actual) - - def test_bad_characters_in_topic_raise_RPCException(self): - # handle unexpected os path separators: - unexpected_evil = '<' - os.path.sep = unexpected_evil - - unexpected_alt_evil = '>' - os.path.altsep = unexpected_alt_evil - - evil_chars = [unexpected_evil, unexpected_alt_evil, '\\', '/'] - - for evil_char in evil_chars: - evil_topic = '%s%s%s' % ('trust.me', evil_char, 'please') - evil_topic = evil_topic.encode('utf8') - evil_message = ['', '', '', evil_topic] - - expected_msg = "Topic contained dangerous characters: %s" - expected_msg = expected_msg % evil_topic.decode('utf8') - expected_msg = re.escape(expected_msg) - - with self.assertRaisesRegexp(RPCException, expected_msg): - zmq_serializer.get_topic_from_call_message(evil_message) diff --git a/oslo_messaging/tests/drivers/zmq/test_zmq_serializer.py b/oslo_messaging/tests/drivers/zmq/test_zmq_serializer.py new file mode 100644 index 000000000..48f52734e --- /dev/null +++ b/oslo_messaging/tests/drivers/zmq/test_zmq_serializer.py @@ -0,0 +1,67 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import os +import re + +from oslo_messaging._drivers.common import RPCException +from oslo_messaging._drivers.zmq_driver import zmq_serializer +from oslo_messaging.tests import utils as test_utils + + +class TestZmqSerializer(test_utils.BaseTestCase): + + def test_message_without_topic_raises_RPCException(self): + # The topic is the 4th element of the message. 
+ msg_without_topic = ['only', 'three', 'parts'] + + expected = "Message did not contain a topic: %s" % msg_without_topic + with self.assertRaisesRegexp(RPCException, re.escape(expected)): + zmq_serializer.get_topic_from_call_message(msg_without_topic) + + def test_invalid_topic_format_raises_RPCException(self): + invalid_topic = "no dots to split on, so not index-able".encode('utf8') + bad_message = ['', '', '', invalid_topic] + + expected_msg = "Topic was not formatted correctly: %s" + expected_msg = expected_msg % invalid_topic.decode('utf8') + with self.assertRaisesRegexp(RPCException, expected_msg): + zmq_serializer.get_topic_from_call_message(bad_message) + + def test_py3_decodes_bytes_correctly(self): + message = ['', '', '', b'topic.ipaddress'] + + actual, _ = zmq_serializer.get_topic_from_call_message(message) + + self.assertEqual('topic', actual) + + def test_bad_characters_in_topic_raise_RPCException(self): + # handle unexpected os path separators: + unexpected_evil = '<' + os.path.sep = unexpected_evil + + unexpected_alt_evil = '>' + os.path.altsep = unexpected_alt_evil + + evil_chars = [unexpected_evil, unexpected_alt_evil, '\\', '/'] + + for evil_char in evil_chars: + evil_topic = '%s%s%s' % ('trust.me', evil_char, 'please') + evil_topic = evil_topic.encode('utf8') + evil_message = ['', '', '', evil_topic] + + expected_msg = "Topic contained dangerous characters: %s" + expected_msg = expected_msg % evil_topic.decode('utf8') + expected_msg = re.escape(expected_msg) + + with self.assertRaisesRegexp(RPCException, expected_msg): + zmq_serializer.get_topic_from_call_message(evil_message) From 75660cedacefd1c3cf1c7d6d2e131cdb6112671a Mon Sep 17 00:00:00 2001 From: Oleksii Zamiatin Date: Fri, 17 Jul 2015 15:44:37 +0300 Subject: [PATCH 13/28] Target direct usage Reduced ZmqTopic as unneeded duplication of oslo_messaging Target. 
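To make the switch concrete, a small sketch (not part of the patch) of the round trip this enables: the client ships target.__dict__ as JSON and the receiving side rebuilds a Target and derives the matchmaker key, mirroring the target_to_str/target_from_dict helpers added in zmq_target.py below; field names follow that hunk.

import json

import oslo_messaging

def target_to_str(t):
    # Matchmaker key: "<server>.<topic>" when a server is set, else the topic.
    return t.topic if t.server is None else "%s.%s" % (t.server, t.topic)

def target_from_dict(d):
    return oslo_messaging.Target(exchange=d['exchange'], topic=d['topic'],
                                 namespace=d['namespace'], version=d['version'],
                                 server=d['server'], fanout=d['fanout'])

# Roughly what send_json(target.__dict__) / recv_json() exchange on the wire.
sent = oslo_messaging.Target(topic='test_topic', server='host1')
received = target_from_dict(json.loads(json.dumps(sent.__dict__)))
print(target_to_str(received))  # host1.test_topic
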
Change-Id: Ie9c8e59cb8a2b08c26dbc42f7af4e808362a7524 --- .../zmq_driver/broker/zmq_base_proxy.py | 22 +++--- .../zmq_driver/broker/zmq_call_proxy.py | 63 +++++++-------- .../zmq_driver/broker/zmq_cast_proxy.py | 78 ------------------- .../zmq_driver/broker/zmq_fanout_proxy.py | 9 ++- .../_drivers/zmq_driver/matchmaker/base.py | 37 +++++---- .../zmq_driver/matchmaker/matchmaker_redis.py | 13 ++-- .../zmq_driver/poller/threading_poller.py | 2 +- .../zmq_driver/rpc/client/zmq_call_request.py | 8 +- .../zmq_driver/rpc/client/zmq_cast_dealer.py | 7 +- .../zmq_driver/rpc/client/zmq_request.py | 4 +- .../rpc/server/zmq_base_consumer.py | 2 +- .../rpc/server/zmq_call_responder.py | 14 ++-- .../rpc/server/zmq_fanout_consumer.py | 4 +- .../zmq_driver/rpc/server/zmq_server.py | 2 +- .../_drivers/zmq_driver/zmq_context.py | 33 -------- .../_drivers/zmq_driver/zmq_serializer.py | 16 +++- .../_drivers/zmq_driver/zmq_target.py | 52 +++++++++++++ .../_drivers/zmq_driver/zmq_topic.py | 65 ---------------- .../zmq/matchmaker/test_impl_matchmaker.py | 33 ++++---- 19 files changed, 179 insertions(+), 285 deletions(-) delete mode 100644 oslo_messaging/_drivers/zmq_driver/broker/zmq_cast_proxy.py delete mode 100644 oslo_messaging/_drivers/zmq_driver/zmq_context.py create mode 100644 oslo_messaging/_drivers/zmq_driver/zmq_target.py delete mode 100644 oslo_messaging/_drivers/zmq_driver/zmq_topic.py diff --git a/oslo_messaging/_drivers/zmq_driver/broker/zmq_base_proxy.py b/oslo_messaging/_drivers/zmq_driver/broker/zmq_base_proxy.py index 5443e6af0..c13d14d50 100644 --- a/oslo_messaging/_drivers/zmq_driver/broker/zmq_base_proxy.py +++ b/oslo_messaging/_drivers/zmq_driver/broker/zmq_base_proxy.py @@ -19,7 +19,7 @@ import six from oslo_messaging._drivers.common import RPCException from oslo_messaging._drivers.zmq_driver import zmq_async -from oslo_messaging._drivers.zmq_driver import zmq_topic +from oslo_messaging._drivers.zmq_driver import zmq_target from oslo_messaging._i18n import _LE, _LI LOG = logging.getLogger(__name__) @@ -86,6 +86,8 @@ class BaseTcpFrontend(object): :type socket_type: int :param port_number: Current messaging pipeline port. :type port_number: int + :param receive_meth: Receive method for poller. 
+ :type receive_meth: method """ self.conf = conf @@ -93,7 +95,7 @@ class BaseTcpFrontend(object): self.context = context try: self.frontend = self.context.socket(socket_type) - bind_address = zmq_topic.get_tcp_bind_address(port_number) + bind_address = zmq_target.get_tcp_bind_address(port_number) LOG.info(_LI("Binding to TCP %s") % bind_address) self.frontend.bind(bind_address) self.poller.register(self.frontend, receive_meth) @@ -127,25 +129,25 @@ class BaseBackendMatcher(object): class DirectBackendMatcher(BaseBackendMatcher): def redirect_to_backend(self, message): - backend, topic = self._match_backend(message) - self._send_message(backend, message, topic) + backend, target = self._match_backend(message) + self._send_message(backend, message, target) def _match_backend(self, message): - topic = self._get_topic(message) - ipc_address = self._get_ipc_address(topic) + target = self._get_target(message) + ipc_address = self._get_ipc_address(target) backend = self._create_backend(ipc_address) - return backend, topic + return backend, target @abc.abstractmethod - def _get_topic(self, message): + def _get_target(self, message): """Extract topic from message""" @abc.abstractmethod - def _get_ipc_address(self, topic): + def _get_ipc_address(self, target): """Get ipc backend address from topic""" @abc.abstractmethod - def _send_message(self, backend, message, topic): + def _send_message(self, backend, message, target): """Backend specific sending logic""" @abc.abstractmethod diff --git a/oslo_messaging/_drivers/zmq_driver/broker/zmq_call_proxy.py b/oslo_messaging/_drivers/zmq_driver/broker/zmq_call_proxy.py index a2150fa30..623303902 100644 --- a/oslo_messaging/_drivers/zmq_driver/broker/zmq_call_proxy.py +++ b/oslo_messaging/_drivers/zmq_driver/broker/zmq_call_proxy.py @@ -18,7 +18,7 @@ from oslo_messaging._drivers.common import RPCException import oslo_messaging._drivers.zmq_driver.broker.zmq_base_proxy as base_proxy from oslo_messaging._drivers.zmq_driver import zmq_async from oslo_messaging._drivers.zmq_driver import zmq_serializer -from oslo_messaging._drivers.zmq_driver import zmq_topic +from oslo_messaging._drivers.zmq_driver import zmq_target from oslo_messaging._i18n import _LE, _LI LOG = logging.getLogger(__name__) @@ -26,24 +26,6 @@ LOG = logging.getLogger(__name__) zmq = zmq_async.import_zmq() -class CallProxy(base_proxy.BaseProxy): - - def __init__(self, conf, context): - super(CallProxy, self).__init__(conf, context) - self.tcp_frontend = FrontendTcpRouter(self.conf, context) - self.backend_matcher = DealerBackend(self.conf, context) - LOG.info(_LI("Starting call proxy thread")) - - def run(self): - message = self.tcp_frontend.receive_incoming() - if message is not None: - self.backend_matcher.redirect_to_backend(message) - - reply, socket = self.backend_matcher.receive_outgoing_reply() - if reply is not None: - self.tcp_frontend.redirect_outgoing_reply(reply) - - class DealerBackend(base_proxy.DirectBackendMatcher): def __init__(self, conf, context, poller=None): @@ -52,21 +34,19 @@ class DealerBackend(base_proxy.DirectBackendMatcher): native_zmq=conf.rpc_zmq_native) super(DealerBackend, self).__init__(conf, poller, context) - def receive_outgoing_reply(self): - reply_message = self.poller.poll(1) - return reply_message + def _get_target(self, message): + return zmq_serializer.get_target_from_call_message(message) - def _get_topic(self, message): - topic, server = zmq_serializer.get_topic_from_call_message(message) - return zmq_topic.Topic(self.conf, topic, server) - - def 
_get_ipc_address(self, topic): - return zmq_topic.get_ipc_address_call(self.conf, topic) + def _get_ipc_address(self, target): + return zmq_target.get_ipc_address_call(self.conf, target) def _send_message(self, backend, message, topic): # Empty needed for awaiting REP socket to work properly # (DEALER-REP usage specific) backend.send(b'', zmq.SNDMORE) + backend.send(message.pop(0), zmq.SNDMORE) + backend.send_string(message.pop(0), zmq.SNDMORE) + message.pop(0) # Drop target unneeded any more backend.send_multipart(message) def _create_backend(self, ipc_address): @@ -85,9 +65,30 @@ class FrontendTcpRouter(base_proxy.BaseTcpFrontend): if poller is None: poller = zmq_async.get_poller( native_zmq=conf.rpc_zmq_native) - super(FrontendTcpRouter, self).__init__(conf, poller, context, - socket_type=zmq.ROUTER, - port_number=conf.rpc_zmq_port) + super(FrontendTcpRouter, self).__init__( + conf, poller, context, + socket_type=zmq.ROUTER, + port_number=conf.rpc_zmq_port, + receive_meth=self._receive_message) + + def _receive_message(self, socket): + + try: + reply_id = socket.recv() + empty = socket.recv() + assert empty == b'', "Empty delimiter expected" + msg_type = socket.recv_string() + target_dict = socket.recv_json() + target = zmq_target.target_from_dict(target_dict) + other = socket.recv_multipart() + except zmq.ZMQError as e: + LOG.error(_LE("Error receiving message %s") % str(e)) + return None + + if msg_type == zmq_serializer.FANOUT_TYPE: + other.insert(0, zmq_target.target_to_str(target).encode("utf-8")) + + return [reply_id, msg_type, target] + other @staticmethod def _reduce_empty(reply): diff --git a/oslo_messaging/_drivers/zmq_driver/broker/zmq_cast_proxy.py b/oslo_messaging/_drivers/zmq_driver/broker/zmq_cast_proxy.py deleted file mode 100644 index 9779779df..000000000 --- a/oslo_messaging/_drivers/zmq_driver/broker/zmq_cast_proxy.py +++ /dev/null @@ -1,78 +0,0 @@ -# Copyright 2015 Mirantis, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import logging - -import oslo_messaging._drivers.zmq_driver.broker.zmq_base_proxy as base_proxy -from oslo_messaging._drivers.zmq_driver import zmq_async -from oslo_messaging._drivers.zmq_driver import zmq_serializer -from oslo_messaging._drivers.zmq_driver import zmq_topic -from oslo_messaging._i18n import _LI - -zmq = zmq_async.import_zmq() - -LOG = logging.getLogger(__name__) - - -class CastProxy(base_proxy.BaseProxy): - - def __init__(self, conf, context): - super(CastProxy, self).__init__(conf, context) - self.tcp_frontend = FrontendTcpPull(self.conf, context) - self.backend_matcher = CastPushBackendMatcher(self.conf, context) - LOG.info(_LI("Starting cast proxy thread")) - - def run(self): - message = self.tcp_frontend.receive_incoming() - if message is not None: - self.backend_matcher.redirect_to_backend(message) - - -class FrontendTcpPull(base_proxy.BaseTcpFrontend): - - def __init__(self, conf, context): - poller = zmq_async.get_poller(native_zmq=conf.rpc_zmq_native) - super(FrontendTcpPull, self).__init__(conf, poller, context) - self.frontend = self.context.socket(zmq.PULL) - address = zmq_topic.get_tcp_bind_address(conf.rpc_zmq_fanout_port) - LOG.info(_LI("Binding to TCP PULL %s") % address) - self.frontend.bind(address) - self.poller.register(self.frontend) - - def _receive_message(self): - message = self.poller.poll() - return message - - -class CastPushBackendMatcher(base_proxy.BaseBackendMatcher): - - def __init__(self, conf, context): - poller = zmq_async.get_poller(native_zmq=conf.rpc_zmq_native) - super(CastPushBackendMatcher, self).__init__(conf, poller, context) - self.backend = self.context.socket(zmq.PUSH) - - def _get_topic(self, message): - topic, server = zmq_serializer.get_topic_from_cast_message(message) - return zmq_topic.Topic(self.conf, topic, server) - - def _get_ipc_address(self, topic): - return zmq_topic.get_ipc_address_cast(self.conf, topic) - - def _send_message(self, backend, message, topic): - backend.send_multipart(message) - - def _create_backend(self, ipc_address): - LOG.debug("[Cast Proxy] Creating PUSH backend %s", ipc_address) - self.backend.connect(ipc_address) - self.backends[str(ipc_address)] = True diff --git a/oslo_messaging/_drivers/zmq_driver/broker/zmq_fanout_proxy.py b/oslo_messaging/_drivers/zmq_driver/broker/zmq_fanout_proxy.py index 8d1f8b185..279ce3f04 100644 --- a/oslo_messaging/_drivers/zmq_driver/broker/zmq_fanout_proxy.py +++ b/oslo_messaging/_drivers/zmq_driver/broker/zmq_fanout_proxy.py @@ -16,7 +16,7 @@ import oslo_messaging._drivers.zmq_driver.broker.zmq_base_proxy as base_proxy from oslo_messaging._drivers.zmq_driver import zmq_async from oslo_messaging._drivers.zmq_driver import zmq_serializer -from oslo_messaging._drivers.zmq_driver import zmq_topic +from oslo_messaging._drivers.zmq_driver import zmq_target zmq = zmq_async.import_zmq() @@ -27,8 +27,9 @@ class PublisherBackend(base_proxy.BaseBackendMatcher): poller = zmq_async.get_poller(native_zmq=conf.rpc_zmq_native) super(PublisherBackend, self).__init__(conf, poller, context) self.backend = self.context.socket(zmq.PUB) - self.backend.bind(zmq_topic.get_ipc_address_fanout(conf)) + self.backend.bind(zmq_target.get_ipc_address_fanout(conf)) def redirect_to_backend(self, message): - topic_pos = zmq_serializer.MESSAGE_CALL_TOPIC_POSITION - self.backend.send_multipart(message[topic_pos:]) + target_pos = zmq_serializer.MESSAGE_CALL_TARGET_POSITION + 1 + msg = message[target_pos:] + self.backend.send_multipart(msg) diff --git 
a/oslo_messaging/_drivers/zmq_driver/matchmaker/base.py b/oslo_messaging/_drivers/zmq_driver/matchmaker/base.py index 29e9d52a2..766e367c2 100644 --- a/oslo_messaging/_drivers/zmq_driver/matchmaker/base.py +++ b/oslo_messaging/_drivers/zmq_driver/matchmaker/base.py @@ -17,6 +17,7 @@ import logging import six +from oslo_messaging._drivers.zmq_driver import zmq_target from oslo_messaging._i18n import _LI, _LW @@ -32,26 +33,26 @@ class MatchMakerBase(object): self.conf = conf @abc.abstractmethod - def register(self, topic, hostname): - """Register topic on nameserver""" + def register(self, target, hostname): + """Register target on nameserver""" @abc.abstractmethod - def get_hosts(self, topic): - """Get hosts from nameserver by topic""" + def get_hosts(self, target): + """Get hosts from nameserver by target""" - def get_single_host(self, topic): - """Get a single host by topic""" - hosts = self.get_hosts(topic) + def get_single_host(self, target): + """Get a single host by target""" + hosts = self.get_hosts(target) if len(hosts) == 0: - LOG.warning(_LW("No hosts were found for topic %s. Using " - "localhost") % topic) + LOG.warning(_LW("No hosts were found for target %s. Using " + "localhost") % target) return "localhost" elif len(hosts) == 1: - LOG.info(_LI("A single host found for topic %s.") % topic) + LOG.info(_LI("A single host found for target %s.") % target) return hosts[0] else: - LOG.warning(_LW("Multiple hosts were found for topic %s. Using " - "the first one.") % topic) + LOG.warning(_LW("Multiple hosts were found for target %s. Using " + "the first one.") % target) return hosts[0] @@ -62,9 +63,11 @@ class DummyMatchMaker(MatchMakerBase): self._cache = collections.defaultdict(list) - def register(self, topic, hostname): - if hostname not in self._cache[topic]: - self._cache[topic].append(hostname) + def register(self, target, hostname): + key = zmq_target.target_to_str(target) + if hostname not in self._cache[key]: + self._cache[key].append(hostname) - def get_hosts(self, topic): - return self._cache[topic] + def get_hosts(self, target): + key = zmq_target.target_to_str(target) + return self._cache[key] diff --git a/oslo_messaging/_drivers/zmq_driver/matchmaker/matchmaker_redis.py b/oslo_messaging/_drivers/zmq_driver/matchmaker/matchmaker_redis.py index f1a6f3827..1ece3d201 100644 --- a/oslo_messaging/_drivers/zmq_driver/matchmaker/matchmaker_redis.py +++ b/oslo_messaging/_drivers/zmq_driver/matchmaker/matchmaker_redis.py @@ -17,6 +17,7 @@ from oslo_config import cfg import redis from oslo_messaging._drivers.zmq_driver.matchmaker import base +from oslo_messaging._drivers.zmq_driver import zmq_target LOG = logging.getLogger(__name__) @@ -47,9 +48,11 @@ class RedisMatchMaker(base.MatchMakerBase): password=self.conf.matchmaker_redis.password, ) - def register(self, topic, hostname): - if hostname not in self.get_hosts(topic): - self._redis.lpush(topic, hostname) + def register(self, target, hostname): + key = zmq_target.target_to_str(target) + if hostname not in self.get_hosts(target): + self._redis.lpush(key, hostname) - def get_hosts(self, topic): - return self._redis.lrange(topic, 0, -1)[::-1] + def get_hosts(self, target): + key = zmq_target.target_to_str(target) + return self._redis.lrange(key, 0, -1)[::-1] diff --git a/oslo_messaging/_drivers/zmq_driver/poller/threading_poller.py b/oslo_messaging/_drivers/zmq_driver/poller/threading_poller.py index db9c1463a..c6be79323 100644 --- a/oslo_messaging/_drivers/zmq_driver/poller/threading_poller.py +++ 
b/oslo_messaging/_drivers/zmq_driver/poller/threading_poller.py @@ -47,7 +47,7 @@ class ThreadingPoller(zmq_poller.ZmqPoller): return None, None for socket in sockets: if socket in self.recv_methods: - return self.recv_methods[socket](socket) + return self.recv_methods[socket](socket), socket else: return socket.recv_multipart(), socket diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_call_request.py b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_call_request.py index 11c190bf9..aff4f59fc 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_call_request.py +++ b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_call_request.py @@ -19,7 +19,7 @@ from oslo_messaging._drivers import common as rpc_common from oslo_messaging._drivers.zmq_driver.rpc.client.zmq_request import Request from oslo_messaging._drivers.zmq_driver import zmq_async from oslo_messaging._drivers.zmq_driver import zmq_serializer -from oslo_messaging._drivers.zmq_driver import zmq_topic +from oslo_messaging._drivers.zmq_driver import zmq_target from oslo_messaging._i18n import _LE, _LI LOG = logging.getLogger(__name__) @@ -42,9 +42,9 @@ class CallRequest(Request): zmq_serializer.CALL_TYPE, timeout, retry) - self.host = self.matchmaker.get_single_host(self.topic.topic) - self.connect_address = zmq_topic.get_tcp_address_call(conf, - self.host) + self.host = self.matchmaker.get_single_host(self.target) + self.connect_address = zmq_target.get_tcp_address_call(conf, + self.host) LOG.info(_LI("Connecting REQ to %s") % self.connect_address) self.socket.connect(self.connect_address) except zmq.ZMQError as e: diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_dealer.py b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_dealer.py index 6f6640575..2aad145c2 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_dealer.py +++ b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_dealer.py @@ -18,7 +18,7 @@ from oslo_messaging._drivers.zmq_driver.rpc.client import zmq_cast_publisher from oslo_messaging._drivers.zmq_driver.rpc.client.zmq_request import Request from oslo_messaging._drivers.zmq_driver import zmq_async from oslo_messaging._drivers.zmq_driver import zmq_serializer -from oslo_messaging._drivers.zmq_driver import zmq_topic +from oslo_messaging._drivers.zmq_driver import zmq_target from oslo_messaging._i18n import _LE, _LI LOG = logging.getLogger(__name__) @@ -57,9 +57,8 @@ class DealerCastPublisher(zmq_cast_publisher.CastPublisherBase): def cast(self, target, context, message, timeout=None, retry=None): - topic = zmq_topic.Topic.from_target(self.conf, target) - host = self.matchmaker.get_single_host(topic.topic) - connect_address = zmq_topic.get_tcp_address_call(self.conf, host) + host = self.matchmaker.get_single_host(target) + connect_address = zmq_target.get_tcp_address_call(self.conf, host) dealer_socket = self._create_socket(connect_address) request = CastRequest(self.conf, target, context, message, dealer_socket, connect_address, timeout, retry) diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_request.py b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_request.py index badb04066..144c5c107 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_request.py +++ b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_request.py @@ -21,7 +21,6 @@ import six from oslo_messaging._drivers.zmq_driver import zmq_async from oslo_messaging._drivers.zmq_driver import zmq_serializer -from oslo_messaging._drivers.zmq_driver import zmq_topic from 
oslo_messaging._i18n import _LE LOG = logging.getLogger(__name__) @@ -51,7 +50,6 @@ class Request(object): self.retry = retry self.reply = None self.socket = socket - self.topic = zmq_topic.Topic.from_target(conf, target) @property def is_replied(self): @@ -63,7 +61,7 @@ class Request(object): def send_request(self): self.socket.send_string(self.msg_type, zmq.SNDMORE) - self.socket.send_string(str(self.topic), zmq.SNDMORE) + self.socket.send_json(self.target.__dict__, zmq.SNDMORE) self.socket.send_string(self.msg_id, zmq.SNDMORE) self.socket.send_json(self.context, zmq.SNDMORE) self.socket.send_json(self.message) diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_base_consumer.py b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_base_consumer.py index 2eeb55f22..9fffe8637 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_base_consumer.py +++ b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_base_consumer.py @@ -20,7 +20,7 @@ class ConsumerBase(object): self.conf = conf self.poller = zmq_poller self.context = context - self.sockets_per_topic = {} + self.sockets_per_target = {} def poll(self, timeout=None): pass diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_call_responder.py b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_call_responder.py index 38bc43207..f440359a2 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_call_responder.py +++ b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_call_responder.py @@ -20,7 +20,7 @@ from oslo_messaging._drivers import common as rpc_common from oslo_messaging._drivers.zmq_driver.rpc.server import zmq_base_consumer from oslo_messaging._drivers.zmq_driver import zmq_async from oslo_messaging._drivers.zmq_driver import zmq_serializer -from oslo_messaging._drivers.zmq_driver import zmq_topic as topic_utils +from oslo_messaging._drivers.zmq_driver import zmq_target from oslo_messaging._i18n import _LE @@ -64,12 +64,8 @@ class CallResponder(zmq_base_consumer.ConsumerBase): def _receive_message(self, socket): try: reply_id = socket.recv() - empty = socket.recv() - assert empty == b'', 'Bad format: empty separator expected' msg_type = socket.recv_string() assert msg_type is not None, 'Bad format: msg type expected' - topic = socket.recv_string() - assert topic is not None, 'Bad format: topic string expected' msg_id = socket.recv_string() assert msg_id is not None, 'Bad format: message ID expected' context = socket.recv_json() @@ -82,12 +78,12 @@ class CallResponder(zmq_base_consumer.ConsumerBase): self.poller) return incoming except zmq.ZMQError as e: - LOG.error(_LE("Receiving message failed ... 
%s") % str(e)) + LOG.error(_LE("Receiving message failed: %s") % str(e)) def listen(self, target): - topic = topic_utils.Topic.from_target(self.conf, target) - ipc_rep_address = topic_utils.get_ipc_address_call(self.conf, topic) + ipc_rep_address = zmq_target.get_ipc_address_call(self.conf, target) rep_socket = self.context.socket(zmq.REP) rep_socket.bind(ipc_rep_address) - self.sockets_per_topic[str(topic)] = rep_socket + str_target = zmq_target.target_to_str(target) + self.sockets_per_target[str_target] = rep_socket self.poller.register(rep_socket, self._receive_message) diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_fanout_consumer.py b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_fanout_consumer.py index 3ca78cf05..0c54ca36b 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_fanout_consumer.py +++ b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_fanout_consumer.py @@ -20,7 +20,7 @@ import six from oslo_messaging._drivers import base from oslo_messaging._drivers.zmq_driver.rpc.server import zmq_base_consumer from oslo_messaging._drivers.zmq_driver import zmq_async -from oslo_messaging._drivers.zmq_driver import zmq_topic as topic_utils +from oslo_messaging._drivers.zmq_driver import zmq_target as topic_utils from oslo_messaging._i18n import _LE @@ -63,7 +63,7 @@ class FanoutConsumer(zmq_base_consumer.ConsumerBase): LOG.error(_LE("Receiving message failed ... {}"), e) def listen(self, target): - topic = topic_utils.Topic.from_target(self.conf, target) + topic = topic_utils.target_to_str(target) ipc_address = topic_utils.get_ipc_address_fanout(self.conf) sub_socket = self.context.socket(zmq.SUB) sub_socket.connect(ipc_address) diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_server.py b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_server.py index 778f3b273..ef6e97a32 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_server.py +++ b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_server.py @@ -52,7 +52,7 @@ class ZmqServer(base.Listener): def listen(self, target): LOG.info("[Server] Listen to Target %s" % target) - self.matchmaker.register(topic=target.topic, + self.matchmaker.register(target=target, hostname=self.conf.rpc_zmq_host) if target.fanout: self.fanout_resp.listen(target) diff --git a/oslo_messaging/_drivers/zmq_driver/zmq_context.py b/oslo_messaging/_drivers/zmq_driver/zmq_context.py deleted file mode 100644 index f986e41db..000000000 --- a/oslo_messaging/_drivers/zmq_driver/zmq_context.py +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright 2015 Mirantis, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- - -from oslo_messaging._drivers import common as rpc_common - - -class RpcContext(rpc_common.CommonRpcContext): - """Context that supports replying to a rpc.call.""" - def __init__(self, **kwargs): - self.replies = [] - super(RpcContext, self).__init__(**kwargs) - - def deepcopy(self): - values = self.to_dict() - values['replies'] = self.replies - return self.__class__(**values) - - def reply(self, reply=None, failure=None, ending=False): - if ending: - return - self.replies.append(reply) diff --git a/oslo_messaging/_drivers/zmq_driver/zmq_serializer.py b/oslo_messaging/_drivers/zmq_driver/zmq_serializer.py index ef422ff8d..cda3aca4e 100644 --- a/oslo_messaging/_drivers/zmq_driver/zmq_serializer.py +++ b/oslo_messaging/_drivers/zmq_driver/zmq_serializer.py @@ -23,7 +23,8 @@ from oslo_messaging._i18n import _LE, _LW LOG = logging.getLogger(__name__) -MESSAGE_CALL_TYPE_POSITION = 2 +MESSAGE_CALL_TYPE_POSITION = 1 +MESSAGE_CALL_TARGET_POSITION = 2 MESSAGE_CALL_TOPIC_POSITION = 3 FIELD_FAILURE = 'failure' @@ -40,8 +41,6 @@ MESSAGE_TYPES = (CALL_TYPE, CAST_TYPE, FANOUT_TYPE, NOTIFY_TYPE) def get_msg_type(message): type = message[MESSAGE_CALL_TYPE_POSITION] - if six.PY3: - type = type.decode('utf-8') if type not in MESSAGE_TYPES: errmsg = _LE("Unknown message type: %s") % str(type) LOG.error(errmsg) @@ -88,3 +87,14 @@ def get_topic_from_call_message(message): :returns: (topic: str, server: str) """ return _get_topic_from_msg(message, MESSAGE_CALL_TOPIC_POSITION) + + +def get_target_from_call_message(message): + """Extract target from message. + + :param message: A message + :type message: list + + :returns: target: Target + """ + return message[MESSAGE_CALL_TARGET_POSITION] diff --git a/oslo_messaging/_drivers/zmq_driver/zmq_target.py b/oslo_messaging/_drivers/zmq_driver/zmq_target.py new file mode 100644 index 000000000..a5e5de8dd --- /dev/null +++ b/oslo_messaging/_drivers/zmq_driver/zmq_target.py @@ -0,0 +1,52 @@ +# Copyright 2015 Mirantis, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +from oslo_messaging import target + + +def get_ipc_address_call(conf, target): + target_addr = target_to_str(target) + return "ipc://%s/%s" % (conf.rpc_zmq_ipc_dir, target_addr) + + +def get_tcp_bind_address(port): + return "tcp://*:%s" % port + + +def get_tcp_address_call(conf, host): + return "tcp://%s:%s" % (host, conf.rpc_zmq_port) + + +def get_ipc_address_cast(conf, target): + target_addr = target_to_str(target) + return "ipc://%s/fanout/%s" % (conf.rpc_zmq_ipc_dir, target_addr) + + +def get_ipc_address_fanout(conf): + return "ipc://%s/fanout_general" % conf.rpc_zmq_ipc_dir + + +def target_to_str(target): + if target.server is None: + return target.topic + return "%s.%s" % (target.server, target.topic) + + +def target_from_dict(target_dict): + return target.Target(exchange=target_dict['exchange'], + topic=target_dict['topic'], + namespace=target_dict['namespace'], + version=target_dict['version'], + server=target_dict['server'], + fanout=target_dict['fanout']) diff --git a/oslo_messaging/_drivers/zmq_driver/zmq_topic.py b/oslo_messaging/_drivers/zmq_driver/zmq_topic.py deleted file mode 100644 index f89f5073b..000000000 --- a/oslo_messaging/_drivers/zmq_driver/zmq_topic.py +++ /dev/null @@ -1,65 +0,0 @@ -# Copyright 2015 Mirantis, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- - -def get_ipc_address_call(conf, topic): - return "ipc://%s/%s" % (conf.rpc_zmq_ipc_dir, str(topic)) - - -def get_tcp_bind_address(port): - return "tcp://*:%s" % port - - -def get_tcp_address_call(conf, host): - return "tcp://%s:%s" % (host, conf.rpc_zmq_port) - - -def get_ipc_address_cast(conf, topic): - return "ipc://%s/fanout/%s" % (conf.rpc_zmq_ipc_dir, str(topic)) - - -def get_ipc_address_fanout(conf): - return "ipc://%s/fanout_general" % conf.rpc_zmq_ipc_dir - - -class Topic(object): - - def __init__(self, conf, topic, server=None, fanout=False): - - if server is None: - self.server = conf.rpc_zmq_host - else: - self.server = server - - self._topic = topic - self.fanout = fanout - - @staticmethod - def _extract_cinder_server(server): - return server.split('@', 1)[0] - - @staticmethod - def from_target(conf, target): - if target.server is not None: - return Topic(conf, target.topic, target.server, - fanout=target.fanout) - else: - return Topic(conf, target.topic, fanout=target.fanout) - - @property - def topic(self): - return self._topic if self._topic else "" - - def __str__(self, *args, **kwargs): - return u"%s.%s" % (self.topic, self.server) diff --git a/oslo_messaging/tests/drivers/zmq/matchmaker/test_impl_matchmaker.py b/oslo_messaging/tests/drivers/zmq/matchmaker/test_impl_matchmaker.py index 8fa82c2a4..9cae5fe6a 100644 --- a/oslo_messaging/tests/drivers/zmq/matchmaker/test_impl_matchmaker.py +++ b/oslo_messaging/tests/drivers/zmq/matchmaker/test_impl_matchmaker.py @@ -15,6 +15,7 @@ from stevedore import driver import testscenarios +import oslo_messaging from oslo_messaging.tests import utils as test_utils @@ -39,37 +40,41 @@ class TestImplMatchmaker(test_utils.BaseTestCase): if self.rpc_zmq_matchmaker == "redis": self.addCleanup(self.test_matcher._redis.flushdb) - self.topic = "test_topic" + self.target = oslo_messaging.Target(topic="test_topic") self.host1 = b"test_host1" self.host2 = b"test_host2" def test_register(self): - self.test_matcher.register(self.topic, self.host1) + self.test_matcher.register(self.target, self.host1) - self.assertEqual(self.test_matcher.get_hosts(self.topic), [self.host1]) - self.assertEqual(self.test_matcher.get_single_host(self.topic), + self.assertEqual(self.test_matcher.get_hosts(self.target), + [self.host1]) + self.assertEqual(self.test_matcher.get_single_host(self.target), self.host1) def test_register_two_hosts(self): - self.test_matcher.register(self.topic, self.host1) - self.test_matcher.register(self.topic, self.host2) + self.test_matcher.register(self.target, self.host1) + self.test_matcher.register(self.target, self.host2) - self.assertEqual(self.test_matcher.get_hosts(self.topic), + self.assertEqual(self.test_matcher.get_hosts(self.target), [self.host1, self.host2]) - self.assertIn(self.test_matcher.get_single_host(self.topic), + self.assertIn(self.test_matcher.get_single_host(self.target), [self.host1, self.host2]) def test_register_two_same_hosts(self): - self.test_matcher.register(self.topic, self.host1) - self.test_matcher.register(self.topic, self.host1) + self.test_matcher.register(self.target, self.host1) + self.test_matcher.register(self.target, self.host1) - self.assertEqual(self.test_matcher.get_hosts(self.topic), [self.host1]) - self.assertEqual(self.test_matcher.get_single_host(self.topic), + self.assertEqual(self.test_matcher.get_hosts(self.target), + [self.host1]) + self.assertEqual(self.test_matcher.get_single_host(self.target), self.host1) def test_get_hosts_wrong_topic(self): - 
self.assertEqual(self.test_matcher.get_hosts("no_such_topic"), []) + target = oslo_messaging.Target(topic="no_such_topic") + self.assertEqual(self.test_matcher.get_hosts(target), []) def test_get_single_host_wrong_topic(self): - self.assertEqual(self.test_matcher.get_single_host("no_such_topic"), + target = oslo_messaging.Target(topic="no_such_topic") + self.assertEqual(self.test_matcher.get_single_host(target), "localhost") From e2c3e36d75dedcab07c9239f544e93aabe0faed5 Mon Sep 17 00:00:00 2001 From: Oleksii Zamiatin Date: Mon, 20 Jul 2015 12:21:58 +0300 Subject: [PATCH 14/28] Close sockets properly All socket-connections should properly die after their parents being stopped. Change-Id: I6a83ed2d5ef194e8b068c1d8bd6813f48636c5fb --- oslo_messaging/_drivers/impl_zmq.py | 14 ++++++-------- .../zmq_driver/broker/zmq_base_proxy.py | 8 ++++++++ .../zmq_driver/broker/zmq_fanout_proxy.py | 3 +++ .../zmq_driver/broker/zmq_universal_proxy.py | 6 ++++++ .../zmq_driver/matchmaker/matchmaker_redis.py | 2 +- .../_drivers/zmq_driver/poller/green_poller.py | 12 ++++++++---- .../zmq_driver/poller/threading_poller.py | 14 +++++++++++++- .../zmq_driver/rpc/client/zmq_call_request.py | 12 ++++++++---- .../zmq_driver/rpc/client/zmq_cast_dealer.py | 6 ++++++ .../zmq_driver/rpc/client/zmq_client.py | 13 +++++++++---- .../zmq_driver/rpc/server/zmq_base_consumer.py | 18 ++++++++++-------- .../zmq_driver/rpc/server/zmq_server.py | 4 +++- .../_drivers/zmq_driver/zmq_poller.py | 8 ++++++-- 13 files changed, 87 insertions(+), 33 deletions(-) diff --git a/oslo_messaging/_drivers/impl_zmq.py b/oslo_messaging/_drivers/impl_zmq.py index 69d2bf5c8..064fe7c60 100644 --- a/oslo_messaging/_drivers/impl_zmq.py +++ b/oslo_messaging/_drivers/impl_zmq.py @@ -96,21 +96,20 @@ class ZmqDriver(base.BaseDriver): conf.register_opts(zmq_opts) conf.register_opts(executor_base._pool_opts) self.conf = conf - self.server = None - self.client = None + self.matchmaker = driver.DriverManager( 'oslo.messaging.zmq.matchmaker', self.conf.rpc_zmq_matchmaker, ).driver(self.conf) + self.server = zmq_server.ZmqServer(self.conf, self.matchmaker) + self.client = zmq_client.ZmqClient(self.conf, self.matchmaker, + allowed_remote_exmods) super(ZmqDriver, self).__init__(conf, url, default_exchange, allowed_remote_exmods) def send(self, target, ctxt, message, wait_for_reply=None, timeout=None, retry=None): - if self.client is None: - self.client = zmq_client.ZmqClient(self.conf, self.matchmaker, - self._allowed_remote_exmods) if wait_for_reply: return self.client.call(target, ctxt, message, timeout, retry) else: @@ -121,8 +120,6 @@ class ZmqDriver(base.BaseDriver): return None def listen(self, target): - if self.server is None: - self.server = zmq_server.ZmqServer(self.conf, self.matchmaker) self.server.listen(target) return self.server @@ -130,4 +127,5 @@ class ZmqDriver(base.BaseDriver): return None def cleanup(self): - pass + self.client.cleanup() + self.server.cleanup() diff --git a/oslo_messaging/_drivers/zmq_driver/broker/zmq_base_proxy.py b/oslo_messaging/_drivers/zmq_driver/broker/zmq_base_proxy.py index c13d14d50..758c15c5f 100644 --- a/oslo_messaging/_drivers/zmq_driver/broker/zmq_base_proxy.py +++ b/oslo_messaging/_drivers/zmq_driver/broker/zmq_base_proxy.py @@ -110,6 +110,9 @@ class BaseTcpFrontend(object): LOG.info(_LI("Message %s received."), message) return message + def close(self): + self.frontend.close() + @six.add_metaclass(abc.ABCMeta) class BaseBackendMatcher(object): @@ -124,6 +127,11 @@ class BaseBackendMatcher(object): def 
redirect_to_backend(self, message): """Redirect message""" + def close(self): + if self.backends: + for backend in self.backends.values(): + backend.close() + @six.add_metaclass(abc.ABCMeta) class DirectBackendMatcher(BaseBackendMatcher): diff --git a/oslo_messaging/_drivers/zmq_driver/broker/zmq_fanout_proxy.py b/oslo_messaging/_drivers/zmq_driver/broker/zmq_fanout_proxy.py index 279ce3f04..bf6492ee1 100644 --- a/oslo_messaging/_drivers/zmq_driver/broker/zmq_fanout_proxy.py +++ b/oslo_messaging/_drivers/zmq_driver/broker/zmq_fanout_proxy.py @@ -33,3 +33,6 @@ class PublisherBackend(base_proxy.BaseBackendMatcher): target_pos = zmq_serializer.MESSAGE_CALL_TARGET_POSITION + 1 msg = message[target_pos:] self.backend.send_multipart(msg) + + def close(self): + self.backend.close() diff --git a/oslo_messaging/_drivers/zmq_driver/broker/zmq_universal_proxy.py b/oslo_messaging/_drivers/zmq_driver/broker/zmq_universal_proxy.py index c57a60f9f..82d3e024d 100644 --- a/oslo_messaging/_drivers/zmq_driver/broker/zmq_universal_proxy.py +++ b/oslo_messaging/_drivers/zmq_driver/broker/zmq_universal_proxy.py @@ -50,6 +50,12 @@ class UniversalProxy(base_proxy.BaseProxy): else: self.tcp_frontend.redirect_outgoing_reply(message) + def stop(self): + self.poller.close() + super(UniversalProxy, self).stop() + self.tcp_frontend.close() + self.backend_matcher.close() + class BackendMatcher(base_proxy.BaseBackendMatcher): diff --git a/oslo_messaging/_drivers/zmq_driver/matchmaker/matchmaker_redis.py b/oslo_messaging/_drivers/zmq_driver/matchmaker/matchmaker_redis.py index 1ece3d201..834d35fb3 100644 --- a/oslo_messaging/_drivers/zmq_driver/matchmaker/matchmaker_redis.py +++ b/oslo_messaging/_drivers/zmq_driver/matchmaker/matchmaker_redis.py @@ -49,8 +49,8 @@ class RedisMatchMaker(base.MatchMakerBase): ) def register(self, target, hostname): - key = zmq_target.target_to_str(target) if hostname not in self.get_hosts(target): + key = zmq_target.target_to_str(target) self._redis.lpush(key, hostname) def get_hosts(self, target): diff --git a/oslo_messaging/_drivers/zmq_driver/poller/green_poller.py b/oslo_messaging/_drivers/zmq_driver/poller/green_poller.py index f09bb016a..3e6f5148a 100644 --- a/oslo_messaging/_drivers/zmq_driver/poller/green_poller.py +++ b/oslo_messaging/_drivers/zmq_driver/poller/green_poller.py @@ -29,12 +29,12 @@ class GreenPoller(zmq_poller.ZmqPoller): def __init__(self): self.incoming_queue = six.moves.queue.Queue() self.green_pool = eventlet.GreenPool() - self.sockets = [] + self.threads = [] def register(self, socket, recv_method=None): - self.sockets.append(socket) - return self.green_pool.spawn(self._socket_receive, socket, - recv_method) + self.threads.append( + self.green_pool.spawn(self._socket_receive, socket, + recv_method)) def _socket_receive(self, socket, recv_method=None): while True: @@ -58,6 +58,10 @@ class GreenPoller(zmq_poller.ZmqPoller): return None, None return incoming[0], incoming[1] + def close(self): + for thread in self.threads: + thread.kill() + class HoldReplyPoller(GreenPoller): diff --git a/oslo_messaging/_drivers/zmq_driver/poller/threading_poller.py b/oslo_messaging/_drivers/zmq_driver/poller/threading_poller.py index c6be79323..7719310cf 100644 --- a/oslo_messaging/_drivers/zmq_driver/poller/threading_poller.py +++ b/oslo_messaging/_drivers/zmq_driver/poller/threading_poller.py @@ -42,7 +42,13 @@ class ThreadingPoller(zmq_poller.ZmqPoller): def poll(self, timeout=None): timeout = timeout * 1000 # zmq poller waits milliseconds - sockets = 
dict(self.poller.poll(timeout=timeout)) + sockets = None + + try: + sockets = dict(self.poller.poll(timeout=timeout)) + except zmq.ZMQError as e: + LOG.debug("Polling terminated with error: %s" % e) + if not sockets: return None, None for socket in sockets: @@ -51,6 +57,12 @@ class ThreadingPoller(zmq_poller.ZmqPoller): else: return socket.recv_multipart(), socket + def resume_polling(self, socket): + pass # Nothing to do for threading poller + + def close(self): + pass # Nothing to do for threading poller + class ThreadingExecutor(zmq_poller.Executor): diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_call_request.py b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_call_request.py index aff4f59fc..8bdd9c0af 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_call_request.py +++ b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_call_request.py @@ -33,6 +33,7 @@ class CallRequest(Request): retry=None, allowed_remote_exmods=None, matchmaker=None): self.allowed_remote_exmods = allowed_remote_exmods or [] self.matchmaker = matchmaker + self.reply_poller = zmq_async.get_reply_poller() try: self.zmq_context = zmq.Context() @@ -51,13 +52,16 @@ class CallRequest(Request): LOG.error(_LE("Error connecting to socket: %s") % str(e)) raise + def close(self): + self.reply_poller.close() + self.socket.close() + def receive_reply(self): # NOTE(ozamiatin): Check for retry here (no retries now) - poller = zmq_async.get_reply_poller() - poller.register(self.socket, - recv_method=lambda socket: socket.recv_json()) + self.reply_poller.register( + self.socket, recv_method=lambda socket: socket.recv_json()) - reply, socket = poller.poll(timeout=self.timeout) + reply, socket = self.reply_poller.poll(timeout=self.timeout) if reply is None: raise oslo_messaging.MessagingTimeout( "Timeout %s seconds was reached" % self.timeout) diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_dealer.py b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_dealer.py index 2aad145c2..f28ed428f 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_dealer.py +++ b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_dealer.py @@ -71,7 +71,13 @@ class DealerCastPublisher(zmq_cast_publisher.CastPublisherBase): dealer_socket = self.zmq_context.socket(zmq.DEALER) LOG.info(_LI("Connecting DEALER to %s") % address) dealer_socket.connect(address) + self.outbound_sockets[address] = dealer_socket return dealer_socket except zmq.ZMQError: LOG.error(_LE("Failed connecting DEALER to %s") % address) raise + + def cleanup(self): + if self.outbound_sockets: + for socket in self.outbound_sockets.values(): + socket.close() diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_client.py b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_client.py index cdd291b1f..2bdbee18b 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_client.py +++ b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_client.py @@ -12,6 +12,7 @@ # License for the specific language governing permissions and limitations # under the License. 
+import contextlib from oslo_messaging._drivers.zmq_driver.rpc.client import zmq_call_request from oslo_messaging._drivers.zmq_driver.rpc.client import zmq_cast_dealer @@ -27,10 +28,14 @@ class ZmqClient(object): matchmaker) def call(self, target, context, message, timeout=None, retry=None): - request = zmq_call_request.CallRequest( - self.conf, target, context, message, timeout, retry, - self.allowed_remote_exmods, self.matchmaker) - return request() + with contextlib.closing(zmq_call_request.CallRequest( + self.conf, target, context, message, timeout, retry, + self.allowed_remote_exmods, + self.matchmaker)) as request: + return request() def cast(self, target, context, message, timeout=None, retry=None): self.cast_publisher.cast(target, context, message, timeout, retry) + + def cleanup(self): + self.cast_publisher.cleanup() diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_base_consumer.py b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_base_consumer.py index 9fffe8637..a74b7dcba 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_base_consumer.py +++ b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_base_consumer.py @@ -12,7 +12,12 @@ # License for the specific language governing permissions and limitations # under the License. +import abc +import six + + +@six.add_metaclass(abc.ABCMeta) class ConsumerBase(object): def __init__(self, listener, conf, zmq_poller, context): @@ -22,14 +27,11 @@ class ConsumerBase(object): self.context = context self.sockets_per_target = {} - def poll(self, timeout=None): - pass - - def stop(self): - pass - def cleanup(self): - pass + if self.sockets_per_target: + for socket in self.sockets_per_target.values(): + socket.close() + @abc.abstractmethod def listen(self, target): - pass + """Listen for target""" diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_server.py b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_server.py index ef6e97a32..9621e8ce9 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_server.py +++ b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_server.py @@ -47,7 +47,9 @@ class ZmqServer(base.Listener): LOG.info("[Server] Stop") def cleanup(self): - pass + self.poller.close() + self.call_resp.cleanup() + self.fanout_resp.cleanup() def listen(self, target): LOG.info("[Server] Listen to Target %s" % target) diff --git a/oslo_messaging/_drivers/zmq_driver/zmq_poller.py b/oslo_messaging/_drivers/zmq_driver/zmq_poller.py index dcd51ad7b..02d4ee87c 100644 --- a/oslo_messaging/_drivers/zmq_driver/zmq_poller.py +++ b/oslo_messaging/_drivers/zmq_driver/zmq_poller.py @@ -22,11 +22,15 @@ class ZmqPoller(object): @abc.abstractmethod def register(self, socket, recv_method=None): - 'Register socket to poll' + """Register socket to poll""" @abc.abstractmethod def poll(self, timeout=None): - 'Poll for messages' + """Poll for messages""" + + @abc.abstractmethod + def close(self): + """Terminate polling""" @six.add_metaclass(abc.ABCMeta) From 315e56ae2b91cb7ab5a8e24ead1b9ad8c0120552 Mon Sep 17 00:00:00 2001 From: Oleksii Zamiatin Date: Thu, 23 Jul 2015 14:05:04 +0300 Subject: [PATCH 15/28] Get rid of proxy process in zmq As far as we use redis as a name service we don't need a proxy, becase we can pass binded port over name service too. 
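To sketch the idea behind dropping the proxy (plain pyzmq and redis here, with a hypothetical key layout, not this driver's actual API): the server binds to whatever TCP port is free and advertises host:port through the name service, so clients connect directly and no intermediary proxy process is needed. Assumes a Redis instance is reachable locally.

import socket

import redis
import zmq

r = redis.StrictRedis()
ctx = zmq.Context()

# Server side: bind to any free port and publish the endpoint via the name service.
rep = ctx.socket(zmq.REP)
port = rep.bind_to_random_port('tcp://*')
r.lpush('test_topic', '%s:%d' % (socket.gethostname(), port))

# Client side: resolve the endpoint from the name service and connect directly.
host_port = r.lrange('test_topic', 0, -1)[-1].decode()
req = ctx.socket(zmq.REQ)
req.connect('tcp://%s' % host_port)
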
Change-Id: I59bbe2b34dcedfeef113ef06d6a988e1c413405e --- oslo_messaging/_cmd/__init__.py | 1 - oslo_messaging/_cmd/zmq_receiver.py | 44 ----- .../_drivers/zmq_driver/broker/__init__.py | 1 - .../zmq_driver/broker/zmq_base_proxy.py | 163 ------------------ .../_drivers/zmq_driver/broker/zmq_broker.py | 71 -------- .../zmq_driver/broker/zmq_call_proxy.py | 106 ------------ .../zmq_driver/broker/zmq_fanout_proxy.py | 38 ---- .../zmq_driver/broker/zmq_universal_proxy.py | 72 -------- .../_drivers/zmq_driver/matchmaker/base.py | 2 +- .../zmq_driver/poller/green_poller.py | 13 +- .../zmq_driver/rpc/client/zmq_call_request.py | 17 +- .../zmq_driver/rpc/client/zmq_cast_dealer.py | 12 +- .../zmq_driver/rpc/client/zmq_request.py | 1 - .../rpc/server/zmq_base_consumer.py | 37 ---- .../rpc/server/zmq_fanout_consumer.py | 74 -------- ...l_responder.py => zmq_incoming_message.py} | 41 ++--- .../zmq_driver/rpc/server/zmq_server.py | 73 +++++--- .../_drivers/zmq_driver/zmq_poller.py | 3 + .../_drivers/zmq_driver/zmq_serializer.py | 2 +- .../_drivers/zmq_driver/zmq_target.py | 29 ++-- .../zmq/matchmaker/test_impl_matchmaker.py | 2 +- .../tests/drivers/zmq/test_impl_zmq.py | 32 +--- setup-test-env-zmq.sh | 2 - tests/drivers/test_impl_zmq.py | 43 ++--- 24 files changed, 135 insertions(+), 744 deletions(-) delete mode 100644 oslo_messaging/_cmd/__init__.py delete mode 100644 oslo_messaging/_cmd/zmq_receiver.py delete mode 100644 oslo_messaging/_drivers/zmq_driver/broker/__init__.py delete mode 100644 oslo_messaging/_drivers/zmq_driver/broker/zmq_base_proxy.py delete mode 100644 oslo_messaging/_drivers/zmq_driver/broker/zmq_broker.py delete mode 100644 oslo_messaging/_drivers/zmq_driver/broker/zmq_call_proxy.py delete mode 100644 oslo_messaging/_drivers/zmq_driver/broker/zmq_fanout_proxy.py delete mode 100644 oslo_messaging/_drivers/zmq_driver/broker/zmq_universal_proxy.py delete mode 100644 oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_base_consumer.py delete mode 100644 oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_fanout_consumer.py rename oslo_messaging/_drivers/zmq_driver/rpc/server/{zmq_call_responder.py => zmq_incoming_message.py} (58%) diff --git a/oslo_messaging/_cmd/__init__.py b/oslo_messaging/_cmd/__init__.py deleted file mode 100644 index 8b1378917..000000000 --- a/oslo_messaging/_cmd/__init__.py +++ /dev/null @@ -1 +0,0 @@ - diff --git a/oslo_messaging/_cmd/zmq_receiver.py b/oslo_messaging/_cmd/zmq_receiver.py deleted file mode 100644 index abd24e8d4..000000000 --- a/oslo_messaging/_cmd/zmq_receiver.py +++ /dev/null @@ -1,44 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2011 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import contextlib -import logging -import sys - -from oslo_config import cfg - -from oslo_messaging._drivers import impl_zmq -from oslo_messaging._drivers.zmq_driver.broker import zmq_broker -from oslo_messaging._executors import base # FIXME(markmc) - -CONF = cfg.CONF -CONF.register_opts(impl_zmq.zmq_opts) -CONF.register_opts(base._pool_opts) -# TODO(ozamiatin): Move this option assignment to an external config file -# Use efficient zmq poller in real-world deployment -CONF.rpc_zmq_native = True - - -def main(): - CONF(sys.argv[1:], project='oslo') - logging.basicConfig(level=logging.DEBUG) - - with contextlib.closing(zmq_broker.ZmqBroker(CONF)) as reactor: - reactor.start() - reactor.wait() - -if __name__ == "__main__": - main() diff --git a/oslo_messaging/_drivers/zmq_driver/broker/__init__.py b/oslo_messaging/_drivers/zmq_driver/broker/__init__.py deleted file mode 100644 index 8af3e63a7..000000000 --- a/oslo_messaging/_drivers/zmq_driver/broker/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__author__ = 'ozamiatin' diff --git a/oslo_messaging/_drivers/zmq_driver/broker/zmq_base_proxy.py b/oslo_messaging/_drivers/zmq_driver/broker/zmq_base_proxy.py deleted file mode 100644 index 758c15c5f..000000000 --- a/oslo_messaging/_drivers/zmq_driver/broker/zmq_base_proxy.py +++ /dev/null @@ -1,163 +0,0 @@ -# Copyright 2015 Mirantis, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import abc -import logging - -import six - -from oslo_messaging._drivers.common import RPCException -from oslo_messaging._drivers.zmq_driver import zmq_async -from oslo_messaging._drivers.zmq_driver import zmq_target -from oslo_messaging._i18n import _LE, _LI - -LOG = logging.getLogger(__name__) - -zmq = zmq_async.import_zmq() - - -@six.add_metaclass(abc.ABCMeta) -class BaseProxy(object): - - """Base TCP-proxy. - - TCP-proxy redirects messages received by TCP from clients to servers - over IPC. Consists of TCP-frontend and IPC-backend objects. Runs - in async executor. - """ - - def __init__(self, conf, context): - super(BaseProxy, self).__init__() - self.conf = conf - self.context = context - self.executor = zmq_async.get_executor( - self.run, native_zmq=conf.rpc_zmq_native) - - @abc.abstractmethod - def run(self): - """Main execution point of the proxy""" - - def start(self): - self.executor.execute() - - def stop(self): - self.executor.stop() - - def wait(self): - self.executor.wait() - - -@six.add_metaclass(abc.ABCMeta) -class BaseTcpFrontend(object): - - """Base frontend clause. - - TCP-frontend is a part of TCP-proxy which receives incoming - messages from clients. - """ - - def __init__(self, conf, poller, context, - socket_type=None, - port_number=None, - receive_meth=None): - - """Construct a TCP-frontend. - - Its attributes are: - - :param conf: Driver configuration object. - :type conf: ConfigOpts - :param poller: Messages poller-object green or threading. - :type poller: ZmqPoller - :param context: ZeroMQ context object. - :type context: zmq.Context - :param socket_type: ZeroMQ socket type. 
- :type socket_type: int - :param port_number: Current messaging pipeline port. - :type port_number: int - :param receive_meth: Receive method for poller. - :type receive_meth: method - """ - - self.conf = conf - self.poller = poller - self.context = context - try: - self.frontend = self.context.socket(socket_type) - bind_address = zmq_target.get_tcp_bind_address(port_number) - LOG.info(_LI("Binding to TCP %s") % bind_address) - self.frontend.bind(bind_address) - self.poller.register(self.frontend, receive_meth) - except zmq.ZMQError as e: - errmsg = _LE("Could not create ZeroMQ receiver daemon. " - "Socket may already be in use: %s") % str(e) - LOG.error(errmsg) - raise RPCException(errmsg) - - def receive_incoming(self): - message, socket = self.poller.poll(1) - LOG.info(_LI("Message %s received."), message) - return message - - def close(self): - self.frontend.close() - - -@six.add_metaclass(abc.ABCMeta) -class BaseBackendMatcher(object): - - def __init__(self, conf, poller, context): - self.conf = conf - self.context = context - self.backends = {} - self.poller = poller - - @abc.abstractmethod - def redirect_to_backend(self, message): - """Redirect message""" - - def close(self): - if self.backends: - for backend in self.backends.values(): - backend.close() - - -@six.add_metaclass(abc.ABCMeta) -class DirectBackendMatcher(BaseBackendMatcher): - - def redirect_to_backend(self, message): - backend, target = self._match_backend(message) - self._send_message(backend, message, target) - - def _match_backend(self, message): - target = self._get_target(message) - ipc_address = self._get_ipc_address(target) - backend = self._create_backend(ipc_address) - return backend, target - - @abc.abstractmethod - def _get_target(self, message): - """Extract topic from message""" - - @abc.abstractmethod - def _get_ipc_address(self, target): - """Get ipc backend address from topic""" - - @abc.abstractmethod - def _send_message(self, backend, message, target): - """Backend specific sending logic""" - - @abc.abstractmethod - def _create_backend(self, ipc_address): - """Backend specific socket opening logic""" diff --git a/oslo_messaging/_drivers/zmq_driver/broker/zmq_broker.py b/oslo_messaging/_drivers/zmq_driver/broker/zmq_broker.py deleted file mode 100644 index e3835bae6..000000000 --- a/oslo_messaging/_drivers/zmq_driver/broker/zmq_broker.py +++ /dev/null @@ -1,71 +0,0 @@ -# Copyright 2015 Mirantis, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import logging -import os - -from oslo_utils import excutils - -from oslo_messaging._drivers.zmq_driver.broker import zmq_universal_proxy -from oslo_messaging._drivers.zmq_driver import zmq_async -from oslo_messaging._i18n import _LE, _LI - - -LOG = logging.getLogger(__name__) - - -class ZmqBroker(object): - """Local messaging IPC broker (nodes are still peers). - - The main purpose is to have one TCP connection - (one TCP port assigned for ZMQ messaging) per node. - There could be a number of services running on a node. 
- Without such broker a number of opened TCP ports used for - messaging become unpredictable for the engine. - - All messages are coming to TCP ROUTER socket and then - distributed between their targets by topic via IPC. - """ - - def __init__(self, conf): - super(ZmqBroker, self).__init__() - zmq = zmq_async.import_zmq(native_zmq=conf.rpc_zmq_native) - self.conf = conf - self.context = zmq.Context() - proxy = zmq_universal_proxy.UniversalProxy(conf, self.context) - self.proxies = [proxy] - self._create_ipc_dirs() - - def _create_ipc_dirs(self): - ipc_dir = self.conf.rpc_zmq_ipc_dir - try: - os.makedirs("%s/fanout" % ipc_dir) - except os.error: - if not os.path.isdir(ipc_dir): - with excutils.save_and_reraise_exception(): - LOG.error(_LE("Required IPC directory does not exist at" - " %s"), ipc_dir) - - def start(self): - for proxy in self.proxies: - proxy.start() - - def wait(self): - for proxy in self.proxies: - proxy.wait() - - def close(self): - LOG.info(_LI("Broker shutting down ...")) - for proxy in self.proxies: - proxy.stop() diff --git a/oslo_messaging/_drivers/zmq_driver/broker/zmq_call_proxy.py b/oslo_messaging/_drivers/zmq_driver/broker/zmq_call_proxy.py deleted file mode 100644 index 623303902..000000000 --- a/oslo_messaging/_drivers/zmq_driver/broker/zmq_call_proxy.py +++ /dev/null @@ -1,106 +0,0 @@ -# Copyright 2015 Mirantis, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
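The broker deleted in this patch was essentially a ROUTER-to-DEALER device: a single TCP ROUTER frontend accepted every message arriving at the node and per-target DEALER backends redistributed them over IPC, as the ZmqBroker docstring above describes. A minimal sketch of that device pattern in pyzmq follows; it is an illustration only, and the addresses and the zmq.proxy() shortcut are assumptions rather than the driver's actual wiring.

import zmq

ctx = zmq.Context()
frontend = ctx.socket(zmq.ROUTER)        # single TCP entry point for the node
frontend.bind("tcp://*:9501")
backend = ctx.socket(zmq.DEALER)         # local services connect over IPC
backend.bind("ipc:///tmp/zmq-backend")
zmq.proxy(frontend, backend)             # blocks, shuttling frames both ways

With the broker gone, each server instead binds its own ROUTER socket to a random TCP port, which is what the zmq_server.py changes later in this patch introduce.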
- -import logging - -from oslo_messaging._drivers.common import RPCException -import oslo_messaging._drivers.zmq_driver.broker.zmq_base_proxy as base_proxy -from oslo_messaging._drivers.zmq_driver import zmq_async -from oslo_messaging._drivers.zmq_driver import zmq_serializer -from oslo_messaging._drivers.zmq_driver import zmq_target -from oslo_messaging._i18n import _LE, _LI - -LOG = logging.getLogger(__name__) - -zmq = zmq_async.import_zmq() - - -class DealerBackend(base_proxy.DirectBackendMatcher): - - def __init__(self, conf, context, poller=None): - if poller is None: - poller = zmq_async.get_poller( - native_zmq=conf.rpc_zmq_native) - super(DealerBackend, self).__init__(conf, poller, context) - - def _get_target(self, message): - return zmq_serializer.get_target_from_call_message(message) - - def _get_ipc_address(self, target): - return zmq_target.get_ipc_address_call(self.conf, target) - - def _send_message(self, backend, message, topic): - # Empty needed for awaiting REP socket to work properly - # (DEALER-REP usage specific) - backend.send(b'', zmq.SNDMORE) - backend.send(message.pop(0), zmq.SNDMORE) - backend.send_string(message.pop(0), zmq.SNDMORE) - message.pop(0) # Drop target unneeded any more - backend.send_multipart(message) - - def _create_backend(self, ipc_address): - if ipc_address in self.backends: - return self.backends[ipc_address] - backend = self.context.socket(zmq.DEALER) - backend.connect(ipc_address) - self.poller.register(backend) - self.backends[ipc_address] = backend - return backend - - -class FrontendTcpRouter(base_proxy.BaseTcpFrontend): - - def __init__(self, conf, context, poller=None): - if poller is None: - poller = zmq_async.get_poller( - native_zmq=conf.rpc_zmq_native) - super(FrontendTcpRouter, self).__init__( - conf, poller, context, - socket_type=zmq.ROUTER, - port_number=conf.rpc_zmq_port, - receive_meth=self._receive_message) - - def _receive_message(self, socket): - - try: - reply_id = socket.recv() - empty = socket.recv() - assert empty == b'', "Empty delimiter expected" - msg_type = socket.recv_string() - target_dict = socket.recv_json() - target = zmq_target.target_from_dict(target_dict) - other = socket.recv_multipart() - except zmq.ZMQError as e: - LOG.error(_LE("Error receiving message %s") % str(e)) - return None - - if msg_type == zmq_serializer.FANOUT_TYPE: - other.insert(0, zmq_target.target_to_str(target).encode("utf-8")) - - return [reply_id, msg_type, target] + other - - @staticmethod - def _reduce_empty(reply): - reply.pop(0) - return reply - - def redirect_outgoing_reply(self, reply): - self._reduce_empty(reply) - try: - self.frontend.send_multipart(reply) - LOG.info(_LI("Redirecting reply to client %s") % reply) - except zmq.ZMQError: - errmsg = _LE("Failed redirecting reply to client %s") % reply - LOG.error(errmsg) - raise RPCException(errmsg) diff --git a/oslo_messaging/_drivers/zmq_driver/broker/zmq_fanout_proxy.py b/oslo_messaging/_drivers/zmq_driver/broker/zmq_fanout_proxy.py deleted file mode 100644 index bf6492ee1..000000000 --- a/oslo_messaging/_drivers/zmq_driver/broker/zmq_fanout_proxy.py +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright 2015 Mirantis, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - - -import oslo_messaging._drivers.zmq_driver.broker.zmq_base_proxy as base_proxy -from oslo_messaging._drivers.zmq_driver import zmq_async -from oslo_messaging._drivers.zmq_driver import zmq_serializer -from oslo_messaging._drivers.zmq_driver import zmq_target - -zmq = zmq_async.import_zmq() - - -class PublisherBackend(base_proxy.BaseBackendMatcher): - - def __init__(self, conf, context): - poller = zmq_async.get_poller(native_zmq=conf.rpc_zmq_native) - super(PublisherBackend, self).__init__(conf, poller, context) - self.backend = self.context.socket(zmq.PUB) - self.backend.bind(zmq_target.get_ipc_address_fanout(conf)) - - def redirect_to_backend(self, message): - target_pos = zmq_serializer.MESSAGE_CALL_TARGET_POSITION + 1 - msg = message[target_pos:] - self.backend.send_multipart(msg) - - def close(self): - self.backend.close() diff --git a/oslo_messaging/_drivers/zmq_driver/broker/zmq_universal_proxy.py b/oslo_messaging/_drivers/zmq_driver/broker/zmq_universal_proxy.py deleted file mode 100644 index 82d3e024d..000000000 --- a/oslo_messaging/_drivers/zmq_driver/broker/zmq_universal_proxy.py +++ /dev/null @@ -1,72 +0,0 @@ -# Copyright 2015 Mirantis, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
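The frame layout that FrontendTcpRouter._receive_message above unpacks (sender identity, empty delimiter, message type, then JSON-encoded payloads) is standard REQ-to-ROUTER framing, and the reworked ZmqServer later in this patch reads the same layout. The following standalone sketch shows that framing in isolation; it is a hedged illustration, not the driver's actual request/reply path.

import json
import zmq

ctx = zmq.Context()
router = ctx.socket(zmq.ROUTER)
port = router.bind_to_random_port("tcp://*")

req = ctx.socket(zmq.REQ)
req.connect("tcp://127.0.0.1:%d" % port)
req.send_string("call", zmq.SNDMORE)             # message type
req.send_json({"user": "demo"}, zmq.SNDMORE)      # context
req.send_json({"method": "hello-world"})          # message body

# The ROUTER sees: [reply_id, b'', msg_type, context_json, message_json]
reply_id, empty, msg_type, context, message = router.recv_multipart()
assert empty == b""
router.send_multipart([reply_id, b"", json.dumps({"reply": "ok"}).encode()])
print(req.recv_json())                            # {'reply': 'ok'}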
- -import logging - -import oslo_messaging._drivers.zmq_driver.broker.zmq_base_proxy as base_proxy -from oslo_messaging._drivers.zmq_driver.broker import zmq_call_proxy -from oslo_messaging._drivers.zmq_driver.broker import zmq_fanout_proxy -from oslo_messaging._drivers.zmq_driver import zmq_async -from oslo_messaging._drivers.zmq_driver import zmq_serializer -from oslo_messaging._i18n import _LI - -LOG = logging.getLogger(__name__) - - -class UniversalProxy(base_proxy.BaseProxy): - - def __init__(self, conf, context): - super(UniversalProxy, self).__init__(conf, context) - self.poller = zmq_async.get_poller( - native_zmq=conf.rpc_zmq_native) - self.tcp_frontend = zmq_call_proxy.FrontendTcpRouter( - conf, context, poller=self.poller) - self.backend_matcher = BackendMatcher( - conf, context, poller=self.poller) - call = zmq_serializer.CALL_TYPE - self.call_backend = self.backend_matcher.backends[call] - LOG.info(_LI("Starting universal-proxy thread")) - - def run(self): - message, socket = self.poller.poll(self.conf.rpc_poll_timeout) - if message is None: - return - - LOG.info(_LI("Received message at universal proxy: %s") % str(message)) - - if socket == self.tcp_frontend.frontend: - self.backend_matcher.redirect_to_backend(message) - else: - self.tcp_frontend.redirect_outgoing_reply(message) - - def stop(self): - self.poller.close() - super(UniversalProxy, self).stop() - self.tcp_frontend.close() - self.backend_matcher.close() - - -class BackendMatcher(base_proxy.BaseBackendMatcher): - - def __init__(self, conf, context, poller=None): - super(BackendMatcher, self).__init__(conf, poller, context) - direct_backend = zmq_call_proxy.DealerBackend(conf, context, poller) - self.backends[zmq_serializer.CALL_TYPE] = direct_backend - self.backends[zmq_serializer.CAST_TYPE] = direct_backend - fanout_backend = zmq_fanout_proxy.PublisherBackend(conf, context) - self.backends[zmq_serializer.FANOUT_TYPE] = fanout_backend - - def redirect_to_backend(self, message): - message_type = zmq_serializer.get_msg_type(message) - self.backends[message_type].redirect_to_backend(message) diff --git a/oslo_messaging/_drivers/zmq_driver/matchmaker/base.py b/oslo_messaging/_drivers/zmq_driver/matchmaker/base.py index 766e367c2..1bd75f2a2 100644 --- a/oslo_messaging/_drivers/zmq_driver/matchmaker/base.py +++ b/oslo_messaging/_drivers/zmq_driver/matchmaker/base.py @@ -46,7 +46,7 @@ class MatchMakerBase(object): if len(hosts) == 0: LOG.warning(_LW("No hosts were found for target %s. 
Using " "localhost") % target) - return "localhost" + return "localhost:" + str(self.conf.rpc_zmq_port) elif len(hosts) == 1: LOG.info(_LI("A single host found for target %s.") % target) return hosts[0] diff --git a/oslo_messaging/_drivers/zmq_driver/poller/green_poller.py b/oslo_messaging/_drivers/zmq_driver/poller/green_poller.py index 3e6f5148a..72429f1f1 100644 --- a/oslo_messaging/_drivers/zmq_driver/poller/green_poller.py +++ b/oslo_messaging/_drivers/zmq_driver/poller/green_poller.py @@ -62,12 +62,15 @@ class GreenPoller(zmq_poller.ZmqPoller): for thread in self.threads: thread.kill() + self.threads = [] + class HoldReplyPoller(GreenPoller): def __init__(self): super(HoldReplyPoller, self).__init__() self.event_by_socket = {} + self._is_running = threading.Event() def register(self, socket, recv_method=None): super(HoldReplyPoller, self).register(socket, recv_method) @@ -79,7 +82,7 @@ class HoldReplyPoller(GreenPoller): def _socket_receive(self, socket, recv_method=None): pause = self.event_by_socket[socket] - while True: + while not self._is_running.is_set(): pause.clear() if recv_method: incoming = recv_method(socket) @@ -88,6 +91,14 @@ class HoldReplyPoller(GreenPoller): self.incoming_queue.put((incoming, socket)) pause.wait() + def close(self): + self._is_running.set() + for pause in self.event_by_socket.values(): + pause.set() + eventlet.sleep() + + super(HoldReplyPoller, self).close() + class GreenExecutor(zmq_poller.Executor): diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_call_request.py b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_call_request.py index 8bdd9c0af..dea54d471 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_call_request.py +++ b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_call_request.py @@ -42,25 +42,26 @@ class CallRequest(Request): message, socket, zmq_serializer.CALL_TYPE, timeout, retry) - self.host = self.matchmaker.get_single_host(self.target) - self.connect_address = zmq_target.get_tcp_address_call(conf, - self.host) + self.connect_address = zmq_target.get_tcp_direct_address( + self.host) LOG.info(_LI("Connecting REQ to %s") % self.connect_address) self.socket.connect(self.connect_address) + self.reply_poller.register( + self.socket, recv_method=lambda socket: socket.recv_json()) + except zmq.ZMQError as e: - LOG.error(_LE("Error connecting to socket: %s") % str(e)) - raise + errmsg = _LE("Error connecting to socket: %s") % str(e) + LOG.error(errmsg) + raise rpc_common.RPCException(errmsg) def close(self): self.reply_poller.close() + self.socket.setsockopt(zmq.LINGER, 0) self.socket.close() def receive_reply(self): # NOTE(ozamiatin): Check for retry here (no retries now) - self.reply_poller.register( - self.socket, recv_method=lambda socket: socket.recv_json()) - reply, socket = self.reply_poller.poll(timeout=self.timeout) if reply is None: raise oslo_messaging.MessagingTimeout( diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_dealer.py b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_dealer.py index f28ed428f..75551a6a4 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_dealer.py +++ b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_dealer.py @@ -14,6 +14,7 @@ import logging +from oslo_messaging._drivers import common as rpc_common from oslo_messaging._drivers.zmq_driver.rpc.client import zmq_cast_publisher from oslo_messaging._drivers.zmq_driver.rpc.client.zmq_request import Request from oslo_messaging._drivers.zmq_driver import zmq_async @@ -58,7 +59,7 @@ 
class DealerCastPublisher(zmq_cast_publisher.CastPublisherBase): def cast(self, target, context, message, timeout=None, retry=None): host = self.matchmaker.get_single_host(target) - connect_address = zmq_target.get_tcp_address_call(self.conf, host) + connect_address = zmq_target.get_tcp_direct_address(host) dealer_socket = self._create_socket(connect_address) request = CastRequest(self.conf, target, context, message, dealer_socket, connect_address, timeout, retry) @@ -73,11 +74,14 @@ class DealerCastPublisher(zmq_cast_publisher.CastPublisherBase): dealer_socket.connect(address) self.outbound_sockets[address] = dealer_socket return dealer_socket - except zmq.ZMQError: - LOG.error(_LE("Failed connecting DEALER to %s") % address) - raise + except zmq.ZMQError as e: + errmsg = _LE("Failed connecting DEALER to %(address)s: %(e)s")\ + % (address, e) + LOG.error(errmsg) + raise rpc_common.RPCException(errmsg) def cleanup(self): if self.outbound_sockets: for socket in self.outbound_sockets.values(): + socket.setsockopt(zmq.LINGER, 0) socket.close() diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_request.py b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_request.py index 144c5c107..3f8e1da61 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_request.py +++ b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_request.py @@ -62,7 +62,6 @@ class Request(object): def send_request(self): self.socket.send_string(self.msg_type, zmq.SNDMORE) self.socket.send_json(self.target.__dict__, zmq.SNDMORE) - self.socket.send_string(self.msg_id, zmq.SNDMORE) self.socket.send_json(self.context, zmq.SNDMORE) self.socket.send_json(self.message) diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_base_consumer.py b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_base_consumer.py deleted file mode 100644 index a74b7dcba..000000000 --- a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_base_consumer.py +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright 2015 Mirantis, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import abc - -import six - - -@six.add_metaclass(abc.ABCMeta) -class ConsumerBase(object): - - def __init__(self, listener, conf, zmq_poller, context): - self.listener = listener - self.conf = conf - self.poller = zmq_poller - self.context = context - self.sockets_per_target = {} - - def cleanup(self): - if self.sockets_per_target: - for socket in self.sockets_per_target.values(): - socket.close() - - @abc.abstractmethod - def listen(self, target): - """Listen for target""" diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_fanout_consumer.py b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_fanout_consumer.py deleted file mode 100644 index 0c54ca36b..000000000 --- a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_fanout_consumer.py +++ /dev/null @@ -1,74 +0,0 @@ -# Copyright 2015 Mirantis, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - - -import logging - -import six - -from oslo_messaging._drivers import base -from oslo_messaging._drivers.zmq_driver.rpc.server import zmq_base_consumer -from oslo_messaging._drivers.zmq_driver import zmq_async -from oslo_messaging._drivers.zmq_driver import zmq_target as topic_utils -from oslo_messaging._i18n import _LE - - -LOG = logging.getLogger(__name__) - -zmq = zmq_async.import_zmq() - - -class ZmqFanoutMessage(base.IncomingMessage): - - def __init__(self, listener, context, message, socket, poller): - super(ZmqFanoutMessage, self).__init__(listener, context, message) - poller.resume_polling(socket) - - def reply(self, reply=None, failure=None, log_failure=True): - """Reply is not needed for fanout(cast) messages""" - - def acknowledge(self): - pass - - def requeue(self): - pass - - -class FanoutConsumer(zmq_base_consumer.ConsumerBase): - - def _receive_message(self, socket): - try: - topic = socket.recv_string() - assert topic is not None, 'Bad format: Topic is expected' - msg_id = socket.recv_string() - assert msg_id is not None, 'Bad format: message ID expected' - context = socket.recv_json() - message = socket.recv_json() - LOG.debug("[Server] REP Received message %s" % str(message)) - incoming = ZmqFanoutMessage(self.listener, context, message, - socket, self.poller) - return incoming - except zmq.ZMQError as e: - LOG.error(_LE("Receiving message failed ... 
{}"), e) - - def listen(self, target): - topic = topic_utils.target_to_str(target) - ipc_address = topic_utils.get_ipc_address_fanout(self.conf) - sub_socket = self.context.socket(zmq.SUB) - sub_socket.connect(ipc_address) - if six.PY3: - sub_socket.setsockopt_string(zmq.SUBSCRIBE, str(topic)) - else: - sub_socket.setsockopt(zmq.SUBSCRIBE, str(topic)) - self.poller.register(sub_socket, self._receive_message) diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_call_responder.py b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_incoming_message.py similarity index 58% rename from oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_call_responder.py rename to oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_incoming_message.py index f440359a2..1373019e1 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_call_responder.py +++ b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_incoming_message.py @@ -17,11 +17,8 @@ import logging from oslo_messaging._drivers import base from oslo_messaging._drivers import common as rpc_common -from oslo_messaging._drivers.zmq_driver.rpc.server import zmq_base_consumer from oslo_messaging._drivers.zmq_driver import zmq_async from oslo_messaging._drivers.zmq_driver import zmq_serializer -from oslo_messaging._drivers.zmq_driver import zmq_target -from oslo_messaging._i18n import _LE LOG = logging.getLogger(__name__) @@ -59,31 +56,17 @@ class ZmqIncomingRequest(base.IncomingMessage): pass -class CallResponder(zmq_base_consumer.ConsumerBase): +class ZmqFanoutMessage(base.IncomingMessage): - def _receive_message(self, socket): - try: - reply_id = socket.recv() - msg_type = socket.recv_string() - assert msg_type is not None, 'Bad format: msg type expected' - msg_id = socket.recv_string() - assert msg_id is not None, 'Bad format: message ID expected' - context = socket.recv_json() - message = socket.recv_json() - LOG.debug("[Server] REP Received message %s" % str(message)) - incoming = ZmqIncomingRequest(self.listener, - context, - message, socket, - reply_id, - self.poller) - return incoming - except zmq.ZMQError as e: - LOG.error(_LE("Receiving message failed: %s") % str(e)) + def __init__(self, listener, context, message, socket, poller): + super(ZmqFanoutMessage, self).__init__(listener, context, message) + poller.resume_polling(socket) - def listen(self, target): - ipc_rep_address = zmq_target.get_ipc_address_call(self.conf, target) - rep_socket = self.context.socket(zmq.REP) - rep_socket.bind(ipc_rep_address) - str_target = zmq_target.target_to_str(target) - self.sockets_per_target[str_target] = rep_socket - self.poller.register(rep_socket, self._receive_message) + def reply(self, reply=None, failure=None, log_failure=True): + """Reply is not needed for fanout(cast) messages""" + + def acknowledge(self): + pass + + def requeue(self): + pass diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_server.py b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_server.py index 9621e8ce9..0132aacaf 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_server.py +++ b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_server.py @@ -15,9 +15,12 @@ import logging from oslo_messaging._drivers import base -from oslo_messaging._drivers.zmq_driver.rpc.server import zmq_call_responder -from oslo_messaging._drivers.zmq_driver.rpc.server import zmq_fanout_consumer +from oslo_messaging._drivers import common as rpc_common +from oslo_messaging._drivers.zmq_driver.rpc.server import zmq_incoming_message from oslo_messaging._drivers.zmq_driver 
import zmq_async +from oslo_messaging._drivers.zmq_driver import zmq_serializer +from oslo_messaging._drivers.zmq_driver import zmq_target +from oslo_messaging._i18n import _LE LOG = logging.getLogger(__name__) @@ -27,36 +30,64 @@ zmq = zmq_async.import_zmq() class ZmqServer(base.Listener): def __init__(self, conf, matchmaker=None): - LOG.info("[Server] __init__") self.conf = conf - self.context = zmq.Context() - self.poller = zmq_async.get_reply_poller() + try: + self.context = zmq.Context() + self.socket = self.context.socket(zmq.ROUTER) + self.address = zmq_target.get_tcp_random_address(conf) + self.port = self.socket.bind_to_random_port(self.address) + LOG.info("Run server on tcp://%s:%d" % + (self.address, self.port)) + except zmq.ZMQError as e: + errmsg = _LE("Failed binding to port %(port)d: %(e)s")\ + % (self.port, e) + LOG.error(errmsg) + raise rpc_common.RPCException(errmsg) + + self.poller = zmq_async.get_poller() + self.poller.register(self.socket, self._receive_message) self.matchmaker = matchmaker - self.call_resp = zmq_call_responder.CallResponder(self, conf, - self.poller, - self.context) - self.fanout_resp = zmq_fanout_consumer.FanoutConsumer(self, conf, - self.poller, - self.context) def poll(self, timeout=None): incoming = self.poller.poll(timeout or self.conf.rpc_poll_timeout) return incoming[0] def stop(self): - LOG.info("[Server] Stop") + LOG.info("Stop server tcp://%s:%d" % (self.address, self.port)) def cleanup(self): self.poller.close() - self.call_resp.cleanup() - self.fanout_resp.cleanup() + if not self.socket.closed: + self.socket.setsockopt(zmq.LINGER, 0) + self.socket.close() def listen(self, target): - LOG.info("[Server] Listen to Target %s" % target) - + LOG.info("Listen to Target %s on tcp://%s:%d" % + (target, self.address, self.port)) + host = zmq_target.combine_address(self.conf.rpc_zmq_host, self.port) self.matchmaker.register(target=target, - hostname=self.conf.rpc_zmq_host) - if target.fanout: - self.fanout_resp.listen(target) - else: - self.call_resp.listen(target) + hostname=host) + + def _receive_message(self, socket): + try: + reply_id = socket.recv() + empty = socket.recv() + assert empty == b'', 'Bad format: empty delimiter expected' + msg_type = socket.recv_string() + assert msg_type is not None, 'Bad format: msg type expected' + target_dict = socket.recv_json() + assert target_dict is not None, 'Bad format: target expected' + context = socket.recv_json() + message = socket.recv_json() + LOG.debug("Received CALL message %s" % str(message)) + + direct_type = (zmq_serializer.CALL_TYPE, zmq_serializer.CAST_TYPE) + if msg_type in direct_type: + return zmq_incoming_message.ZmqIncomingRequest( + self, context, message, socket, reply_id, self.poller) + elif msg_type == zmq_serializer.FANOUT_TYPE: + return zmq_incoming_message.ZmqFanoutMessage( + self, context, message, socket, self.poller) + + except zmq.ZMQError as e: + LOG.error(_LE("Receiving message failed: %s") % str(e)) diff --git a/oslo_messaging/_drivers/zmq_driver/zmq_poller.py b/oslo_messaging/_drivers/zmq_driver/zmq_poller.py index 02d4ee87c..437c841ab 100644 --- a/oslo_messaging/_drivers/zmq_driver/zmq_poller.py +++ b/oslo_messaging/_drivers/zmq_driver/zmq_poller.py @@ -32,6 +32,9 @@ class ZmqPoller(object): def close(self): """Terminate polling""" + def resume_polling(self, socket): + """Resume with polling""" + @six.add_metaclass(abc.ABCMeta) class Executor(object): diff --git a/oslo_messaging/_drivers/zmq_driver/zmq_serializer.py b/oslo_messaging/_drivers/zmq_driver/zmq_serializer.py 
index cda3aca4e..41663f639 100644 --- a/oslo_messaging/_drivers/zmq_driver/zmq_serializer.py +++ b/oslo_messaging/_drivers/zmq_driver/zmq_serializer.py @@ -44,7 +44,7 @@ def get_msg_type(message): if type not in MESSAGE_TYPES: errmsg = _LE("Unknown message type: %s") % str(type) LOG.error(errmsg) - rpc_common.RPCException(errmsg) + raise rpc_common.RPCException(errmsg) return type diff --git a/oslo_messaging/_drivers/zmq_driver/zmq_target.py b/oslo_messaging/_drivers/zmq_driver/zmq_target.py index a5e5de8dd..3db6aace3 100644 --- a/oslo_messaging/_drivers/zmq_driver/zmq_target.py +++ b/oslo_messaging/_drivers/zmq_driver/zmq_target.py @@ -15,11 +15,6 @@ from oslo_messaging import target -def get_ipc_address_call(conf, target): - target_addr = target_to_str(target) - return "ipc://%s/%s" % (conf.rpc_zmq_ipc_dir, target_addr) - - def get_tcp_bind_address(port): return "tcp://*:%s" % port @@ -28,19 +23,27 @@ def get_tcp_address_call(conf, host): return "tcp://%s:%s" % (host, conf.rpc_zmq_port) -def get_ipc_address_cast(conf, target): - target_addr = target_to_str(target) - return "ipc://%s/fanout/%s" % (conf.rpc_zmq_ipc_dir, target_addr) +def combine_address(host, port): + return "%s:%s" % (host, port) -def get_ipc_address_fanout(conf): - return "ipc://%s/fanout_general" % conf.rpc_zmq_ipc_dir +def get_tcp_direct_address(host): + return "tcp://%s" % (host) + + +def get_tcp_random_address(conf): + return "tcp://*" def target_to_str(target): - if target.server is None: - return target.topic - return "%s.%s" % (target.server, target.topic) + items = [] + if target.topic: + items.append(target.topic) + if target.exchange: + items.append(target.exchange) + if target.server: + items.append(target.server) + return '.'.join(items) def target_from_dict(target_dict): diff --git a/oslo_messaging/tests/drivers/zmq/matchmaker/test_impl_matchmaker.py b/oslo_messaging/tests/drivers/zmq/matchmaker/test_impl_matchmaker.py index 9cae5fe6a..da296d82f 100644 --- a/oslo_messaging/tests/drivers/zmq/matchmaker/test_impl_matchmaker.py +++ b/oslo_messaging/tests/drivers/zmq/matchmaker/test_impl_matchmaker.py @@ -77,4 +77,4 @@ class TestImplMatchmaker(test_utils.BaseTestCase): def test_get_single_host_wrong_topic(self): target = oslo_messaging.Target(topic="no_such_topic") self.assertEqual(self.test_matcher.get_single_host(target), - "localhost") + "localhost:9501") diff --git a/oslo_messaging/tests/drivers/zmq/test_impl_zmq.py b/oslo_messaging/tests/drivers/zmq/test_impl_zmq.py index 563483a25..d191ae64c 100644 --- a/oslo_messaging/tests/drivers/zmq/test_impl_zmq.py +++ b/oslo_messaging/tests/drivers/zmq/test_impl_zmq.py @@ -13,7 +13,6 @@ # under the License. 
import logging -import socket import threading import fixtures @@ -21,7 +20,6 @@ import testtools import oslo_messaging from oslo_messaging._drivers import impl_zmq -from oslo_messaging._drivers.zmq_driver.broker.zmq_broker import ZmqBroker from oslo_messaging._drivers.zmq_driver import zmq_async from oslo_messaging._i18n import _ from oslo_messaging.tests import utils as test_utils @@ -61,40 +59,26 @@ class TestRPCServerListener(object): self.executor.stop() -def get_unused_port(): - """Returns an unused port on localhost.""" - s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - s.bind(('localhost', 0)) - port = s.getsockname()[1] - s.close() - return port - - class ZmqBaseTestCase(test_utils.BaseTestCase): - """Base test case for all ZMQ tests that make use of the ZMQ Proxy""" + """Base test case for all ZMQ tests """ @testtools.skipIf(zmq is None, "zmq not available") def setUp(self): super(ZmqBaseTestCase, self).setUp() self.messaging_conf.transport_driver = 'zmq' - # Get driver - transport = oslo_messaging.get_transport(self.conf) - self.driver = transport._driver # Set config values self.internal_ipc_dir = self.useFixture(fixtures.TempDir()).path kwargs = {'rpc_zmq_bind_address': '127.0.0.1', 'rpc_zmq_host': '127.0.0.1', 'rpc_response_timeout': 5, - 'rpc_zmq_port': get_unused_port(), - 'rpc_zmq_ipc_dir': self.internal_ipc_dir} + 'rpc_zmq_ipc_dir': self.internal_ipc_dir, + 'rpc_zmq_matchmaker': 'dummy'} self.config(**kwargs) - # Start RPC - LOG.info("Running internal zmq receiver.") - self.broker = ZmqBroker(self.conf) - self.broker.start() + # Get driver + transport = oslo_messaging.get_transport(self.conf) + self.driver = transport._driver self.listener = TestRPCServerListener(self.driver) @@ -118,8 +102,6 @@ class stopRpc(object): self.attrs = attrs def __call__(self): - if self.attrs['broker']: - self.attrs['broker'].close() if self.attrs['driver']: self.attrs['driver'].cleanup() if self.attrs['listener']: @@ -151,7 +133,7 @@ class TestZmqBasics(ZmqBaseTestCase): def test_send_noreply(self): """Cast() with topic.""" - target = oslo_messaging.Target(topic='testtopic', server="127.0.0.1") + target = oslo_messaging.Target(topic='testtopic', server="my@server") self.listener.listen(target) result = self.driver.send( target, {}, diff --git a/setup-test-env-zmq.sh b/setup-test-env-zmq.sh index b27ee9d3f..353c2602c 100755 --- a/setup-test-env-zmq.sh +++ b/setup-test-env-zmq.sh @@ -22,6 +22,4 @@ EOF redis-server --port $ZMQ_REDIS_PORT & -oslo-messaging-zmq-receiver --config-file ${DATADIR}/zmq.conf > ${DATADIR}/receiver.log 2>&1 & - $* diff --git a/tests/drivers/test_impl_zmq.py b/tests/drivers/test_impl_zmq.py index fb12ac760..d191ae64c 100644 --- a/tests/drivers/test_impl_zmq.py +++ b/tests/drivers/test_impl_zmq.py @@ -13,7 +13,6 @@ # under the License. 
import logging -import socket import threading import fixtures @@ -21,7 +20,6 @@ import testtools import oslo_messaging from oslo_messaging._drivers import impl_zmq -from oslo_messaging._drivers.zmq_driver.broker.zmq_broker import ZmqBroker from oslo_messaging._drivers.zmq_driver import zmq_async from oslo_messaging._i18n import _ from oslo_messaging.tests import utils as test_utils @@ -61,40 +59,26 @@ class TestRPCServerListener(object): self.executor.stop() -def get_unused_port(): - """Returns an unused port on localhost.""" - s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - s.bind(('localhost', 0)) - port = s.getsockname()[1] - s.close() - return port - - class ZmqBaseTestCase(test_utils.BaseTestCase): - """Base test case for all ZMQ tests that make use of the ZMQ Proxy""" + """Base test case for all ZMQ tests """ @testtools.skipIf(zmq is None, "zmq not available") def setUp(self): super(ZmqBaseTestCase, self).setUp() self.messaging_conf.transport_driver = 'zmq' - # Get driver - transport = oslo_messaging.get_transport(self.conf) - self.driver = transport._driver # Set config values self.internal_ipc_dir = self.useFixture(fixtures.TempDir()).path kwargs = {'rpc_zmq_bind_address': '127.0.0.1', 'rpc_zmq_host': '127.0.0.1', 'rpc_response_timeout': 5, - 'rpc_zmq_port': get_unused_port(), - 'rpc_zmq_ipc_dir': self.internal_ipc_dir} + 'rpc_zmq_ipc_dir': self.internal_ipc_dir, + 'rpc_zmq_matchmaker': 'dummy'} self.config(**kwargs) - # Start RPC - LOG.info("Running internal zmq receiver.") - self.broker = ZmqBroker(self.conf) - self.broker.start() + # Get driver + transport = oslo_messaging.get_transport(self.conf) + self.driver = transport._driver self.listener = TestRPCServerListener(self.driver) @@ -118,8 +102,6 @@ class stopRpc(object): self.attrs = attrs def __call__(self): - if self.attrs['broker']: - self.attrs['broker'].close() if self.attrs['driver']: self.attrs['driver'].cleanup() if self.attrs['listener']: @@ -146,12 +128,12 @@ class TestZmqBasics(ZmqBaseTestCase): target, {}, {'method': 'hello-world', 'tx_id': 1}, wait_for_reply=True) - self.assertIsNotNone(result) + self.assertTrue(result) def test_send_noreply(self): """Cast() with topic.""" - target = oslo_messaging.Target(topic='testtopic', server="127.0.0.1") + target = oslo_messaging.Target(topic='testtopic', server="my@server") self.listener.listen(target) result = self.driver.send( target, {}, @@ -165,20 +147,21 @@ class TestZmqBasics(ZmqBaseTestCase): method = self.listener.message.message[u'method'] self.assertEqual(u'hello-world', method) - @testtools.skip("Not implemented feature") def test_send_fanout(self): target = oslo_messaging.Target(topic='testtopic', fanout=True) - self.driver.listen(target) + self.listener.listen(target) result = self.driver.send( target, {}, {'method': 'hello-world', 'tx_id': 1}, wait_for_reply=False) + self.listener._received.wait() + self.assertIsNone(result) self.assertEqual(True, self.listener._received.isSet()) - msg_pattern = "{'method': 'hello-world', 'tx_id': 1}" - self.assertEqual(msg_pattern, self.listener.message) + method = self.listener.message.message[u'method'] + self.assertEqual(u'hello-world', method) def test_send_receive_direct(self): """Call() without topic.""" From de629d81047dd6cf8b8cd195aa28785ef6748dd8 Mon Sep 17 00:00:00 2001 From: Victor Sergeyev Date: Thu, 23 Jul 2015 18:31:11 +0300 Subject: [PATCH 16/28] ZMQ: Run more functional tests Change-Id: Ia7b001bf5aba1120544dcc15c5200c50ebe731f6 --- 
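The matchmaker changes below rework host resolution: get_single_host() now raises InvalidTarget instead of silently falling back to localhost when no host is registered, and it picks a random host when several are; the Redis matchmaker stores hosts under a "topic:exchange:server" key, substituting "*" for unset attributes so lookups can use pattern matching. A minimal sketch of that key scheme, using a plain dict and fnmatch in place of Redis (the helper names and sample values are illustrative assumptions):

import fnmatch
import random


def target_to_key(topic=None, exchange=None, server=None):
    # unset attributes become "*" so wildcard lookups can match any value
    return ":".join(attr or "*" for attr in (topic, exchange, server))


def get_hosts(registry, topic=None, exchange=None, server=None):
    pattern = target_to_key(topic, exchange, server)
    if "*" not in pattern:
        return registry.get(pattern, [])          # exact key, no wildcards
    hosts = []
    for key, key_hosts in registry.items():
        if fnmatch.fnmatch(key, pattern):
            hosts.extend(key_hosts)
    return hosts


registry = {"testtopic:openstack:server-1": ["127.0.0.1:9501"],
            "testtopic:openstack:server-2": ["127.0.0.1:9502"]}
hosts = get_hosts(registry, topic="testtopic")
host = random.choice(hosts) if hosts else None    # the driver raises InvalidTarget here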
.../_drivers/zmq_driver/matchmaker/base.py | 45 ++++++++++---- .../zmq_driver/matchmaker/matchmaker_redis.py | 26 ++++++-- .../zmq_driver/rpc/client/zmq_call_request.py | 3 +- .../zmq_driver/rpc/client/zmq_cast_dealer.py | 61 +++++++++++-------- .../zmq_driver/rpc/client/zmq_request.py | 10 ++- .../zmq_driver/rpc/server/zmq_server.py | 3 +- .../_drivers/zmq_driver/zmq_target.py | 22 ------- .../zmq/matchmaker/test_impl_matchmaker.py | 8 +-- .../tests/functional/test_functional.py | 7 +++ tox.ini | 6 +- 10 files changed, 112 insertions(+), 79 deletions(-) diff --git a/oslo_messaging/_drivers/zmq_driver/matchmaker/base.py b/oslo_messaging/_drivers/zmq_driver/matchmaker/base.py index 1bd75f2a2..b42221255 100644 --- a/oslo_messaging/_drivers/zmq_driver/matchmaker/base.py +++ b/oslo_messaging/_drivers/zmq_driver/matchmaker/base.py @@ -14,10 +14,11 @@ import abc import collections import logging +import random import six -from oslo_messaging._drivers.zmq_driver import zmq_target +import oslo_messaging from oslo_messaging._i18n import _LI, _LW @@ -34,26 +35,44 @@ class MatchMakerBase(object): @abc.abstractmethod def register(self, target, hostname): - """Register target on nameserver""" + """Register target on nameserver. + + :param target: the target for host + :type target: Target + :param hostname: host for the topic in "host:port" format + :type hostname: String + """ @abc.abstractmethod def get_hosts(self, target): - """Get hosts from nameserver by target""" + """Get all hosts from nameserver by target. + + :param target: the default target for invocations + :type target: Target + :returns: a list of "hostname:port" hosts + """ def get_single_host(self, target): - """Get a single host by target""" + """Get a single host by target. + + :param target: the target for messages + :type target: Target + :returns: a "hostname:port" host + """ + hosts = self.get_hosts(target) - if len(hosts) == 0: - LOG.warning(_LW("No hosts were found for target %s. Using " - "localhost") % target) - return "localhost:" + str(self.conf.rpc_zmq_port) - elif len(hosts) == 1: + if not hosts: + err_msg = "No hosts were found for target %s." % target + LOG.error(err_msg) + raise oslo_messaging.InvalidTarget(err_msg, target) + + if len(hosts) == 1: LOG.info(_LI("A single host found for target %s.") % target) return hosts[0] else: LOG.warning(_LW("Multiple hosts were found for target %s. 
Using " - "the first one.") % target) - return hosts[0] + "the random one.") % target) + return random.choice(hosts) class DummyMatchMaker(MatchMakerBase): @@ -64,10 +83,10 @@ class DummyMatchMaker(MatchMakerBase): self._cache = collections.defaultdict(list) def register(self, target, hostname): - key = zmq_target.target_to_str(target) + key = str(target) if hostname not in self._cache[key]: self._cache[key].append(hostname) def get_hosts(self, target): - key = zmq_target.target_to_str(target) + key = str(target) return self._cache[key] diff --git a/oslo_messaging/_drivers/zmq_driver/matchmaker/matchmaker_redis.py b/oslo_messaging/_drivers/zmq_driver/matchmaker/matchmaker_redis.py index 834d35fb3..4ef078631 100644 --- a/oslo_messaging/_drivers/zmq_driver/matchmaker/matchmaker_redis.py +++ b/oslo_messaging/_drivers/zmq_driver/matchmaker/matchmaker_redis.py @@ -17,7 +17,6 @@ from oslo_config import cfg import redis from oslo_messaging._drivers.zmq_driver.matchmaker import base -from oslo_messaging._drivers.zmq_driver import zmq_target LOG = logging.getLogger(__name__) @@ -48,11 +47,28 @@ class RedisMatchMaker(base.MatchMakerBase): password=self.conf.matchmaker_redis.password, ) + def _target_to_key(self, target): + attributes = ['topic', 'exchange', 'server'] + return ':'.join((getattr(target, attr) or "*") for attr in attributes) + + def _get_keys_by_pattern(self, pattern): + return self._redis.keys(pattern) + + def _get_hosts_by_key(self, key): + return self._redis.lrange(key, 0, -1) + def register(self, target, hostname): - if hostname not in self.get_hosts(target): - key = zmq_target.target_to_str(target) + key = self._target_to_key(target) + if hostname not in self._get_hosts_by_key(key): self._redis.lpush(key, hostname) def get_hosts(self, target): - key = zmq_target.target_to_str(target) - return self._redis.lrange(key, 0, -1)[::-1] + pattern = self._target_to_key(target) + if "*" not in pattern: + # pattern have no placeholders, so this is valid key + return self._get_hosts_by_key(pattern) + + hosts = [] + for key in self._get_keys_by_pattern(pattern): + hosts.extend(self._get_hosts_by_key(key)) + return hosts diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_call_request.py b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_call_request.py index dea54d471..0d35c31a6 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_call_request.py +++ b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_call_request.py @@ -29,6 +29,8 @@ zmq = zmq_async.import_zmq() class CallRequest(Request): + msg_type = zmq_serializer.CALL_TYPE + def __init__(self, conf, target, context, message, timeout=None, retry=None, allowed_remote_exmods=None, matchmaker=None): self.allowed_remote_exmods = allowed_remote_exmods or [] @@ -40,7 +42,6 @@ class CallRequest(Request): socket = self.zmq_context.socket(zmq.REQ) super(CallRequest, self).__init__(conf, target, context, message, socket, - zmq_serializer.CALL_TYPE, timeout, retry) self.host = self.matchmaker.get_single_host(self.target) self.connect_address = zmq_target.get_tcp_direct_address( diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_dealer.py b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_dealer.py index 75551a6a4..379d8ef3a 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_dealer.py +++ b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_dealer.py @@ -29,14 +29,7 @@ zmq = zmq_async.import_zmq() class CastRequest(Request): - def __init__(self, conf, target, context, - message, socket, 
address, timeout=None, retry=None): - self.connect_address = address - fanout_type = zmq_serializer.FANOUT_TYPE - cast_type = zmq_serializer.CAST_TYPE - msg_type = fanout_type if target.fanout else cast_type - super(CastRequest, self).__init__(conf, target, context, message, - socket, msg_type, timeout, retry) + msg_type = zmq_serializer.CAST_TYPE def __call__(self, *args, **kwargs): self.send_request() @@ -50,6 +43,19 @@ class CastRequest(Request): pass +class FanoutRequest(CastRequest): + + msg_type = zmq_serializer.FANOUT_TYPE + + def __init__(self, *args, **kwargs): + self.hosts_count = kwargs.pop("hosts_count") + super(FanoutRequest, self).__init__(*args, **kwargs) + + def send_request(self): + for _ in range(self.hosts_count): + super(FanoutRequest, self).send_request() + + class DealerCastPublisher(zmq_cast_publisher.CastPublisherBase): def __init__(self, conf, matchmaker): @@ -58,22 +64,30 @@ class DealerCastPublisher(zmq_cast_publisher.CastPublisherBase): def cast(self, target, context, message, timeout=None, retry=None): - host = self.matchmaker.get_single_host(target) - connect_address = zmq_target.get_tcp_direct_address(host) - dealer_socket = self._create_socket(connect_address) - request = CastRequest(self.conf, target, context, message, - dealer_socket, connect_address, timeout, retry) + if str(target) in self.outbound_sockets: + dealer_socket, hosts = self.outbound_sockets[str(target)] + else: + dealer_socket = self.zmq_context.socket(zmq.DEALER) + hosts = self.matchmaker.get_hosts(target) + for host in hosts: + self._connect_to_host(dealer_socket, host) + self.outbound_sockets[str(target)] = (dealer_socket, hosts) + + if target.fanout: + request = FanoutRequest(self.conf, target, context, message, + dealer_socket, timeout, retry, + hosts_count=len(hosts)) + else: + request = CastRequest(self.conf, target, context, message, + dealer_socket, timeout, retry) + request.send_request() - def _create_socket(self, address): - if address in self.outbound_sockets: - return self.outbound_sockets[address] + def _connect_to_host(self, socket, host): + address = zmq_target.get_tcp_direct_address(host) try: - dealer_socket = self.zmq_context.socket(zmq.DEALER) LOG.info(_LI("Connecting DEALER to %s") % address) - dealer_socket.connect(address) - self.outbound_sockets[address] = dealer_socket - return dealer_socket + socket.connect(address) except zmq.ZMQError as e: errmsg = _LE("Failed connecting DEALER to %(address)s: %(e)s")\ % (address, e) @@ -81,7 +95,6 @@ class DealerCastPublisher(zmq_cast_publisher.CastPublisherBase): raise rpc_common.RPCException(errmsg) def cleanup(self): - if self.outbound_sockets: - for socket in self.outbound_sockets.values(): - socket.setsockopt(zmq.LINGER, 0) - socket.close() + for socket, hosts in self.outbound_sockets.values(): + socket.setsockopt(zmq.LINGER, 0) + socket.close() diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_request.py b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_request.py index 3f8e1da61..88a4f8515 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_request.py +++ b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_request.py @@ -32,9 +32,10 @@ zmq = zmq_async.import_zmq() class Request(object): def __init__(self, conf, target, context, message, - socket, msg_type, timeout=None, retry=None): + socket, timeout=None, retry=None): - assert msg_type in zmq_serializer.MESSAGE_TYPES, "Unknown msg type!" 
+ if self.msg_type not in zmq_serializer.MESSAGE_TYPES: + raise RuntimeError("Unknown msg type!") if message['method'] is None: errmsg = _LE("No method specified for RPC call") @@ -42,7 +43,6 @@ class Request(object): raise KeyError(errmsg) self.msg_id = uuid.uuid4().hex - self.msg_type = msg_type self.target = target self.context = context self.message = message @@ -51,6 +51,10 @@ class Request(object): self.reply = None self.socket = socket + @abc.abstractproperty + def msg_type(self): + """ZMQ message type""" + @property def is_replied(self): return self.reply is not None diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_server.py b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_server.py index 0132aacaf..8383ae8f8 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_server.py +++ b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_server.py @@ -36,8 +36,7 @@ class ZmqServer(base.Listener): self.socket = self.context.socket(zmq.ROUTER) self.address = zmq_target.get_tcp_random_address(conf) self.port = self.socket.bind_to_random_port(self.address) - LOG.info("Run server on tcp://%s:%d" % - (self.address, self.port)) + LOG.info("Run server on %s:%d" % (self.address, self.port)) except zmq.ZMQError as e: errmsg = _LE("Failed binding to port %(port)d: %(e)s")\ % (self.port, e) diff --git a/oslo_messaging/_drivers/zmq_driver/zmq_target.py b/oslo_messaging/_drivers/zmq_driver/zmq_target.py index 3db6aace3..146c982d8 100644 --- a/oslo_messaging/_drivers/zmq_driver/zmq_target.py +++ b/oslo_messaging/_drivers/zmq_driver/zmq_target.py @@ -12,8 +12,6 @@ # License for the specific language governing permissions and limitations # under the License. -from oslo_messaging import target - def get_tcp_bind_address(port): return "tcp://*:%s" % port @@ -33,23 +31,3 @@ def get_tcp_direct_address(host): def get_tcp_random_address(conf): return "tcp://*" - - -def target_to_str(target): - items = [] - if target.topic: - items.append(target.topic) - if target.exchange: - items.append(target.exchange) - if target.server: - items.append(target.server) - return '.'.join(items) - - -def target_from_dict(target_dict): - return target.Target(exchange=target_dict['exchange'], - topic=target_dict['topic'], - namespace=target_dict['namespace'], - version=target_dict['version'], - server=target_dict['server'], - fanout=target_dict['fanout']) diff --git a/oslo_messaging/tests/drivers/zmq/matchmaker/test_impl_matchmaker.py b/oslo_messaging/tests/drivers/zmq/matchmaker/test_impl_matchmaker.py index da296d82f..1f04920c8 100644 --- a/oslo_messaging/tests/drivers/zmq/matchmaker/test_impl_matchmaker.py +++ b/oslo_messaging/tests/drivers/zmq/matchmaker/test_impl_matchmaker.py @@ -56,8 +56,8 @@ class TestImplMatchmaker(test_utils.BaseTestCase): self.test_matcher.register(self.target, self.host1) self.test_matcher.register(self.target, self.host2) - self.assertEqual(self.test_matcher.get_hosts(self.target), - [self.host1, self.host2]) + self.assertItemsEqual(self.test_matcher.get_hosts(self.target), + [self.host1, self.host2]) self.assertIn(self.test_matcher.get_single_host(self.target), [self.host1, self.host2]) @@ -76,5 +76,5 @@ class TestImplMatchmaker(test_utils.BaseTestCase): def test_get_single_host_wrong_topic(self): target = oslo_messaging.Target(topic="no_such_topic") - self.assertEqual(self.test_matcher.get_single_host(target), - "localhost:9501") + self.assertRaises(oslo_messaging.InvalidTarget, + self.test_matcher.get_single_host, target) diff --git 
a/oslo_messaging/tests/functional/test_functional.py b/oslo_messaging/tests/functional/test_functional.py index 962d473fe..0e56e0c1c 100644 --- a/oslo_messaging/tests/functional/test_functional.py +++ b/oslo_messaging/tests/functional/test_functional.py @@ -93,6 +93,8 @@ class CallTestCase(utils.SkipIfNoTransportURL): self.assertEqual(0, s.endpoint.ival) def test_timeout(self): + if self.url.startswith("zmq"): + self.skipTest("Skip CallTestCase.test_timeout for ZMQ driver") transport = self.useFixture(utils.TransportFixture(self.url)) target = oslo_messaging.Target(topic="no_such_topic") c = utils.ClientStub(transport.transport, target, timeout=1) @@ -185,6 +187,11 @@ class NotifyTestCase(utils.SkipIfNoTransportURL): # NOTE(sileht): Each test must not use the same topics # to be run in parallel + def setUp(self): + super(NotifyTestCase, self).setUp() + if self.url.startswith("zmq"): + self.skipTest("Skip NotifyTestCase for ZMQ driver") + def test_simple(self): listener = self.useFixture( utils.NotificationFixture(self.url, ['test_simple'])) diff --git a/tox.ini b/tox.ini index 7bc79ce52..6a92dbd23 100644 --- a/tox.ini +++ b/tox.ini @@ -41,11 +41,7 @@ setenv = TRANSPORT_URL=amqp://stackqpid:secretqpid@127.0.0.1:65123// commands = {toxinidir}/setup-test-env-qpid.sh python setup.py testr --slowest --testr-args='oslo_messaging.tests.functional' [testenv:py27-func-zeromq] -commands = {toxinidir}/setup-test-env-zmq.sh python -m testtools.run \ - oslo_messaging.tests.functional.test_functional.CallTestCase.test_exception \ - oslo_messaging.tests.functional.test_functional.CallTestCase.test_timeout \ - oslo_messaging.tests.functional.test_functional.CallTestCase.test_specific_server \ - oslo_messaging.tests.functional.test_functional.CastTestCase.test_specific_server +commands = {toxinidir}/setup-test-env-zmq.sh python setup.py testr --slowest --testr-args='oslo_messaging.tests.functional.test_functional' [flake8] show-source = True From 5920e7bef6853e066e5d3d2df5268cd6d4335f2c Mon Sep 17 00:00:00 2001 From: Victor Sergeyev Date: Wed, 22 Jul 2015 18:36:32 +0300 Subject: [PATCH 17/28] ZMQ: Removed unused code and tests The code of ZMQ driver is under the active development now, so after modifications some code can became unused. This patch removes this code. Change-Id: I4cfa75560eabf82618f31584b4645fd2630ac9cb --- .../zmq_driver/rpc/client/zmq_request.py | 14 +-- .../zmq_driver/rpc/server/zmq_server.py | 2 - .../_drivers/zmq_driver/zmq_serializer.py | 72 -------------- .../_drivers/zmq_driver/zmq_target.py | 8 -- .../zmq/matchmaker/test_matchmaker_redis.py | 97 ------------------- .../tests/drivers/zmq/test_zmq_serializer.py | 67 ------------- 6 files changed, 1 insertion(+), 259 deletions(-) delete mode 100644 oslo_messaging/tests/drivers/zmq/matchmaker/test_matchmaker_redis.py delete mode 100644 oslo_messaging/tests/drivers/zmq/test_zmq_serializer.py diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_request.py b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_request.py index 88a4f8515..b06699d93 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_request.py +++ b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_request.py @@ -13,9 +13,7 @@ # under the License. 
import abc -from abc import abstractmethod import logging -import uuid import six @@ -42,7 +40,6 @@ class Request(object): LOG.error(errmsg) raise KeyError(errmsg) - self.msg_id = uuid.uuid4().hex self.target = target self.context = context self.message = message @@ -55,17 +52,8 @@ class Request(object): def msg_type(self): """ZMQ message type""" - @property - def is_replied(self): - return self.reply is not None - - @property - def is_timed_out(self): - return False - def send_request(self): self.socket.send_string(self.msg_type, zmq.SNDMORE) - self.socket.send_json(self.target.__dict__, zmq.SNDMORE) self.socket.send_json(self.context, zmq.SNDMORE) self.socket.send_json(self.message) @@ -73,6 +61,6 @@ class Request(object): self.send_request() return self.receive_reply() - @abstractmethod + @abc.abstractmethod def receive_reply(self): "Receive reply from server side" diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_server.py b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_server.py index 8383ae8f8..17b04e86c 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_server.py +++ b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_server.py @@ -74,8 +74,6 @@ class ZmqServer(base.Listener): assert empty == b'', 'Bad format: empty delimiter expected' msg_type = socket.recv_string() assert msg_type is not None, 'Bad format: msg type expected' - target_dict = socket.recv_json() - assert target_dict is not None, 'Bad format: target expected' context = socket.recv_json() message = socket.recv_json() LOG.debug("Received CALL message %s" % str(message)) diff --git a/oslo_messaging/_drivers/zmq_driver/zmq_serializer.py b/oslo_messaging/_drivers/zmq_driver/zmq_serializer.py index 41663f639..6026ca655 100644 --- a/oslo_messaging/_drivers/zmq_driver/zmq_serializer.py +++ b/oslo_messaging/_drivers/zmq_driver/zmq_serializer.py @@ -12,17 +12,6 @@ # License for the specific language governing permissions and limitations # under the License. -import logging -import os -import re - -import six - -from oslo_messaging._drivers import common as rpc_common -from oslo_messaging._i18n import _LE, _LW - -LOG = logging.getLogger(__name__) - MESSAGE_CALL_TYPE_POSITION = 1 MESSAGE_CALL_TARGET_POSITION = 2 MESSAGE_CALL_TOPIC_POSITION = 3 @@ -37,64 +26,3 @@ FANOUT_TYPE = 'fanout' NOTIFY_TYPE = 'notify' MESSAGE_TYPES = (CALL_TYPE, CAST_TYPE, FANOUT_TYPE, NOTIFY_TYPE) - - -def get_msg_type(message): - type = message[MESSAGE_CALL_TYPE_POSITION] - if type not in MESSAGE_TYPES: - errmsg = _LE("Unknown message type: %s") % str(type) - LOG.error(errmsg) - raise rpc_common.RPCException(errmsg) - return type - - -def _get_topic_from_msg(message, position): - pathsep = set((os.path.sep or '', os.path.altsep or '', '/', '\\')) - badchars = re.compile(r'[%s]' % re.escape(''.join(pathsep))) - - if len(message) < position + 1: - errmsg = _LE("Message did not contain a topic") - LOG.error("%s: %s" % (errmsg, message)) - raise rpc_common.RPCException("%s: %s" % (errmsg, message)) - - topic = message[position] - - if six.PY3: - topic = topic.decode('utf-8') - - # The topic is received over the network, don't trust this input. 
- if badchars.search(topic) is not None: - errmsg = _LW("Topic contained dangerous characters") - LOG.warn("%s: %s" % (errmsg, topic)) - raise rpc_common.RPCException("%s: %s" % (errmsg, topic)) - - topic_items = topic.split('.', 1) - - if len(topic_items) != 2: - errmsg = _LE("Topic was not formatted correctly") - LOG.error("%s: %s" % (errmsg, topic)) - raise rpc_common.RPCException("%s: %s" % (errmsg, topic)) - - return topic_items[0], topic_items[1] - - -def get_topic_from_call_message(message): - """Extract topic and server from message. - - :param message: A message - :type message: list - - :returns: (topic: str, server: str) - """ - return _get_topic_from_msg(message, MESSAGE_CALL_TOPIC_POSITION) - - -def get_target_from_call_message(message): - """Extract target from message. - - :param message: A message - :type message: list - - :returns: target: Target - """ - return message[MESSAGE_CALL_TARGET_POSITION] diff --git a/oslo_messaging/_drivers/zmq_driver/zmq_target.py b/oslo_messaging/_drivers/zmq_driver/zmq_target.py index 146c982d8..7feb05d89 100644 --- a/oslo_messaging/_drivers/zmq_driver/zmq_target.py +++ b/oslo_messaging/_drivers/zmq_driver/zmq_target.py @@ -13,14 +13,6 @@ # under the License. -def get_tcp_bind_address(port): - return "tcp://*:%s" % port - - -def get_tcp_address_call(conf, host): - return "tcp://%s:%s" % (host, conf.rpc_zmq_port) - - def combine_address(host, port): return "%s:%s" % (host, port) diff --git a/oslo_messaging/tests/drivers/zmq/matchmaker/test_matchmaker_redis.py b/oslo_messaging/tests/drivers/zmq/matchmaker/test_matchmaker_redis.py deleted file mode 100644 index f2498cd6d..000000000 --- a/oslo_messaging/tests/drivers/zmq/matchmaker/test_matchmaker_redis.py +++ /dev/null @@ -1,97 +0,0 @@ -# Copyright 2014 Canonical, Ltd. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from oslo_utils import importutils -import testtools - -from oslo_messaging.tests import utils as test_utils - -redis = importutils.try_import('redis') -matchmaker_redis = ( - importutils.try_import('oslo_messaging._drivers.matchmaker_redis')) - - -def redis_available(): - '''Helper to see if local redis server is running''' - if not redis: - return False - try: - c = redis.StrictRedis(socket_timeout=1) - c.ping() - return True - except redis.exceptions.ConnectionError: - return False - - -@testtools.skipIf(not matchmaker_redis, "matchmaker/eventlet unavailable") -@testtools.skipIf(not redis_available(), "redis unavailable") -class RedisMatchMakerTest(test_utils.BaseTestCase): - - def setUp(self): - super(RedisMatchMakerTest, self).setUp() - self.ring_data = { - "conductor": ["controller1", "node1", "node2", "node3"], - "scheduler": ["controller1", "node1", "node2", "node3"], - "network": ["controller1", "node1", "node2", "node3"], - "cert": ["controller1"], - "console": ["controller1"], - "l3_agent.node1": ["node1"], - "consoleauth": ["controller1"]} - self.matcher = matchmaker_redis.MatchMakerRedis() - self.populate() - - def tearDown(self): - super(RedisMatchMakerTest, self).tearDown() - c = redis.StrictRedis() - c.flushdb() - - def populate(self): - for k, hosts in self.ring_data.items(): - for h in hosts: - self.matcher.register(k, h) - - def test_direct(self): - self.assertEqual( - self.matcher.queues('cert.controller1'), - [('cert.controller1', 'controller1')]) - - def test_register(self): - self.matcher.register('cert', 'keymaster') - self.assertEqual( - sorted(self.matcher.redis.smembers('cert')), - [b'cert.controller1', b'cert.keymaster']) - self.matcher.register('l3_agent.node1', 'node1') - self.assertEqual( - sorted(self.matcher.redis.smembers('l3_agent.node1')), - [b'l3_agent.node1.node1']) - - def test_unregister(self): - self.matcher.unregister('conductor', 'controller1') - self.assertEqual( - sorted(self.matcher.redis.smembers('conductor')), - [b'conductor.node1', b'conductor.node2', b'conductor.node3']) - - def test_ack_alive(self): - self.matcher.ack_alive('ack_alive', 'controller1') - self.assertEqual( - sorted(self.matcher.redis.smembers('ack_alive')), - [b'ack_alive.controller1']) - - def test_is_alive(self): - self.assertEqual( - self.matcher.is_alive('conductor', 'conductor.controller1'), - True) - self.assertEqual( - self.matcher.is_alive('conductor', 'conductor.controller2'), - False) diff --git a/oslo_messaging/tests/drivers/zmq/test_zmq_serializer.py b/oslo_messaging/tests/drivers/zmq/test_zmq_serializer.py deleted file mode 100644 index 48f52734e..000000000 --- a/oslo_messaging/tests/drivers/zmq/test_zmq_serializer.py +++ /dev/null @@ -1,67 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import os -import re - -from oslo_messaging._drivers.common import RPCException -from oslo_messaging._drivers.zmq_driver import zmq_serializer -from oslo_messaging.tests import utils as test_utils - - -class TestZmqSerializer(test_utils.BaseTestCase): - - def test_message_without_topic_raises_RPCException(self): - # The topic is the 4th element of the message. - msg_without_topic = ['only', 'three', 'parts'] - - expected = "Message did not contain a topic: %s" % msg_without_topic - with self.assertRaisesRegexp(RPCException, re.escape(expected)): - zmq_serializer.get_topic_from_call_message(msg_without_topic) - - def test_invalid_topic_format_raises_RPCException(self): - invalid_topic = "no dots to split on, so not index-able".encode('utf8') - bad_message = ['', '', '', invalid_topic] - - expected_msg = "Topic was not formatted correctly: %s" - expected_msg = expected_msg % invalid_topic.decode('utf8') - with self.assertRaisesRegexp(RPCException, expected_msg): - zmq_serializer.get_topic_from_call_message(bad_message) - - def test_py3_decodes_bytes_correctly(self): - message = ['', '', '', b'topic.ipaddress'] - - actual, _ = zmq_serializer.get_topic_from_call_message(message) - - self.assertEqual('topic', actual) - - def test_bad_characters_in_topic_raise_RPCException(self): - # handle unexpected os path separators: - unexpected_evil = '<' - os.path.sep = unexpected_evil - - unexpected_alt_evil = '>' - os.path.altsep = unexpected_alt_evil - - evil_chars = [unexpected_evil, unexpected_alt_evil, '\\', '/'] - - for evil_char in evil_chars: - evil_topic = '%s%s%s' % ('trust.me', evil_char, 'please') - evil_topic = evil_topic.encode('utf8') - evil_message = ['', '', '', evil_topic] - - expected_msg = "Topic contained dangerous characters: %s" - expected_msg = expected_msg % evil_topic.decode('utf8') - expected_msg = re.escape(expected_msg) - - with self.assertRaisesRegexp(RPCException, expected_msg): - zmq_serializer.get_topic_from_call_message(evil_message) From 99b24b3888233656779a5baec1011554174604d7 Mon Sep 17 00:00:00 2001 From: Victor Sergeyev Date: Wed, 29 Jul 2015 11:04:00 +0300 Subject: [PATCH 18/28] Register matchmaker_redis_opts in RedisMatchMaker Change-Id: Ib74051be2f8b79a07e0c8a38769f4d47cf792bc7 --- .../_drivers/zmq_driver/matchmaker/matchmaker_redis.py | 1 + 1 file changed, 1 insertion(+) diff --git a/oslo_messaging/_drivers/zmq_driver/matchmaker/matchmaker_redis.py b/oslo_messaging/_drivers/zmq_driver/matchmaker/matchmaker_redis.py index 4ef078631..a55e5076f 100644 --- a/oslo_messaging/_drivers/zmq_driver/matchmaker/matchmaker_redis.py +++ b/oslo_messaging/_drivers/zmq_driver/matchmaker/matchmaker_redis.py @@ -40,6 +40,7 @@ class RedisMatchMaker(base.MatchMakerBase): def __init__(self, conf, *args, **kwargs): super(RedisMatchMaker, self).__init__(conf, *args, **kwargs) + self.conf.register_opts(matchmaker_redis_opts, "matchmaker_redis") self._redis = redis.StrictRedis( host=self.conf.matchmaker_redis.host, From c90525bfead1f495df86c5c5d795d25abad2e1d9 Mon Sep 17 00:00:00 2001 From: Doug Hellmann Date: Tue, 28 Apr 2015 12:18:34 +0000 Subject: [PATCH 19/28] Remove oslo namespace package Blueprint remove-namespace-packages Cherry-picked from: 03265410e0294e176d18dd42b57268a8056eb8fc Change-Id: Ibaba19ef10b4902c4f4f9fbdf7078e66b75f2035 --- oslo/__init__.py | 16 - oslo/messaging/__init__.py | 38 -- oslo/messaging/_drivers/__init__.py | 0 oslo/messaging/_drivers/common.py | 16 - oslo/messaging/_executors/__init__.py | 0 oslo/messaging/_executors/base.py | 17 - 
oslo/messaging/conffixture.py | 13 - oslo/messaging/exceptions.py | 13 - oslo/messaging/localcontext.py | 13 - oslo/messaging/notify/__init__.py | 28 - oslo/messaging/notify/dispatcher.py | 13 - oslo/messaging/notify/listener.py | 13 - oslo/messaging/notify/log_handler.py | 13 - oslo/messaging/notify/logger.py | 13 - oslo/messaging/notify/middleware.py | 13 - oslo/messaging/notify/notifier.py | 13 - oslo/messaging/rpc/__init__.py | 32 - oslo/messaging/rpc/client.py | 13 - oslo/messaging/rpc/dispatcher.py | 13 - oslo/messaging/rpc/server.py | 13 - oslo/messaging/serializer.py | 13 - oslo/messaging/server.py | 13 - oslo/messaging/target.py | 13 - oslo/messaging/transport.py | 13 - setup.cfg | 4 - tests/__init__.py | 26 - tests/drivers/__init__.py | 0 tests/drivers/test_impl_qpid.py | 850 ------------------------- tests/drivers/test_impl_rabbit.py | 758 ---------------------- tests/drivers/test_impl_zmq.py | 228 ------- tests/drivers/test_matchmaker.py | 69 -- tests/drivers/test_matchmaker_redis.py | 83 --- tests/drivers/test_matchmaker_ring.py | 73 --- tests/notify/__init__.py | 0 tests/notify/test_dispatcher.py | 171 ----- tests/notify/test_listener.py | 411 ------------ tests/notify/test_log_handler.py | 57 -- tests/notify/test_logger.py | 157 ----- tests/notify/test_middleware.py | 190 ------ tests/notify/test_notifier.py | 540 ---------------- tests/rpc/__init__.py | 0 tests/rpc/test_client.py | 519 --------------- tests/rpc/test_dispatcher.py | 178 ------ tests/rpc/test_server.py | 503 --------------- tests/test_amqp_driver.py | 738 --------------------- tests/test_exception_serialization.py | 308 --------- tests/test_expected_exceptions.py | 66 -- tests/test_target.py | 177 ----- tests/test_transport.py | 367 ----------- tests/test_urls.py | 236 ------- tests/test_warning.py | 61 -- 51 files changed, 7125 deletions(-) delete mode 100644 oslo/__init__.py delete mode 100644 oslo/messaging/__init__.py delete mode 100644 oslo/messaging/_drivers/__init__.py delete mode 100644 oslo/messaging/_drivers/common.py delete mode 100644 oslo/messaging/_executors/__init__.py delete mode 100644 oslo/messaging/_executors/base.py delete mode 100644 oslo/messaging/conffixture.py delete mode 100644 oslo/messaging/exceptions.py delete mode 100644 oslo/messaging/localcontext.py delete mode 100644 oslo/messaging/notify/__init__.py delete mode 100644 oslo/messaging/notify/dispatcher.py delete mode 100644 oslo/messaging/notify/listener.py delete mode 100644 oslo/messaging/notify/log_handler.py delete mode 100644 oslo/messaging/notify/logger.py delete mode 100644 oslo/messaging/notify/middleware.py delete mode 100644 oslo/messaging/notify/notifier.py delete mode 100644 oslo/messaging/rpc/__init__.py delete mode 100644 oslo/messaging/rpc/client.py delete mode 100644 oslo/messaging/rpc/dispatcher.py delete mode 100644 oslo/messaging/rpc/server.py delete mode 100644 oslo/messaging/serializer.py delete mode 100644 oslo/messaging/server.py delete mode 100644 oslo/messaging/target.py delete mode 100644 oslo/messaging/transport.py delete mode 100644 tests/__init__.py delete mode 100644 tests/drivers/__init__.py delete mode 100644 tests/drivers/test_impl_qpid.py delete mode 100644 tests/drivers/test_impl_rabbit.py delete mode 100644 tests/drivers/test_impl_zmq.py delete mode 100644 tests/drivers/test_matchmaker.py delete mode 100644 tests/drivers/test_matchmaker_redis.py delete mode 100644 tests/drivers/test_matchmaker_ring.py delete mode 100644 tests/notify/__init__.py delete mode 100644 
tests/notify/test_dispatcher.py delete mode 100644 tests/notify/test_listener.py delete mode 100644 tests/notify/test_log_handler.py delete mode 100644 tests/notify/test_logger.py delete mode 100644 tests/notify/test_middleware.py delete mode 100644 tests/notify/test_notifier.py delete mode 100644 tests/rpc/__init__.py delete mode 100644 tests/rpc/test_client.py delete mode 100644 tests/rpc/test_dispatcher.py delete mode 100644 tests/rpc/test_server.py delete mode 100644 tests/test_amqp_driver.py delete mode 100644 tests/test_exception_serialization.py delete mode 100644 tests/test_expected_exceptions.py delete mode 100644 tests/test_target.py delete mode 100644 tests/test_transport.py delete mode 100644 tests/test_urls.py delete mode 100644 tests/test_warning.py diff --git a/oslo/__init__.py b/oslo/__init__.py deleted file mode 100644 index 8feca65d6..000000000 --- a/oslo/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# Copyright 2012 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -__import__('pkg_resources').declare_namespace(__name__) diff --git a/oslo/messaging/__init__.py b/oslo/messaging/__init__.py deleted file mode 100644 index 125c96a75..000000000 --- a/oslo/messaging/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ - -# Copyright 2013 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import warnings - -from .exceptions import * -from .localcontext import * -from .notify import * -from .rpc import * -from .serializer import * -from .server import * -from .target import * -from .transport import * - - -def deprecated(): - new_name = __name__.replace('.', '_') - warnings.warn( - ('The oslo namespace package is deprecated. Please use %s instead.' % - new_name), - DeprecationWarning, - stacklevel=3, - ) - - -deprecated() diff --git a/oslo/messaging/_drivers/__init__.py b/oslo/messaging/_drivers/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/oslo/messaging/_drivers/common.py b/oslo/messaging/_drivers/common.py deleted file mode 100644 index 12f8b095c..000000000 --- a/oslo/messaging/_drivers/common.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -# TODO(dhellmann): This private package and these imports can be -# removed after heat fixes their tests. See -# https://bugs.launchpad.net/oslo.messaging/+bug/1410196. -from oslo_messaging._drivers.common import * # noqa diff --git a/oslo/messaging/_executors/__init__.py b/oslo/messaging/_executors/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/oslo/messaging/_executors/base.py b/oslo/messaging/_executors/base.py deleted file mode 100644 index 01dfc0677..000000000 --- a/oslo/messaging/_executors/base.py +++ /dev/null @@ -1,17 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_messaging._executors.base import * # noqa - -# FIXME(dhellmann): Provide a dummy value so the mock in nova's unit -# test fixture works. See bug #1412841 -POLL_TIMEOUT = 0.1 diff --git a/oslo/messaging/conffixture.py b/oslo/messaging/conffixture.py deleted file mode 100644 index 8b4be93a8..000000000 --- a/oslo/messaging/conffixture.py +++ /dev/null @@ -1,13 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_messaging.conffixture import * # noqa diff --git a/oslo/messaging/exceptions.py b/oslo/messaging/exceptions.py deleted file mode 100644 index 4708d87c7..000000000 --- a/oslo/messaging/exceptions.py +++ /dev/null @@ -1,13 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from oslo_messaging.exceptions import * # noqa diff --git a/oslo/messaging/localcontext.py b/oslo/messaging/localcontext.py deleted file mode 100644 index 0b24f7f23..000000000 --- a/oslo/messaging/localcontext.py +++ /dev/null @@ -1,13 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_messaging.localcontext import * # noqa diff --git a/oslo/messaging/notify/__init__.py b/oslo/messaging/notify/__init__.py deleted file mode 100644 index 9de833189..000000000 --- a/oslo/messaging/notify/__init__.py +++ /dev/null @@ -1,28 +0,0 @@ - -# Copyright 2013 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -__all__ = ['Notifier', - 'LoggingNotificationHandler', - 'get_notification_listener', - 'NotificationResult', - 'PublishErrorsHandler', - 'LoggingErrorNotificationHandler'] - -from .notifier import * -from .listener import * -from .log_handler import * -from .logger import * -from .dispatcher import NotificationResult -from oslo_messaging.notify import _impl_test diff --git a/oslo/messaging/notify/dispatcher.py b/oslo/messaging/notify/dispatcher.py deleted file mode 100644 index d472674ad..000000000 --- a/oslo/messaging/notify/dispatcher.py +++ /dev/null @@ -1,13 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_messaging.notify.dispatcher import * # noqa diff --git a/oslo/messaging/notify/listener.py b/oslo/messaging/notify/listener.py deleted file mode 100644 index 0e73924d9..000000000 --- a/oslo/messaging/notify/listener.py +++ /dev/null @@ -1,13 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_messaging.notify.listener import * # noqa diff --git a/oslo/messaging/notify/log_handler.py b/oslo/messaging/notify/log_handler.py deleted file mode 100644 index 3ee75a082..000000000 --- a/oslo/messaging/notify/log_handler.py +++ /dev/null @@ -1,13 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_messaging.notify.log_handler import * # noqa diff --git a/oslo/messaging/notify/logger.py b/oslo/messaging/notify/logger.py deleted file mode 100644 index f32a424fa..000000000 --- a/oslo/messaging/notify/logger.py +++ /dev/null @@ -1,13 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_messaging.notify.logger import * # noqa diff --git a/oslo/messaging/notify/middleware.py b/oslo/messaging/notify/middleware.py deleted file mode 100644 index 992b65bea..000000000 --- a/oslo/messaging/notify/middleware.py +++ /dev/null @@ -1,13 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_messaging.notify.middleware import * # noqa diff --git a/oslo/messaging/notify/notifier.py b/oslo/messaging/notify/notifier.py deleted file mode 100644 index 0d23eb039..000000000 --- a/oslo/messaging/notify/notifier.py +++ /dev/null @@ -1,13 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from oslo_messaging.notify.notifier import * # noqa diff --git a/oslo/messaging/rpc/__init__.py b/oslo/messaging/rpc/__init__.py deleted file mode 100644 index f9cc88194..000000000 --- a/oslo/messaging/rpc/__init__.py +++ /dev/null @@ -1,32 +0,0 @@ - -# Copyright 2013 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -__all__ = [ - 'ClientSendError', - 'ExpectedException', - 'NoSuchMethod', - 'RPCClient', - 'RPCDispatcher', - 'RPCDispatcherError', - 'RPCVersionCapError', - 'RemoteError', - 'UnsupportedVersion', - 'expected_exceptions', - 'get_rpc_server', -] - -from .client import * -from .dispatcher import * -from .server import * diff --git a/oslo/messaging/rpc/client.py b/oslo/messaging/rpc/client.py deleted file mode 100644 index c625ba2e9..000000000 --- a/oslo/messaging/rpc/client.py +++ /dev/null @@ -1,13 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_messaging.rpc.client import * # noqa diff --git a/oslo/messaging/rpc/dispatcher.py b/oslo/messaging/rpc/dispatcher.py deleted file mode 100644 index 0cf387106..000000000 --- a/oslo/messaging/rpc/dispatcher.py +++ /dev/null @@ -1,13 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_messaging.rpc.dispatcher import * # noqa diff --git a/oslo/messaging/rpc/server.py b/oslo/messaging/rpc/server.py deleted file mode 100644 index c297fd14a..000000000 --- a/oslo/messaging/rpc/server.py +++ /dev/null @@ -1,13 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from oslo_messaging.rpc.server import * # noqa diff --git a/oslo/messaging/serializer.py b/oslo/messaging/serializer.py deleted file mode 100644 index b7b9b3f68..000000000 --- a/oslo/messaging/serializer.py +++ /dev/null @@ -1,13 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_messaging.serializer import * # noqa diff --git a/oslo/messaging/server.py b/oslo/messaging/server.py deleted file mode 100644 index 517f9abe5..000000000 --- a/oslo/messaging/server.py +++ /dev/null @@ -1,13 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_messaging.server import * # noqa diff --git a/oslo/messaging/target.py b/oslo/messaging/target.py deleted file mode 100644 index 2f521a17b..000000000 --- a/oslo/messaging/target.py +++ /dev/null @@ -1,13 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_messaging.target import * # noqa diff --git a/oslo/messaging/transport.py b/oslo/messaging/transport.py deleted file mode 100644 index a10dfe446..000000000 --- a/oslo/messaging/transport.py +++ /dev/null @@ -1,13 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from oslo_messaging.transport import * # noqa diff --git a/setup.cfg b/setup.cfg index ce73f1a0b..e6a93520b 100644 --- a/setup.cfg +++ b/setup.cfg @@ -19,11 +19,7 @@ classifier = [files] packages = - oslo - oslo.messaging oslo_messaging -namespace_packages = - oslo [entry_points] console_scripts = diff --git a/tests/__init__.py b/tests/__init__.py deleted file mode 100644 index 0222c4e76..000000000 --- a/tests/__init__.py +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright 2014 eNovance -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -# Import oslotest before importing test submodules to setup six.moves for mock -import oslotest - -try: - import eventlet -except ImportError: - pass -else: - # Ensure that eventlet monkey patching is enabled before loading the qpid - # module, otherwise qpid will hang - eventlet.monkey_patch() diff --git a/tests/drivers/__init__.py b/tests/drivers/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/drivers/test_impl_qpid.py b/tests/drivers/test_impl_qpid.py deleted file mode 100644 index ae4d806dc..000000000 --- a/tests/drivers/test_impl_qpid.py +++ /dev/null @@ -1,850 +0,0 @@ -# Copyright (C) 2014 eNovance SAS -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import operator -import random -import threading -import time - -try: - import qpid -except ImportError: - qpid = None -from six.moves import _thread -import testscenarios -import testtools - -from oslo import messaging -from oslo_messaging._drivers import amqp -from oslo_messaging._drivers import impl_qpid as qpid_driver -from oslo_messaging.tests import utils as test_utils -from six.moves import mock - - -load_tests = testscenarios.load_tests_apply_scenarios - -QPID_BROKER = 'localhost:5672' - - -class TestQpidDriverLoad(test_utils.BaseTestCase): - - def setUp(self): - super(TestQpidDriverLoad, self).setUp() - self.messaging_conf.transport_driver = 'qpid' - - def test_driver_load(self): - transport = messaging.get_transport(self.conf) - self.assertIsInstance(transport._driver, qpid_driver.QpidDriver) - - -def _is_qpidd_service_running(): - - """this function checks if the qpid service is running or not.""" - - qpid_running = True - try: - broker = QPID_BROKER - connection = qpid.messaging.Connection(broker) - connection.open() - except Exception: - # qpid service is not running. 
- qpid_running = False - else: - connection.close() - - return qpid_running - - -class _QpidBaseTestCase(test_utils.BaseTestCase): - - @testtools.skipIf(qpid is None, "qpid not available") - def setUp(self): - super(_QpidBaseTestCase, self).setUp() - self.messaging_conf.transport_driver = 'qpid' - self.fake_qpid = not _is_qpidd_service_running() - - if self.fake_qpid: - self.session_receive = get_fake_qpid_session() - self.session_send = get_fake_qpid_session() - else: - self.broker = QPID_BROKER - # create connection from the qpid.messaging - # connection for the Consumer. - self.con_receive = qpid.messaging.Connection(self.broker) - self.con_receive.open() - # session to receive the messages - self.session_receive = self.con_receive.session() - - # connection for sending the message - self.con_send = qpid.messaging.Connection(self.broker) - self.con_send.open() - # session to send the messages - self.session_send = self.con_send.session() - - # list to store the expected messages and - # the actual received messages - self._expected = [] - self._messages = [] - self.initialized = True - - def tearDown(self): - super(_QpidBaseTestCase, self).tearDown() - - if self.initialized: - if self.fake_qpid: - _fake_session.flush_exchanges() - else: - self.con_receive.close() - self.con_send.close() - - -class TestQpidTransportURL(_QpidBaseTestCase): - - scenarios = [ - ('none', dict(url=None, - expected=[dict(host='localhost:5672', - username='', - password='')])), - ('empty', - dict(url='qpid:///', - expected=[dict(host='localhost:5672', - username='', - password='')])), - ('localhost', - dict(url='qpid://localhost/', - expected=[dict(host='localhost', - username='', - password='')])), - ('no_creds', - dict(url='qpid://host/', - expected=[dict(host='host', - username='', - password='')])), - ('no_port', - dict(url='qpid://user:password@host/', - expected=[dict(host='host', - username='user', - password='password')])), - ('full_url', - dict(url='qpid://user:password@host:10/', - expected=[dict(host='host:10', - username='user', - password='password')])), - ('full_two_url', - dict(url='qpid://user:password@host:10,' - 'user2:password2@host2:12/', - expected=[dict(host='host:10', - username='user', - password='password'), - dict(host='host2:12', - username='user2', - password='password2') - ] - )), - - ] - - @mock.patch.object(qpid_driver.Connection, 'reconnect') - def test_transport_url(self, *args): - transport = messaging.get_transport(self.conf, self.url) - self.addCleanup(transport.cleanup) - driver = transport._driver - - brokers_params = driver._get_connection().brokers_params - self.assertEqual(sorted(self.expected, - key=operator.itemgetter('host')), - sorted(brokers_params, - key=operator.itemgetter('host'))) - - -class TestQpidInvalidTopologyVersion(_QpidBaseTestCase): - """Unit test cases to test invalid qpid topology version.""" - - scenarios = [ - ('direct', dict(consumer_cls=qpid_driver.DirectConsumer, - consumer_kwargs={}, - publisher_cls=qpid_driver.DirectPublisher, - publisher_kwargs={})), - ('topic', dict(consumer_cls=qpid_driver.TopicConsumer, - consumer_kwargs={'exchange_name': 'openstack'}, - publisher_cls=qpid_driver.TopicPublisher, - publisher_kwargs={'exchange_name': 'openstack'})), - ('fanout', dict(consumer_cls=qpid_driver.FanoutConsumer, - consumer_kwargs={}, - publisher_cls=qpid_driver.FanoutPublisher, - publisher_kwargs={})), - ] - - def setUp(self): - super(TestQpidInvalidTopologyVersion, self).setUp() - self.config(qpid_topology_version=-1, - 
group='oslo_messaging_qpid') - - def test_invalid_topology_version(self): - def consumer_callback(msg): - pass - - msgid_or_topic = 'test' - - # not using self.assertRaises because - # 1. qpid driver raises Exception(msg) for invalid topology version - # 2. flake8 - H202 assertRaises Exception too broad - exception_msg = ("Invalid value for qpid_topology_version: %d" % - self.conf.oslo_messaging_qpid.qpid_topology_version) - recvd_exc_msg = '' - - try: - self.consumer_cls(self.conf.oslo_messaging_qpid, - self.session_receive, - msgid_or_topic, - consumer_callback, - **self.consumer_kwargs) - except Exception as e: - recvd_exc_msg = e.message - - self.assertEqual(exception_msg, recvd_exc_msg) - - recvd_exc_msg = '' - try: - self.publisher_cls(self.conf.oslo_messaging_qpid, - self.session_send, - topic=msgid_or_topic, - **self.publisher_kwargs) - except Exception as e: - recvd_exc_msg = e.message - - self.assertEqual(exception_msg, recvd_exc_msg) - - -class TestQpidDirectConsumerPublisher(_QpidBaseTestCase): - """Unit test cases to test DirectConsumer and Direct Publisher.""" - - _n_qpid_topology = [ - ('v1', dict(qpid_topology=1)), - ('v2', dict(qpid_topology=2)), - ] - - _n_msgs = [ - ('single', dict(no_msgs=1)), - ('multiple', dict(no_msgs=10)), - ] - - @classmethod - def generate_scenarios(cls): - cls.scenarios = testscenarios.multiply_scenarios(cls._n_qpid_topology, - cls._n_msgs) - - def consumer_callback(self, msg): - # This function will be called by the DirectConsumer - # when any message is received. - # Append the received message into the messages list - # so that the received messages can be validated - # with the expected messages - if isinstance(msg, dict): - self._messages.append(msg['content']) - else: - self._messages.append(msg) - - def test_qpid_direct_consumer_producer(self): - self.msgid = str(random.randint(1, 100)) - - # create a DirectConsumer and DirectPublisher class objects - self.dir_cons = qpid_driver.DirectConsumer( - self.conf.oslo_messaging_qpid, - self.session_receive, - self.msgid, - self.consumer_callback) - self.dir_pub = qpid_driver.DirectPublisher( - self.conf.oslo_messaging_qpid, - self.session_send, - self.msgid) - - def try_send_msg(no_msgs): - for i in range(no_msgs): - self._expected.append(str(i)) - snd_msg = {'content_type': 'text/plain', 'content': str(i)} - self.dir_pub.send(snd_msg) - - def try_receive_msg(no_msgs): - for i in range(no_msgs): - self.dir_cons.consume() - - thread1 = threading.Thread(target=try_receive_msg, - args=(self.no_msgs,)) - thread2 = threading.Thread(target=try_send_msg, - args=(self.no_msgs,)) - - thread1.start() - thread2.start() - thread1.join() - thread2.join() - - self.assertEqual(self.no_msgs, len(self._messages)) - self.assertEqual(self._expected, self._messages) - - -TestQpidDirectConsumerPublisher.generate_scenarios() - - -class TestQpidTopicAndFanout(_QpidBaseTestCase): - """Unit Test cases to test TopicConsumer and - TopicPublisher classes of the qpid driver - and FanoutConsumer and FanoutPublisher classes - of the qpid driver - """ - - _n_qpid_topology = [ - ('v1', dict(qpid_topology=1)), - ('v2', dict(qpid_topology=2)), - ] - - _n_msgs = [ - ('single', dict(no_msgs=1)), - ('multiple', dict(no_msgs=10)), - ] - - _n_senders = [ - ('single', dict(no_senders=1)), - ('multiple', dict(no_senders=10)), - ] - - _n_receivers = [ - ('single', dict(no_receivers=1)), - ] - _exchange_class = [ - ('topic', dict(consumer_cls=qpid_driver.TopicConsumer, - consumer_kwargs={'exchange_name': 'openstack'}, - 
publisher_cls=qpid_driver.TopicPublisher, - publisher_kwargs={'exchange_name': 'openstack'}, - topic='topictest.test', - receive_topic='topictest.test')), - ('fanout', dict(consumer_cls=qpid_driver.FanoutConsumer, - consumer_kwargs={}, - publisher_cls=qpid_driver.FanoutPublisher, - publisher_kwargs={}, - topic='fanouttest', - receive_topic='fanouttest')), - ] - - @classmethod - def generate_scenarios(cls): - cls.scenarios = testscenarios.multiply_scenarios(cls._n_qpid_topology, - cls._n_msgs, - cls._n_senders, - cls._n_receivers, - cls._exchange_class) - - def setUp(self): - super(TestQpidTopicAndFanout, self).setUp() - - # to store the expected messages and the - # actual received messages - # - # NOTE(dhellmann): These are dicts, where the base class uses - # lists. - self._expected = {} - self._messages = {} - - self._senders = [] - self._receivers = [] - - self._sender_threads = [] - self._receiver_threads = [] - - def consumer_callback(self, msg): - """callback function called by the ConsumerBase class of - qpid driver. - Message will be received in the format x-y - where x is the sender id and y is the msg number of the sender - extract the sender id 'x' and store the msg 'x-y' with 'x' as - the key - """ - - if isinstance(msg, dict): - msgcontent = msg['content'] - else: - msgcontent = msg - - splitmsg = msgcontent.split('-') - key = _thread.get_ident() - - if key not in self._messages: - self._messages[key] = dict() - - tdict = self._messages[key] - - if splitmsg[0] not in tdict: - tdict[splitmsg[0]] = [] - - tdict[splitmsg[0]].append(msgcontent) - - def _try_send_msg(self, sender_id, no_msgs): - for i in range(no_msgs): - sendmsg = '%s-%s' % (str(sender_id), str(i)) - key = str(sender_id) - # Store the message in the self._expected for each sender. - # This will be used later to - # validate the test by comparing it with the - # received messages by all the receivers - if key not in self._expected: - self._expected[key] = [] - self._expected[key].append(sendmsg) - send_dict = {'content_type': 'text/plain', 'content': sendmsg} - self._senders[sender_id].send(send_dict) - - def _try_receive_msg(self, receiver_id, no_msgs): - for i in range(self.no_senders * no_msgs): - no_of_attempts = 0 - - # ConsumerBase.consume blocks indefinitely until a message - # is received. 
- # So qpid_receiver.available() is called before calling - # ConsumerBase.consume() so that we are not - # blocked indefinitely - qpid_receiver = self._receivers[receiver_id].get_receiver() - while no_of_attempts < 50: - if qpid_receiver.available() > 0: - self._receivers[receiver_id].consume() - break - no_of_attempts += 1 - time.sleep(0.05) - - def test_qpid_topic_and_fanout(self): - for receiver_id in range(self.no_receivers): - consumer = self.consumer_cls(self.conf.oslo_messaging_qpid, - self.session_receive, - self.receive_topic, - self.consumer_callback, - **self.consumer_kwargs) - self._receivers.append(consumer) - - # create receivers threads - thread = threading.Thread(target=self._try_receive_msg, - args=(receiver_id, self.no_msgs,)) - self._receiver_threads.append(thread) - - for sender_id in range(self.no_senders): - publisher = self.publisher_cls(self.conf.oslo_messaging_qpid, - self.session_send, - topic=self.topic, - **self.publisher_kwargs) - self._senders.append(publisher) - - # create sender threads - thread = threading.Thread(target=self._try_send_msg, - args=(sender_id, self.no_msgs,)) - self._sender_threads.append(thread) - - for thread in self._receiver_threads: - thread.start() - - for thread in self._sender_threads: - thread.start() - - for thread in self._receiver_threads: - thread.join() - - for thread in self._sender_threads: - thread.join() - - # Each receiver should receive all the messages sent by - # the sender(s). - # So, Iterate through each of the receiver items in - # self._messages and compare with the expected messages - # messages. - - self.assertEqual(self.no_senders, len(self._expected)) - self.assertEqual(self.no_receivers, len(self._messages)) - - for key, messages in self._messages.iteritems(): - self.assertEqual(self._expected, messages) - -TestQpidTopicAndFanout.generate_scenarios() - - -class AddressNodeMatcher(object): - def __init__(self, node): - self.node = node - - def __eq__(self, address): - return address.split(';')[0].strip() == self.node - - -class TestDriverInterface(_QpidBaseTestCase): - """Unit Test cases to test the amqpdriver with qpid - """ - - def setUp(self): - super(TestDriverInterface, self).setUp() - self.config(qpid_topology_version=2, - group='oslo_messaging_qpid') - transport = messaging.get_transport(self.conf) - self.driver = transport._driver - - original_get_connection = self.driver._get_connection - p = mock.patch.object(self.driver, '_get_connection', - side_effect=lambda pooled=True: - original_get_connection(False)) - p.start() - self.addCleanup(p.stop) - - def test_listen_and_direct_send(self): - target = messaging.Target(exchange="exchange_test", - topic="topic_test", - server="server_test") - - with mock.patch('qpid.messaging.Connection') as conn_cls: - conn = conn_cls.return_value - session = conn.session.return_value - session.receiver.side_effect = [mock.Mock(), mock.Mock(), - mock.Mock()] - - listener = self.driver.listen(target) - listener.conn.direct_send("msg_id", {}) - - self.assertEqual(3, len(listener.conn.consumers)) - - expected_calls = [ - mock.call(AddressNodeMatcher( - 'amq.topic/topic/exchange_test/topic_test')), - mock.call(AddressNodeMatcher( - 'amq.topic/topic/exchange_test/topic_test.server_test')), - mock.call(AddressNodeMatcher('amq.topic/fanout/topic_test')), - ] - session.receiver.assert_has_calls(expected_calls) - session.sender.assert_called_with( - AddressNodeMatcher("amq.direct/msg_id")) - - def test_send(self): - target = messaging.Target(exchange="exchange_test", - 
topic="topic_test", - server="server_test") - with mock.patch('qpid.messaging.Connection') as conn_cls: - conn = conn_cls.return_value - session = conn.session.return_value - - self.driver.send(target, {}, {}) - session.sender.assert_called_with(AddressNodeMatcher( - "amq.topic/topic/exchange_test/topic_test.server_test")) - - def test_send_notification(self): - target = messaging.Target(exchange="exchange_test", - topic="topic_test.info") - with mock.patch('qpid.messaging.Connection') as conn_cls: - conn = conn_cls.return_value - session = conn.session.return_value - - self.driver.send_notification(target, {}, {}, "2.0") - session.sender.assert_called_with(AddressNodeMatcher( - "amq.topic/topic/exchange_test/topic_test.info")) - - -class TestQpidReconnectOrder(test_utils.BaseTestCase): - """Unit Test cases to test reconnection - """ - - @testtools.skipIf(qpid is None, "qpid not available") - def test_reconnect_order(self): - brokers = ['host1', 'host2', 'host3', 'host4', 'host5'] - brokers_count = len(brokers) - - self.config(qpid_hosts=brokers, - group='oslo_messaging_qpid') - - with mock.patch('qpid.messaging.Connection') as conn_mock: - # starting from the first broker in the list - url = messaging.TransportURL.parse(self.conf, None) - connection = qpid_driver.Connection(self.conf, url, - amqp.PURPOSE_SEND) - - # reconnect will advance to the next broker, one broker per - # attempt, and then wrap to the start of the list once the end is - # reached - for _ in range(brokers_count): - connection.reconnect() - - expected = [] - for broker in brokers: - expected.extend([mock.call("%s:5672" % broker), - mock.call().open(), - mock.call().session(), - mock.call().opened(), - mock.call().opened().__nonzero__(), - mock.call().close()]) - - conn_mock.assert_has_calls(expected, any_order=True) - - -def synchronized(func): - func.__lock__ = threading.Lock() - - def synced_func(*args, **kws): - with func.__lock__: - return func(*args, **kws) - - return synced_func - - -class FakeQpidMsgManager(object): - def __init__(self): - self._exchanges = {} - - @synchronized - def add_exchange(self, exchange): - if exchange not in self._exchanges: - self._exchanges[exchange] = {'msgs': [], 'consumers': {}} - - @synchronized - def add_exchange_consumer(self, exchange, consumer_id): - exchange_info = self._exchanges[exchange] - cons_dict = exchange_info['consumers'] - cons_dict[consumer_id] = 0 - - @synchronized - def add_exchange_msg(self, exchange, msg): - exchange_info = self._exchanges[exchange] - exchange_info['msgs'].append(msg) - - def get_exchange_msg(self, exchange, index): - exchange_info = self._exchanges[exchange] - return exchange_info['msgs'][index] - - def get_no_exch_msgs(self, exchange): - exchange_info = self._exchanges[exchange] - return len(exchange_info['msgs']) - - def get_exch_cons_index(self, exchange, consumer_id): - exchange_info = self._exchanges[exchange] - cons_dict = exchange_info['consumers'] - return cons_dict[consumer_id] - - @synchronized - def inc_consumer_index(self, exchange, consumer_id): - exchange_info = self._exchanges[exchange] - cons_dict = exchange_info['consumers'] - cons_dict[consumer_id] += 1 - -_fake_qpid_msg_manager = FakeQpidMsgManager() - - -class FakeQpidSessionSender(object): - def __init__(self, session, id, target, options): - self.session = session - self.id = id - self.target = target - self.options = options - - @synchronized - def send(self, object, sync=True, timeout=None): - _fake_qpid_msg_manager.add_exchange_msg(self.target, object) - - def 
close(self, timeout=None): - pass - - -class FakeQpidSessionReceiver(object): - - def __init__(self, session, id, source, options): - self.session = session - self.id = id - self.source = source - self.options = options - - @synchronized - def fetch(self, timeout=None): - if timeout is None: - # if timeout is not given, take a default time out - # of 30 seconds to avoid indefinite loop - _timeout = 30 - else: - _timeout = timeout - - deadline = time.time() + _timeout - while time.time() <= deadline: - index = _fake_qpid_msg_manager.get_exch_cons_index(self.source, - self.id) - try: - msg = _fake_qpid_msg_manager.get_exchange_msg(self.source, - index) - except IndexError: - pass - else: - _fake_qpid_msg_manager.inc_consumer_index(self.source, - self.id) - return qpid.messaging.Message(msg) - time.sleep(0.050) - - if timeout is None: - raise Exception('timed out waiting for reply') - - def close(self, timeout=None): - pass - - @synchronized - def available(self): - no_msgs = _fake_qpid_msg_manager.get_no_exch_msgs(self.source) - index = _fake_qpid_msg_manager.get_exch_cons_index(self.source, - self.id) - if no_msgs == 0 or index >= no_msgs: - return 0 - else: - return no_msgs - index - - -class FakeQpidSession(object): - - def __init__(self, connection=None, name=None, transactional=None): - self.connection = connection - self.name = name - self.transactional = transactional - self._receivers = {} - self.conf = None - self.url = None - self._senders = {} - self._sender_id = 0 - self._receiver_id = 0 - - @synchronized - def sender(self, target, **options): - exchange_key = self._extract_exchange_key(target) - _fake_qpid_msg_manager.add_exchange(exchange_key) - - sendobj = FakeQpidSessionSender(self, self._sender_id, - exchange_key, options) - self._senders[self._sender_id] = sendobj - self._sender_id = self._sender_id + 1 - return sendobj - - @synchronized - def receiver(self, source, **options): - exchange_key = self._extract_exchange_key(source) - _fake_qpid_msg_manager.add_exchange(exchange_key) - recvobj = FakeQpidSessionReceiver(self, self._receiver_id, - exchange_key, options) - self._receivers[self._receiver_id] = recvobj - _fake_qpid_msg_manager.add_exchange_consumer(exchange_key, - self._receiver_id) - self._receiver_id += 1 - return recvobj - - def acknowledge(self, message=None, disposition=None, sync=True): - pass - - @synchronized - def flush_exchanges(self): - _fake_qpid_msg_manager._exchanges = {} - - def _extract_exchange_key(self, exchange_msg): - """This function extracts a unique key for the exchange. - This key is used in the dictionary as a 'key' for - this exchange. - Eg. if the exchange_msg (for qpid topology version 1) - is 33/33 ; {"node": {"x-declare": {"auto-delete": true, .... - then 33 is returned as the key. - Eg 2. For topology v2, if the - exchange_msg is - amq.direct/44 ; {"link": {"x-dec....... 
- then 44 is returned - """ - # first check for ';' - semicolon_split = exchange_msg.split(';') - - # split the first item of semicolon_split with '/' - slash_split = semicolon_split[0].split('/') - # return the last element of the list as the key - key = slash_split[-1] - return key.strip() - - def close(self): - pass - -_fake_session = FakeQpidSession() - - -def get_fake_qpid_session(): - return _fake_session - - -class QPidHATestCase(test_utils.BaseTestCase): - - @testtools.skipIf(qpid is None, "qpid not available") - def setUp(self): - super(QPidHATestCase, self).setUp() - self.brokers = ['host1', 'host2', 'host3', 'host4', 'host5'] - - self.config(qpid_hosts=self.brokers, - qpid_username=None, - qpid_password=None, - group='oslo_messaging_qpid') - - hostname_sets = set() - self.info = {'attempt': 0, - 'fail': False} - - def _connect(myself, broker): - # do as little work that is enough to pass connection attempt - myself.connection = mock.Mock() - hostname = broker['host'] - self.assertNotIn(hostname, hostname_sets) - hostname_sets.add(hostname) - - self.info['attempt'] += 1 - if self.info['fail']: - raise qpid.messaging.exceptions.ConnectionError - - # just make sure connection instantiation does not fail with an - # exception - self.stubs.Set(qpid_driver.Connection, '_connect', _connect) - - # starting from the first broker in the list - url = messaging.TransportURL.parse(self.conf, None) - self.connection = qpid_driver.Connection(self.conf, url, - amqp.PURPOSE_SEND) - self.addCleanup(self.connection.close) - - self.info.update({'attempt': 0, - 'fail': True}) - hostname_sets.clear() - - def test_reconnect_order(self): - self.assertRaises(messaging.MessageDeliveryFailure, - self.connection.reconnect, - retry=len(self.brokers) - 1) - self.assertEqual(len(self.brokers), self.info['attempt']) - - def test_ensure_four_retries(self): - mock_callback = mock.Mock( - side_effect=qpid.messaging.exceptions.ConnectionError) - self.assertRaises(messaging.MessageDeliveryFailure, - self.connection.ensure, None, mock_callback, - retry=4) - self.assertEqual(5, self.info['attempt']) - self.assertEqual(1, mock_callback.call_count) - - def test_ensure_one_retry(self): - mock_callback = mock.Mock( - side_effect=qpid.messaging.exceptions.ConnectionError) - self.assertRaises(messaging.MessageDeliveryFailure, - self.connection.ensure, None, mock_callback, - retry=1) - self.assertEqual(2, self.info['attempt']) - self.assertEqual(1, mock_callback.call_count) - - def test_ensure_no_retry(self): - mock_callback = mock.Mock( - side_effect=qpid.messaging.exceptions.ConnectionError) - self.assertRaises(messaging.MessageDeliveryFailure, - self.connection.ensure, None, mock_callback, - retry=0) - self.assertEqual(1, self.info['attempt']) - self.assertEqual(1, mock_callback.call_count) diff --git a/tests/drivers/test_impl_rabbit.py b/tests/drivers/test_impl_rabbit.py deleted file mode 100644 index b2da4a828..000000000 --- a/tests/drivers/test_impl_rabbit.py +++ /dev/null @@ -1,758 +0,0 @@ -# Copyright 2013 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - -import datetime -import sys -import threading -import time -import uuid - -import fixtures -import kombu -from oslotest import mockpatch -import testscenarios - -from oslo import messaging -from oslo_config import cfg -from oslo_messaging._drivers import amqp -from oslo_messaging._drivers import amqpdriver -from oslo_messaging._drivers import common as driver_common -from oslo_messaging._drivers import impl_rabbit as rabbit_driver -from oslo_messaging.tests import utils as test_utils -from oslo_serialization import jsonutils -from six.moves import mock - -load_tests = testscenarios.load_tests_apply_scenarios - - -class TestDeprecatedRabbitDriverLoad(test_utils.BaseTestCase): - - def setUp(self): - super(TestDeprecatedRabbitDriverLoad, self).setUp( - conf=cfg.ConfigOpts()) - self.messaging_conf.transport_driver = 'rabbit' - self.config(fake_rabbit=True, group="oslo_messaging_rabbit") - - def test_driver_load(self): - self.config(heartbeat_timeout_threshold=0, - group='oslo_messaging_rabbit') - transport = messaging.get_transport(self.conf) - self.addCleanup(transport.cleanup) - driver = transport._driver - url = driver._get_connection()._url - - self.assertIsInstance(driver, rabbit_driver.RabbitDriver) - self.assertEqual('memory:////', url) - - -class TestRabbitDriverLoad(test_utils.BaseTestCase): - - scenarios = [ - ('rabbit', dict(transport_driver='rabbit', - url='amqp://guest:guest@localhost:5672//')), - ('kombu', dict(transport_driver='kombu', - url='amqp://guest:guest@localhost:5672//')), - ('rabbit+memory', dict(transport_driver='kombu+memory', - url='memory:///')) - ] - - @mock.patch('oslo_messaging._drivers.impl_rabbit.Connection.ensure') - @mock.patch('oslo_messaging._drivers.impl_rabbit.Connection.reset') - def test_driver_load(self, fake_ensure, fake_reset): - self.config(heartbeat_timeout_threshold=0, - group='oslo_messaging_rabbit') - self.messaging_conf.transport_driver = self.transport_driver - transport = messaging.get_transport(self.conf) - self.addCleanup(transport.cleanup) - driver = transport._driver - url = driver._get_connection()._url - - self.assertIsInstance(driver, rabbit_driver.RabbitDriver) - self.assertEqual(self.url, url) - - -class TestRabbitConsume(test_utils.BaseTestCase): - - def test_consume_timeout(self): - transport = messaging.get_transport(self.conf, 'kombu+memory:////') - self.addCleanup(transport.cleanup) - deadline = time.time() + 6 - with transport._driver._get_connection(amqp.PURPOSE_LISTEN) as conn: - self.assertRaises(driver_common.Timeout, - conn.consume, timeout=3) - - # kombu memory transport doesn't really raise error - # so just simulate a real driver behavior - conn.connection.connection.recoverable_channel_errors = (IOError,) - conn.declare_fanout_consumer("notif.info", lambda msg: True) - with mock.patch('kombu.connection.Connection.drain_events', - side_effect=IOError): - self.assertRaises(driver_common.Timeout, - conn.consume, timeout=3) - - self.assertEqual(0, int(deadline - time.time())) - - -class TestRabbitTransportURL(test_utils.BaseTestCase): - - scenarios = [ - ('none', dict(url=None, - expected=["amqp://guest:guest@localhost:5672//"])), - ('memory', dict(url='kombu+memory:////', - expected=["memory:///"])), - ('empty', - dict(url='rabbit:///', - expected=['amqp://guest:guest@localhost:5672/'])), - ('localhost', - dict(url='rabbit://localhost/', - expected=['amqp://:@localhost:5672/'])), - ('virtual_host', - 
dict(url='rabbit:///vhost', - expected=['amqp://guest:guest@localhost:5672/vhost'])), - ('no_creds', - dict(url='rabbit://host/virtual_host', - expected=['amqp://:@host:5672/virtual_host'])), - ('no_port', - dict(url='rabbit://user:password@host/virtual_host', - expected=['amqp://user:password@host:5672/virtual_host'])), - ('full_url', - dict(url='rabbit://user:password@host:10/virtual_host', - expected=['amqp://user:password@host:10/virtual_host'])), - ('full_two_url', - dict(url='rabbit://user:password@host:10,' - 'user2:password2@host2:12/virtual_host', - expected=["amqp://user:password@host:10/virtual_host", - "amqp://user2:password2@host2:12/virtual_host"] - )), - ('qpid', - dict(url='kombu+qpid://user:password@host:10/virtual_host', - expected=['qpid://user:password@host:10/virtual_host'])), - ('rabbit', - dict(url='kombu+rabbit://user:password@host:10/virtual_host', - expected=['amqp://user:password@host:10/virtual_host'])), - ('rabbit_ipv6', - dict(url='kombu+rabbit://u:p@[fd00:beef:dead:55::133]:10/vhost', - skip_py26='python 2.6 has broken urlparse for ipv6', - expected=['amqp://u:p@[fd00:beef:dead:55::133]:10/vhost'])), - ('rabbit_ipv4', - dict(url='kombu+rabbit://user:password@10.20.30.40:10/vhost', - expected=['amqp://user:password@10.20.30.40:10/vhost'])), - ] - - def setUp(self): - super(TestRabbitTransportURL, self).setUp() - self.config(heartbeat_timeout_threshold=0, - group='oslo_messaging_rabbit') - self.messaging_conf.transport_driver = 'rabbit' - - @mock.patch('oslo_messaging._drivers.impl_rabbit.Connection.ensure') - @mock.patch('oslo_messaging._drivers.impl_rabbit.Connection.reset') - def test_transport_url(self, fake_ensure_connection, fake_reset): - if hasattr(self, 'skip_py26') and sys.version_info < (2, 7): - self.skipTest(self.skip_py26) - - transport = messaging.get_transport(self.conf, self.url) - self.addCleanup(transport.cleanup) - driver = transport._driver - - # NOTE(sileht): some kombu transport can depend on library that - # we don't want to depend yet, because selecting the transport - # is experimental, only amqp is supported - # for example kombu+qpid depends of qpid-tools - # so, mock the connection.info to skip call to qpid-tools - with mock.patch('kombu.connection.Connection.info'): - urls = driver._get_connection()._url.split(";") - self.assertEqual(sorted(self.expected), sorted(urls)) - - -class TestSendReceive(test_utils.BaseTestCase): - - _n_senders = [ - ('single_sender', dict(n_senders=1)), - ('multiple_senders', dict(n_senders=10)), - ] - - _context = [ - ('empty_context', dict(ctxt={})), - ('with_context', dict(ctxt={'user': 'mark'})), - ] - - _reply = [ - ('rx_id', dict(rx_id=True, reply=None)), - ('none', dict(rx_id=False, reply=None)), - ('empty_list', dict(rx_id=False, reply=[])), - ('empty_dict', dict(rx_id=False, reply={})), - ('false', dict(rx_id=False, reply=False)), - ('zero', dict(rx_id=False, reply=0)), - ] - - _failure = [ - ('success', dict(failure=False)), - ('failure', dict(failure=True, expected=False)), - ('expected_failure', dict(failure=True, expected=True)), - ] - - _timeout = [ - ('no_timeout', dict(timeout=None)), - ('timeout', dict(timeout=0.01)), # FIXME(markmc): timeout=0 is broken? 
- ] - - @classmethod - def generate_scenarios(cls): - cls.scenarios = testscenarios.multiply_scenarios(cls._n_senders, - cls._context, - cls._reply, - cls._failure, - cls._timeout) - - def test_send_receive(self): - self.config(heartbeat_timeout_threshold=0, - group='oslo_messaging_rabbit') - transport = messaging.get_transport(self.conf, 'kombu+memory:////') - self.addCleanup(transport.cleanup) - - driver = transport._driver - - target = messaging.Target(topic='testtopic') - - listener = driver.listen(target) - - senders = [] - replies = [] - msgs = [] - errors = [] - - def stub_error(msg, *a, **kw): - if (a and len(a) == 1 and isinstance(a[0], dict) and a[0]): - a = a[0] - errors.append(str(msg) % a) - - self.stubs.Set(driver_common.LOG, 'error', stub_error) - - def send_and_wait_for_reply(i): - try: - replies.append(driver.send(target, - self.ctxt, - {'tx_id': i}, - wait_for_reply=True, - timeout=self.timeout)) - self.assertFalse(self.failure) - self.assertIsNone(self.timeout) - except (ZeroDivisionError, messaging.MessagingTimeout) as e: - replies.append(e) - self.assertTrue(self.failure or self.timeout is not None) - - while len(senders) < self.n_senders: - senders.append(threading.Thread(target=send_and_wait_for_reply, - args=(len(senders), ))) - - for i in range(len(senders)): - senders[i].start() - - received = listener.poll() - self.assertIsNotNone(received) - self.assertEqual(self.ctxt, received.ctxt) - self.assertEqual({'tx_id': i}, received.message) - msgs.append(received) - - # reply in reverse, except reply to the first guy second from last - order = list(range(len(senders) - 1, -1, -1)) - if len(order) > 1: - order[-1], order[-2] = order[-2], order[-1] - - for i in order: - if self.timeout is None: - if self.failure: - try: - raise ZeroDivisionError - except Exception: - failure = sys.exc_info() - - # NOTE(noelbk) confirm that Publisher exchanges - # are always declared with passive=True - outer_self = self - test_exchange_was_called = [False] - old_init = kombu.entity.Exchange.__init__ - - def new_init(self, *args, **kwargs): - test_exchange_was_called[0] = True - outer_self.assertTrue(kwargs['passive']) - old_init(self, *args, **kwargs) - kombu.entity.Exchange.__init__ = new_init - - try: - msgs[i].reply(failure=failure, - log_failure=not self.expected) - finally: - kombu.entity.Exchange.__init__ = old_init - - self.assertTrue(test_exchange_was_called[0]) - - elif self.rx_id: - msgs[i].reply({'rx_id': i}) - else: - msgs[i].reply(self.reply) - senders[i].join() - - self.assertEqual(len(senders), len(replies)) - for i, reply in enumerate(replies): - if self.timeout is not None: - self.assertIsInstance(reply, messaging.MessagingTimeout) - elif self.failure: - self.assertIsInstance(reply, ZeroDivisionError) - elif self.rx_id: - self.assertEqual({'rx_id': order[i]}, reply) - else: - self.assertEqual(self.reply, reply) - - if not self.timeout and self.failure and not self.expected: - self.assertTrue(len(errors) > 0, errors) - else: - self.assertEqual(0, len(errors), errors) - - -TestSendReceive.generate_scenarios() - - -class TestPollAsync(test_utils.BaseTestCase): - - def test_poll_timeout(self): - transport = messaging.get_transport(self.conf, 'kombu+memory:////') - self.addCleanup(transport.cleanup) - driver = transport._driver - target = messaging.Target(topic='testtopic') - listener = driver.listen(target) - received = listener.poll(timeout=0.050) - self.assertIsNone(received) - - -class TestRacyWaitForReply(test_utils.BaseTestCase): - - def test_send_receive(self): - 
transport = messaging.get_transport(self.conf, 'kombu+memory:////') - self.addCleanup(transport.cleanup) - - driver = transport._driver - - target = messaging.Target(topic='testtopic') - - listener = driver.listen(target) - - senders = [] - replies = [] - msgs = [] - - wait_conditions = [] - orig_reply_waiter = amqpdriver.ReplyWaiter.wait - - def reply_waiter(self, msg_id, timeout): - if wait_conditions: - cond = wait_conditions.pop() - with cond: - cond.notify() - with cond: - cond.wait() - return orig_reply_waiter(self, msg_id, timeout) - - self.stubs.Set(amqpdriver.ReplyWaiter, 'wait', reply_waiter) - - def send_and_wait_for_reply(i, wait_for_reply): - replies.append(driver.send(target, - {}, - {'tx_id': i}, - wait_for_reply=wait_for_reply, - timeout=None)) - - while len(senders) < 2: - t = threading.Thread(target=send_and_wait_for_reply, - args=(len(senders), True)) - t.daemon = True - senders.append(t) - - # test the case then msg_id is not set - t = threading.Thread(target=send_and_wait_for_reply, - args=(len(senders), False)) - t.daemon = True - senders.append(t) - - # Start the first guy, receive his message, but delay his polling - notify_condition = threading.Condition() - wait_conditions.append(notify_condition) - with notify_condition: - senders[0].start() - notify_condition.wait() - - msgs.append(listener.poll()) - self.assertEqual({'tx_id': 0}, msgs[-1].message) - - # Start the second guy, receive his message - senders[1].start() - - msgs.append(listener.poll()) - self.assertEqual({'tx_id': 1}, msgs[-1].message) - - # Reply to both in order, making the second thread queue - # the reply meant for the first thread - msgs[0].reply({'rx_id': 0}) - msgs[1].reply({'rx_id': 1}) - - # Wait for the second thread to finish - senders[1].join() - - # Start the 3rd guy, receive his message - senders[2].start() - - msgs.append(listener.poll()) - self.assertEqual({'tx_id': 2}, msgs[-1].message) - - # Verify the _send_reply was not invoked by driver: - with mock.patch.object(msgs[2], '_send_reply') as method: - msgs[2].reply({'rx_id': 2}) - self.assertEqual(method.call_count, 0) - - # Wait for the 3rd thread to finish - senders[2].join() - - # Let the first thread continue - with notify_condition: - notify_condition.notify() - - # Wait for the first thread to finish - senders[0].join() - - # Verify replies were received out of order - self.assertEqual(len(senders), len(replies)) - self.assertEqual({'rx_id': 1}, replies[0]) - self.assertIsNone(replies[1]) - self.assertEqual({'rx_id': 0}, replies[2]) - - -def _declare_queue(target): - connection = kombu.connection.BrokerConnection(transport='memory') - - # Kludge to speed up tests. 
- connection.transport.polling_interval = 0.0 - - connection.connect() - channel = connection.channel() - - # work around 'memory' transport bug in 1.1.3 - channel._new_queue('ae.undeliver') - - if target.fanout: - exchange = kombu.entity.Exchange(name=target.topic + '_fanout', - type='fanout', - durable=False, - auto_delete=True) - queue = kombu.entity.Queue(name=target.topic + '_fanout_12345', - channel=channel, - exchange=exchange, - routing_key=target.topic) - if target.server: - exchange = kombu.entity.Exchange(name='openstack', - type='topic', - durable=False, - auto_delete=False) - topic = '%s.%s' % (target.topic, target.server) - queue = kombu.entity.Queue(name=topic, - channel=channel, - exchange=exchange, - routing_key=topic) - else: - exchange = kombu.entity.Exchange(name='openstack', - type='topic', - durable=False, - auto_delete=False) - queue = kombu.entity.Queue(name=target.topic, - channel=channel, - exchange=exchange, - routing_key=target.topic) - - queue.declare() - - return connection, channel, queue - - -class TestRequestWireFormat(test_utils.BaseTestCase): - - _target = [ - ('topic_target', - dict(topic='testtopic', server=None, fanout=False)), - ('server_target', - dict(topic='testtopic', server='testserver', fanout=False)), - # NOTE(markmc): https://github.com/celery/kombu/issues/195 - ('fanout_target', - dict(topic='testtopic', server=None, fanout=True, - skip_msg='Requires kombu>2.5.12 to fix kombu issue #195')), - ] - - _msg = [ - ('empty_msg', - dict(msg={}, expected={})), - ('primitive_msg', - dict(msg={'foo': 'bar'}, expected={'foo': 'bar'})), - ('complex_msg', - dict(msg={'a': {'b': datetime.datetime(1920, 2, 3, 4, 5, 6, 7)}}, - expected={'a': {'b': '1920-02-03T04:05:06.000007'}})), - ] - - _context = [ - ('empty_ctxt', dict(ctxt={}, expected_ctxt={})), - ('user_project_ctxt', - dict(ctxt={'user': 'mark', 'project': 'snarkybunch'}, - expected_ctxt={'_context_user': 'mark', - '_context_project': 'snarkybunch'})), - ] - - @classmethod - def generate_scenarios(cls): - cls.scenarios = testscenarios.multiply_scenarios(cls._msg, - cls._context, - cls._target) - - def setUp(self): - super(TestRequestWireFormat, self).setUp() - self.uuids = [] - self.orig_uuid4 = uuid.uuid4 - self.useFixture(fixtures.MonkeyPatch('uuid.uuid4', self.mock_uuid4)) - - def mock_uuid4(self): - self.uuids.append(self.orig_uuid4()) - return self.uuids[-1] - - def test_request_wire_format(self): - if hasattr(self, 'skip_msg'): - self.skipTest(self.skip_msg) - - transport = messaging.get_transport(self.conf, 'kombu+memory:////') - self.addCleanup(transport.cleanup) - - driver = transport._driver - - target = messaging.Target(topic=self.topic, - server=self.server, - fanout=self.fanout) - - connection, channel, queue = _declare_queue(target) - self.addCleanup(connection.release) - - driver.send(target, self.ctxt, self.msg) - - msgs = [] - - def callback(msg): - msg = channel.message_to_python(msg) - msg.ack() - msgs.append(msg.payload) - - queue.consume(callback=callback, - consumer_tag='1', - nowait=False) - - connection.drain_events() - - self.assertEqual(1, len(msgs)) - self.assertIn('oslo.message', msgs[0]) - - received = msgs[0] - received['oslo.message'] = jsonutils.loads(received['oslo.message']) - - # FIXME(markmc): add _msg_id and _reply_q check - expected_msg = { - '_unique_id': self.uuids[0].hex, - } - expected_msg.update(self.expected) - expected_msg.update(self.expected_ctxt) - - expected = { - 'oslo.version': '2.0', - 'oslo.message': expected_msg, - } - - self.assertEqual(expected, 
received) - - -TestRequestWireFormat.generate_scenarios() - - -def _create_producer(target): - connection = kombu.connection.BrokerConnection(transport='memory') - - # Kludge to speed up tests. - connection.transport.polling_interval = 0.0 - - connection.connect() - channel = connection.channel() - - # work around 'memory' transport bug in 1.1.3 - channel._new_queue('ae.undeliver') - - if target.fanout: - exchange = kombu.entity.Exchange(name=target.topic + '_fanout', - type='fanout', - durable=False, - auto_delete=True) - producer = kombu.messaging.Producer(exchange=exchange, - channel=channel, - routing_key=target.topic) - elif target.server: - exchange = kombu.entity.Exchange(name='openstack', - type='topic', - durable=False, - auto_delete=False) - topic = '%s.%s' % (target.topic, target.server) - producer = kombu.messaging.Producer(exchange=exchange, - channel=channel, - routing_key=topic) - else: - exchange = kombu.entity.Exchange(name='openstack', - type='topic', - durable=False, - auto_delete=False) - producer = kombu.messaging.Producer(exchange=exchange, - channel=channel, - routing_key=target.topic) - - return connection, producer - - -class TestReplyWireFormat(test_utils.BaseTestCase): - - _target = [ - ('topic_target', - dict(topic='testtopic', server=None, fanout=False)), - ('server_target', - dict(topic='testtopic', server='testserver', fanout=False)), - # NOTE(markmc): https://github.com/celery/kombu/issues/195 - ('fanout_target', - dict(topic='testtopic', server=None, fanout=True, - skip_msg='Requires kombu>2.5.12 to fix kombu issue #195')), - ] - - _msg = [ - ('empty_msg', - dict(msg={}, expected={})), - ('primitive_msg', - dict(msg={'foo': 'bar'}, expected={'foo': 'bar'})), - ('complex_msg', - dict(msg={'a': {'b': '1920-02-03T04:05:06.000007'}}, - expected={'a': {'b': '1920-02-03T04:05:06.000007'}})), - ] - - _context = [ - ('empty_ctxt', dict(ctxt={}, expected_ctxt={})), - ('user_project_ctxt', - dict(ctxt={'_context_user': 'mark', - '_context_project': 'snarkybunch'}, - expected_ctxt={'user': 'mark', 'project': 'snarkybunch'})), - ] - - @classmethod - def generate_scenarios(cls): - cls.scenarios = testscenarios.multiply_scenarios(cls._msg, - cls._context, - cls._target) - - def test_reply_wire_format(self): - if hasattr(self, 'skip_msg'): - self.skipTest(self.skip_msg) - - transport = messaging.get_transport(self.conf, 'kombu+memory:////') - self.addCleanup(transport.cleanup) - - driver = transport._driver - - target = messaging.Target(topic=self.topic, - server=self.server, - fanout=self.fanout) - - listener = driver.listen(target) - - connection, producer = _create_producer(target) - self.addCleanup(connection.release) - - msg = { - 'oslo.version': '2.0', - 'oslo.message': {} - } - - msg['oslo.message'].update(self.msg) - msg['oslo.message'].update(self.ctxt) - - msg['oslo.message'].update({ - '_msg_id': uuid.uuid4().hex, - '_unique_id': uuid.uuid4().hex, - '_reply_q': 'reply_' + uuid.uuid4().hex, - }) - - msg['oslo.message'] = jsonutils.dumps(msg['oslo.message']) - - producer.publish(msg) - - received = listener.poll() - self.assertIsNotNone(received) - self.assertEqual(self.expected_ctxt, received.ctxt) - self.assertEqual(self.expected, received.message) - - -TestReplyWireFormat.generate_scenarios() - - -class RpcKombuHATestCase(test_utils.BaseTestCase): - - def setUp(self): - super(RpcKombuHATestCase, self).setUp() - self.brokers = ['host1', 'host2', 'host3', 'host4', 'host5'] - self.config(rabbit_hosts=self.brokers, - rabbit_retry_interval=0.01, - 
rabbit_retry_backoff=0.01, - kombu_reconnect_delay=0, - group="oslo_messaging_rabbit") - - self.kombu_connect = mock.Mock() - self.useFixture(mockpatch.Patch( - 'kombu.connection.Connection.connect', - side_effect=self.kombu_connect)) - self.useFixture(mockpatch.Patch( - 'kombu.connection.Connection.channel')) - - # starting from the first broker in the list - url = messaging.TransportURL.parse(self.conf, None) - self.connection = rabbit_driver.Connection(self.conf, url, - amqp.PURPOSE_SEND) - self.addCleanup(self.connection.close) - - def test_ensure_four_retry(self): - mock_callback = mock.Mock(side_effect=IOError) - self.assertRaises(messaging.MessageDeliveryFailure, - self.connection.ensure, mock_callback, - retry=4) - self.assertEqual(5, self.kombu_connect.call_count) - self.assertEqual(6, mock_callback.call_count) - - def test_ensure_one_retry(self): - mock_callback = mock.Mock(side_effect=IOError) - self.assertRaises(messaging.MessageDeliveryFailure, - self.connection.ensure, mock_callback, - retry=1) - self.assertEqual(2, self.kombu_connect.call_count) - self.assertEqual(3, mock_callback.call_count) - - def test_ensure_no_retry(self): - mock_callback = mock.Mock(side_effect=IOError) - self.assertRaises(messaging.MessageDeliveryFailure, - self.connection.ensure, mock_callback, - retry=0) - self.assertEqual(1, self.kombu_connect.call_count) - self.assertEqual(2, mock_callback.call_count) diff --git a/tests/drivers/test_impl_zmq.py b/tests/drivers/test_impl_zmq.py deleted file mode 100644 index d191ae64c..000000000 --- a/tests/drivers/test_impl_zmq.py +++ /dev/null @@ -1,228 +0,0 @@ -# Copyright 2015 Mirantis, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import logging -import threading - -import fixtures -import testtools - -import oslo_messaging -from oslo_messaging._drivers import impl_zmq -from oslo_messaging._drivers.zmq_driver import zmq_async -from oslo_messaging._i18n import _ -from oslo_messaging.tests import utils as test_utils - -LOG = logging.getLogger(__name__) - -zmq = zmq_async.import_zmq() - - -class TestRPCServerListener(object): - - def __init__(self, driver): - self.driver = driver - self.target = None - self.listener = None - self.executor = zmq_async.get_executor(self._run) - self._stop = threading.Event() - self._received = threading.Event() - self.message = None - - def listen(self, target): - self.target = target - self.listener = self.driver.listen(self.target) - self.executor.execute() - - def _run(self): - try: - message = self.listener.poll() - if message is not None: - self._received.set() - self.message = message - message.reply(reply=True) - except Exception: - LOG.exception(_("Unexpected exception occurred.")) - - def stop(self): - self.executor.stop() - - -class ZmqBaseTestCase(test_utils.BaseTestCase): - """Base test case for all ZMQ tests """ - - @testtools.skipIf(zmq is None, "zmq not available") - def setUp(self): - super(ZmqBaseTestCase, self).setUp() - self.messaging_conf.transport_driver = 'zmq' - - # Set config values - self.internal_ipc_dir = self.useFixture(fixtures.TempDir()).path - kwargs = {'rpc_zmq_bind_address': '127.0.0.1', - 'rpc_zmq_host': '127.0.0.1', - 'rpc_response_timeout': 5, - 'rpc_zmq_ipc_dir': self.internal_ipc_dir, - 'rpc_zmq_matchmaker': 'dummy'} - self.config(**kwargs) - - # Get driver - transport = oslo_messaging.get_transport(self.conf) - self.driver = transport._driver - - self.listener = TestRPCServerListener(self.driver) - - self.addCleanup(stopRpc(self.__dict__)) - - -class TestConfZmqDriverLoad(test_utils.BaseTestCase): - - @testtools.skipIf(zmq is None, "zmq not available") - def setUp(self): - super(TestConfZmqDriverLoad, self).setUp() - self.messaging_conf.transport_driver = 'zmq' - - def test_driver_load(self): - transport = oslo_messaging.get_transport(self.conf) - self.assertIsInstance(transport._driver, impl_zmq.ZmqDriver) - - -class stopRpc(object): - def __init__(self, attrs): - self.attrs = attrs - - def __call__(self): - if self.attrs['driver']: - self.attrs['driver'].cleanup() - if self.attrs['listener']: - self.attrs['listener'].stop() - - -class TestZmqBasics(ZmqBaseTestCase): - - def test_send_receive_raises(self): - """Call() without method.""" - target = oslo_messaging.Target(topic='testtopic') - self.listener.listen(target) - self.assertRaises( - KeyError, - self.driver.send, - target, {}, {'tx_id': 1}, wait_for_reply=True) - - def test_send_receive_topic(self): - """Call() with topic.""" - - target = oslo_messaging.Target(topic='testtopic') - self.listener.listen(target) - result = self.driver.send( - target, {}, - {'method': 'hello-world', 'tx_id': 1}, - wait_for_reply=True) - self.assertTrue(result) - - def test_send_noreply(self): - """Cast() with topic.""" - - target = oslo_messaging.Target(topic='testtopic', server="my@server") - self.listener.listen(target) - result = self.driver.send( - target, {}, - {'method': 'hello-world', 'tx_id': 1}, - wait_for_reply=False) - - self.listener._received.wait() - - self.assertIsNone(result) - self.assertEqual(True, self.listener._received.isSet()) - method = self.listener.message.message[u'method'] - self.assertEqual(u'hello-world', method) - - def test_send_fanout(self): - target = 
oslo_messaging.Target(topic='testtopic', fanout=True) - self.listener.listen(target) - - result = self.driver.send( - target, {}, - {'method': 'hello-world', 'tx_id': 1}, - wait_for_reply=False) - - self.listener._received.wait() - - self.assertIsNone(result) - self.assertEqual(True, self.listener._received.isSet()) - method = self.listener.message.message[u'method'] - self.assertEqual(u'hello-world', method) - - def test_send_receive_direct(self): - """Call() without topic.""" - - target = oslo_messaging.Target(server='127.0.0.1') - self.listener.listen(target) - message = {'method': 'hello-world', 'tx_id': 1} - context = {} - result = self.driver.send(target, context, message, - wait_for_reply=True) - self.assertTrue(result) - - -class TestPoller(test_utils.BaseTestCase): - - def setUp(self): - super(TestPoller, self).setUp() - self.poller = zmq_async.get_poller() - self.ctx = zmq.Context() - self.internal_ipc_dir = self.useFixture(fixtures.TempDir()).path - self.ADDR_REQ = "ipc://%s/request1" % self.internal_ipc_dir - - def test_poll_blocking(self): - - rep = self.ctx.socket(zmq.REP) - rep.bind(self.ADDR_REQ) - - reply_poller = zmq_async.get_reply_poller() - reply_poller.register(rep) - - def listener(): - incoming, socket = reply_poller.poll() - self.assertEqual(b'Hello', incoming[0]) - socket.send_string('Reply') - reply_poller.resume_polling(socket) - - executor = zmq_async.get_executor(listener) - executor.execute() - - req1 = self.ctx.socket(zmq.REQ) - req1.connect(self.ADDR_REQ) - - req2 = self.ctx.socket(zmq.REQ) - req2.connect(self.ADDR_REQ) - - req1.send_string('Hello') - req2.send_string('Hello') - - reply = req1.recv_string() - self.assertEqual('Reply', reply) - - reply = req2.recv_string() - self.assertEqual('Reply', reply) - - def test_poll_timeout(self): - rep = self.ctx.socket(zmq.REP) - rep.bind(self.ADDR_REQ) - - reply_poller = zmq_async.get_reply_poller() - reply_poller.register(rep) - - incoming, socket = reply_poller.poll(1) - self.assertIsNone(incoming) - self.assertIsNone(socket) diff --git a/tests/drivers/test_matchmaker.py b/tests/drivers/test_matchmaker.py deleted file mode 100644 index fe59fef15..000000000 --- a/tests/drivers/test_matchmaker.py +++ /dev/null @@ -1,69 +0,0 @@ -# Copyright 2014 Canonical, Ltd. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import testtools - -from oslo_messaging.tests import utils as test_utils -from oslo_utils import importutils - -# NOTE(jamespage) matchmaker tied directly to eventlet -# which is not yet py3 compatible - skip if import fails -matchmaker = ( - importutils.try_import('oslo.messaging._drivers.matchmaker')) - - -@testtools.skipIf(not matchmaker, "matchmaker/eventlet unavailable") -class MatchmakerTest(test_utils.BaseTestCase): - - def test_fanout_binding(self): - matcher = matchmaker.MatchMakerBase() - matcher.add_binding( - matchmaker.FanoutBinding(), matchmaker.DirectExchange()) - self.assertEqual(matcher.queues('hello.world'), []) - self.assertEqual( - matcher.queues('fanout~fantasy.unicorn'), - [('fanout~fantasy.unicorn', 'unicorn')]) - self.assertEqual( - matcher.queues('fanout~fantasy.pony'), - [('fanout~fantasy.pony', 'pony')]) - - def test_topic_binding(self): - matcher = matchmaker.MatchMakerBase() - matcher.add_binding( - matchmaker.TopicBinding(), matchmaker.StubExchange()) - self.assertEqual( - matcher.queues('hello-world'), [('hello-world', None)]) - - def test_direct_binding(self): - matcher = matchmaker.MatchMakerBase() - matcher.add_binding( - matchmaker.DirectBinding(), matchmaker.StubExchange()) - self.assertEqual( - matcher.queues('hello.server'), [('hello.server', None)]) - self.assertEqual(matcher.queues('hello-world'), []) - - def test_localhost_match(self): - matcher = matchmaker.MatchMakerLocalhost() - self.assertEqual( - matcher.queues('hello.server'), [('hello.server', 'server')]) - - # Gets remapped due to localhost exchange - # all bindings default to first match. - self.assertEqual( - matcher.queues('fanout~testing.server'), - [('fanout~testing.localhost', 'localhost')]) - - self.assertEqual( - matcher.queues('hello-world'), - [('hello-world.localhost', 'localhost')]) diff --git a/tests/drivers/test_matchmaker_redis.py b/tests/drivers/test_matchmaker_redis.py deleted file mode 100644 index 35a8c1464..000000000 --- a/tests/drivers/test_matchmaker_redis.py +++ /dev/null @@ -1,83 +0,0 @@ -# Copyright 2014 Canonical, Ltd. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import testtools - -from oslo_messaging.tests import utils as test_utils -from oslo_utils import importutils - -redis = importutils.try_import('redis') -matchmaker_redis = ( - importutils.try_import('oslo.messaging._drivers.matchmaker_redis')) - - -def redis_available(): - '''Helper to see if local redis server is running''' - if not redis: - return False - try: - c = redis.StrictRedis(socket_timeout=1) - c.ping() - return True - except redis.exceptions.ConnectionError: - return False - - -@testtools.skipIf(not matchmaker_redis, "matchmaker/eventlet unavailable") -@testtools.skipIf(not redis_available(), "redis unavailable") -class RedisMatchMakerTest(test_utils.BaseTestCase): - - def setUp(self): - super(RedisMatchMakerTest, self).setUp() - self.ring_data = { - "conductor": ["controller1", "node1", "node2", "node3"], - "scheduler": ["controller1", "node1", "node2", "node3"], - "network": ["controller1", "node1", "node2", "node3"], - "cert": ["controller1"], - "console": ["controller1"], - "l3_agent.node1": ["node1"], - "consoleauth": ["controller1"]} - self.matcher = matchmaker_redis.MatchMakerRedis() - self.populate() - - def tearDown(self): - super(RedisMatchMakerTest, self).tearDown() - c = redis.StrictRedis() - c.flushdb() - - def populate(self): - for k, hosts in self.ring_data.items(): - for h in hosts: - self.matcher.register(k, h) - - def test_direct(self): - self.assertEqual( - self.matcher.queues('cert.controller1'), - [('cert.controller1', 'controller1')]) - - def test_register(self): - self.matcher.register('cert', 'keymaster') - self.assertEqual( - sorted(self.matcher.redis.smembers('cert')), - ['cert.controller1', 'cert.keymaster']) - self.matcher.register('l3_agent.node1', 'node1') - self.assertEqual( - sorted(self.matcher.redis.smembers('l3_agent.node1')), - ['l3_agent.node1.node1']) - - def test_unregister(self): - self.matcher.unregister('conductor', 'controller1') - self.assertEqual( - sorted(self.matcher.redis.smembers('conductor')), - ['conductor.node1', 'conductor.node2', 'conductor.node3']) diff --git a/tests/drivers/test_matchmaker_ring.py b/tests/drivers/test_matchmaker_ring.py deleted file mode 100644 index 010746472..000000000 --- a/tests/drivers/test_matchmaker_ring.py +++ /dev/null @@ -1,73 +0,0 @@ -# Copyright 2014 Canonical, Ltd. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import testtools - -from oslo_messaging.tests import utils as test_utils -from oslo_utils import importutils - -# NOTE(jamespage) matchmaker tied directly to eventlet -# which is not yet py3 compatible - skip if import fails -matchmaker_ring = ( - importutils.try_import('oslo.messaging._drivers.matchmaker_ring')) - - -@testtools.skipIf(not matchmaker_ring, "matchmaker/eventlet unavailable") -class MatchmakerRingTest(test_utils.BaseTestCase): - - def setUp(self): - super(MatchmakerRingTest, self).setUp() - self.ring_data = { - "conductor": ["controller1", "node1", "node2", "node3"], - "scheduler": ["controller1", "node1", "node2", "node3"], - "network": ["controller1", "node1", "node2", "node3"], - "cert": ["controller1"], - "console": ["controller1"], - "consoleauth": ["controller1"]} - self.matcher = matchmaker_ring.MatchMakerRing(self.ring_data) - - def test_direct(self): - self.assertEqual( - self.matcher.queues('cert.controller1'), - [('cert.controller1', 'controller1')]) - self.assertEqual( - self.matcher.queues('conductor.node1'), - [('conductor.node1', 'node1')]) - - def test_fanout(self): - self.assertEqual( - self.matcher.queues('fanout~conductor'), - [('fanout~conductor.controller1', 'controller1'), - ('fanout~conductor.node1', 'node1'), - ('fanout~conductor.node2', 'node2'), - ('fanout~conductor.node3', 'node3')]) - - def test_bare_topic(self): - # Round robins through the hosts on the topic - self.assertEqual( - self.matcher.queues('scheduler'), - [('scheduler.controller1', 'controller1')]) - self.assertEqual( - self.matcher.queues('scheduler'), - [('scheduler.node1', 'node1')]) - self.assertEqual( - self.matcher.queues('scheduler'), - [('scheduler.node2', 'node2')]) - self.assertEqual( - self.matcher.queues('scheduler'), - [('scheduler.node3', 'node3')]) - # Cycles loop - self.assertEqual( - self.matcher.queues('scheduler'), - [('scheduler.controller1', 'controller1')]) diff --git a/tests/notify/__init__.py b/tests/notify/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/notify/test_dispatcher.py b/tests/notify/test_dispatcher.py deleted file mode 100644 index 5c61840a8..000000000 --- a/tests/notify/test_dispatcher.py +++ /dev/null @@ -1,171 +0,0 @@ - -# Copyright 2013 eNovance -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import itertools - -from oslo_utils import timeutils -import testscenarios - -from oslo import messaging -from oslo.messaging.notify import dispatcher as notify_dispatcher -from oslo_messaging.tests import utils as test_utils -from six.moves import mock - -load_tests = testscenarios.load_tests_apply_scenarios - - -notification_msg = dict( - publisher_id="publisher_id", - event_type="compute.start", - payload={"info": "fuu"}, - message_id="uuid", - timestamp=str(timeutils.utcnow()) -) - - -class TestDispatcherScenario(test_utils.BaseTestCase): - - scenarios = [ - ('no_endpoints', - dict(endpoints=[], - endpoints_expect_calls=[], - priority='info', - ex=None, - return_value=messaging.NotificationResult.HANDLED)), - ('one_endpoints', - dict(endpoints=[['warn']], - endpoints_expect_calls=['warn'], - priority='warn', - ex=None, - return_value=messaging.NotificationResult.HANDLED)), - ('two_endpoints_only_one_match', - dict(endpoints=[['warn'], ['info']], - endpoints_expect_calls=[None, 'info'], - priority='info', - ex=None, - return_value=messaging.NotificationResult.HANDLED)), - ('two_endpoints_both_match', - dict(endpoints=[['debug', 'info'], ['info', 'debug']], - endpoints_expect_calls=['debug', 'debug'], - priority='debug', - ex=None, - return_value=messaging.NotificationResult.HANDLED)), - ('no_return_value', - dict(endpoints=[['warn']], - endpoints_expect_calls=['warn'], - priority='warn', - ex=None, return_value=None)), - ('requeue', - dict(endpoints=[['debug', 'warn']], - endpoints_expect_calls=['debug'], - priority='debug', msg=notification_msg, - ex=None, - return_value=messaging.NotificationResult.REQUEUE)), - ('exception', - dict(endpoints=[['debug', 'warn']], - endpoints_expect_calls=['debug'], - priority='debug', msg=notification_msg, - ex=Exception, - return_value=messaging.NotificationResult.HANDLED)), - ] - - def test_dispatcher(self): - endpoints = [] - for endpoint_methods in self.endpoints: - e = mock.Mock(spec=endpoint_methods) - endpoints.append(e) - for m in endpoint_methods: - method = getattr(e, m) - if self.ex: - method.side_effect = self.ex() - else: - method.return_value = self.return_value - - msg = notification_msg.copy() - msg['priority'] = self.priority - - targets = [messaging.Target(topic='notifications')] - dispatcher = notify_dispatcher.NotificationDispatcher( - targets, endpoints, None, allow_requeue=True, pool=None) - - # check it listen on wanted topics - self.assertEqual(sorted(set((targets[0], prio) - for prio in itertools.chain.from_iterable( - self.endpoints))), - sorted(dispatcher._targets_priorities)) - - incoming = mock.Mock(ctxt={}, message=msg) - with dispatcher(incoming) as callback: - callback() - - # check endpoint callbacks are called or not - for i, endpoint_methods in enumerate(self.endpoints): - for m in endpoint_methods: - if m == self.endpoints_expect_calls[i]: - method = getattr(endpoints[i], m) - method.assert_called_once_with( - {}, - msg['publisher_id'], - msg['event_type'], - msg['payload'], { - 'timestamp': mock.ANY, - 'message_id': mock.ANY - }) - else: - self.assertEqual(0, endpoints[i].call_count) - - if self.ex: - self.assertEqual(1, incoming.acknowledge.call_count) - self.assertEqual(0, incoming.requeue.call_count) - elif self.return_value == messaging.NotificationResult.HANDLED \ - or self.return_value is None: - self.assertEqual(1, incoming.acknowledge.call_count) - self.assertEqual(0, incoming.requeue.call_count) - elif self.return_value == messaging.NotificationResult.REQUEUE: - self.assertEqual(0, 
incoming.acknowledge.call_count) - self.assertEqual(1, incoming.requeue.call_count) - - -class TestDispatcher(test_utils.BaseTestCase): - - @mock.patch('oslo_messaging.notify.dispatcher.LOG') - def test_dispatcher_unknown_prio(self, mylog): - msg = notification_msg.copy() - msg['priority'] = 'what???' - dispatcher = notify_dispatcher.NotificationDispatcher( - [mock.Mock()], [mock.Mock()], None, allow_requeue=True, pool=None) - with dispatcher(mock.Mock(ctxt={}, message=msg)) as callback: - callback() - mylog.warning.assert_called_once_with('Unknown priority "%s"', - 'what???') - - def test_dispatcher_executor_callback(self): - endpoint = mock.Mock(spec=['warn']) - endpoint_method = endpoint.warn - endpoint_method.return_value = messaging.NotificationResult.HANDLED - - targets = [messaging.Target(topic='notifications')] - dispatcher = notify_dispatcher.NotificationDispatcher( - targets, [endpoint], None, allow_requeue=True) - - msg = notification_msg.copy() - msg['priority'] = 'warn' - - incoming = mock.Mock(ctxt={}, message=msg) - executor_callback = mock.Mock() - with dispatcher(incoming, executor_callback) as callback: - callback() - self.assertTrue(executor_callback.called) - self.assertEqual(executor_callback.call_args[0][0], endpoint_method) diff --git a/tests/notify/test_listener.py b/tests/notify/test_listener.py deleted file mode 100644 index 84e257d16..000000000 --- a/tests/notify/test_listener.py +++ /dev/null @@ -1,411 +0,0 @@ - -# Copyright 2013 eNovance -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import threading -import time - -import testscenarios - -from oslo import messaging -from oslo.messaging.notify import dispatcher -from oslo_config import cfg -from oslo_messaging.tests import utils as test_utils -from six.moves import mock - -load_tests = testscenarios.load_tests_apply_scenarios - - -class RestartableServerThread(object): - def __init__(self, server): - self.server = server - self.thread = None - - def start(self): - if self.thread is None: - self.thread = threading.Thread(target=self.server.start) - self.thread.daemon = True - self.thread.start() - - def stop(self): - if self.thread is not None: - # Check start() does nothing with a running listener - self.server.start() - self.server.stop() - self.server.wait() - self.thread.join(timeout=15) - ret = self.thread.isAlive() - self.thread = None - return ret - return True - - -class ListenerSetupMixin(object): - - class ThreadTracker(object): - def __init__(self): - self._received_msgs = 0 - self.threads = [] - self.lock = threading.Lock() - - def info(self, ctxt, publisher_id, event_type, payload, metadata): - # NOTE(sileht): this run into an other thread - with self.lock: - self._received_msgs += 1 - - def wait_for_messages(self, expect_messages): - while self._received_msgs < expect_messages: - time.sleep(0.01) - - def stop(self): - for thread in self.threads: - thread.stop() - self.threads = [] - - def start(self, thread): - self.threads.append(thread) - thread.start() - - def setUp(self): - self.trackers = {} - self.addCleanup(self._stop_trackers) - - def _stop_trackers(self): - for pool in self.trackers: - self.trackers[pool].stop() - self.trackers = {} - - def _setup_listener(self, transport, endpoints, - targets=None, pool=None): - - if pool is None: - tracker_name = '__default__' - else: - tracker_name = pool - - if targets is None: - targets = [messaging.Target(topic='testtopic')] - - tracker = self.trackers.setdefault( - tracker_name, self.ThreadTracker()) - listener = messaging.get_notification_listener( - transport, targets=targets, endpoints=[tracker] + endpoints, - allow_requeue=True, pool=pool) - - thread = RestartableServerThread(listener) - tracker.start(thread) - return thread - - def wait_for_messages(self, expect_messages, tracker_name='__default__'): - self.trackers[tracker_name].wait_for_messages(expect_messages) - - def _setup_notifier(self, transport, topic='testtopic', - publisher_id='testpublisher'): - return messaging.Notifier(transport, topic=topic, - driver='messaging', - publisher_id=publisher_id) - - -class TestNotifyListener(test_utils.BaseTestCase, ListenerSetupMixin): - - def __init__(self, *args): - super(TestNotifyListener, self).__init__(*args) - ListenerSetupMixin.__init__(self) - - def setUp(self): - super(TestNotifyListener, self).setUp(conf=cfg.ConfigOpts()) - ListenerSetupMixin.setUp(self) - - def test_constructor(self): - transport = messaging.get_transport(self.conf, url='fake:') - target = messaging.Target(topic='foo') - endpoints = [object()] - - listener = messaging.get_notification_listener(transport, [target], - endpoints) - - self.assertIs(listener.conf, self.conf) - self.assertIs(listener.transport, transport) - self.assertIsInstance(listener.dispatcher, - dispatcher.NotificationDispatcher) - self.assertIs(listener.dispatcher.endpoints, endpoints) - self.assertEqual('blocking', listener.executor) - - def test_no_target_topic(self): - transport = messaging.get_transport(self.conf, url='fake:') - - listener = messaging.get_notification_listener(transport, - 
[messaging.Target()], - [mock.Mock()]) - try: - listener.start() - except Exception as ex: - self.assertIsInstance(ex, messaging.InvalidTarget, ex) - else: - self.assertTrue(False) - - def test_unknown_executor(self): - transport = messaging.get_transport(self.conf, url='fake:') - - try: - messaging.get_notification_listener(transport, [], [], - executor='foo') - except Exception as ex: - self.assertIsInstance(ex, messaging.ExecutorLoadFailure) - self.assertEqual('foo', ex.executor) - else: - self.assertTrue(False) - - def test_one_topic(self): - transport = messaging.get_transport(self.conf, url='fake:') - - endpoint = mock.Mock() - endpoint.info.return_value = None - listener_thread = self._setup_listener(transport, [endpoint]) - - notifier = self._setup_notifier(transport) - notifier.info({}, 'an_event.start', 'test message') - - self.wait_for_messages(1) - self.assertFalse(listener_thread.stop()) - - endpoint.info.assert_called_once_with( - {}, 'testpublisher', 'an_event.start', 'test message', - {'message_id': mock.ANY, 'timestamp': mock.ANY}) - - def test_two_topics(self): - transport = messaging.get_transport(self.conf, url='fake:') - - endpoint = mock.Mock() - endpoint.info.return_value = None - targets = [messaging.Target(topic="topic1"), - messaging.Target(topic="topic2")] - listener_thread = self._setup_listener(transport, [endpoint], - targets=targets) - notifier = self._setup_notifier(transport, topic='topic1') - notifier.info({'ctxt': '1'}, 'an_event.start1', 'test') - notifier = self._setup_notifier(transport, topic='topic2') - notifier.info({'ctxt': '2'}, 'an_event.start2', 'test') - - self.wait_for_messages(2) - self.assertFalse(listener_thread.stop()) - - endpoint.info.assert_has_calls([ - mock.call({'ctxt': '1'}, 'testpublisher', - 'an_event.start1', 'test', - {'timestamp': mock.ANY, 'message_id': mock.ANY}), - mock.call({'ctxt': '2'}, 'testpublisher', - 'an_event.start2', 'test', - {'timestamp': mock.ANY, 'message_id': mock.ANY})], - any_order=True) - - def test_two_exchanges(self): - transport = messaging.get_transport(self.conf, url='fake:') - - endpoint = mock.Mock() - endpoint.info.return_value = None - targets = [messaging.Target(topic="topic", - exchange="exchange1"), - messaging.Target(topic="topic", - exchange="exchange2")] - listener_thread = self._setup_listener(transport, [endpoint], - targets=targets) - - notifier = self._setup_notifier(transport, topic="topic") - - def mock_notifier_exchange(name): - def side_effect(target, ctxt, message, version, retry): - target.exchange = name - return transport._driver.send_notification(target, ctxt, - message, version, - retry=retry) - transport._send_notification = mock.MagicMock( - side_effect=side_effect) - - notifier.info({'ctxt': '0'}, - 'an_event.start', 'test message default exchange') - mock_notifier_exchange('exchange1') - notifier.info({'ctxt': '1'}, - 'an_event.start', 'test message exchange1') - mock_notifier_exchange('exchange2') - notifier.info({'ctxt': '2'}, - 'an_event.start', 'test message exchange2') - - self.wait_for_messages(2) - self.assertFalse(listener_thread.stop()) - - endpoint.info.assert_has_calls([ - mock.call({'ctxt': '1'}, 'testpublisher', 'an_event.start', - 'test message exchange1', - {'timestamp': mock.ANY, 'message_id': mock.ANY}), - mock.call({'ctxt': '2'}, 'testpublisher', 'an_event.start', - 'test message exchange2', - {'timestamp': mock.ANY, 'message_id': mock.ANY})], - any_order=True) - - def test_two_endpoints(self): - transport = messaging.get_transport(self.conf, url='fake:') - 
- endpoint1 = mock.Mock() - endpoint1.info.return_value = None - endpoint2 = mock.Mock() - endpoint2.info.return_value = messaging.NotificationResult.HANDLED - listener_thread = self._setup_listener(transport, - [endpoint1, endpoint2]) - notifier = self._setup_notifier(transport) - notifier.info({}, 'an_event.start', 'test') - - self.wait_for_messages(1) - self.assertFalse(listener_thread.stop()) - - endpoint1.info.assert_called_once_with( - {}, 'testpublisher', 'an_event.start', 'test', { - 'timestamp': mock.ANY, - 'message_id': mock.ANY}) - - endpoint2.info.assert_called_once_with( - {}, 'testpublisher', 'an_event.start', 'test', { - 'timestamp': mock.ANY, - 'message_id': mock.ANY}) - - def test_requeue(self): - transport = messaging.get_transport(self.conf, url='fake:') - endpoint = mock.Mock() - endpoint.info = mock.Mock() - - def side_effect_requeue(*args, **kwargs): - if endpoint.info.call_count == 1: - return messaging.NotificationResult.REQUEUE - return messaging.NotificationResult.HANDLED - - endpoint.info.side_effect = side_effect_requeue - listener_thread = self._setup_listener(transport, [endpoint]) - notifier = self._setup_notifier(transport) - notifier.info({}, 'an_event.start', 'test') - - self.wait_for_messages(2) - self.assertFalse(listener_thread.stop()) - - endpoint.info.assert_has_calls([ - mock.call({}, 'testpublisher', 'an_event.start', 'test', - {'timestamp': mock.ANY, 'message_id': mock.ANY}), - mock.call({}, 'testpublisher', 'an_event.start', 'test', - {'timestamp': mock.ANY, 'message_id': mock.ANY})]) - - def test_two_pools(self): - transport = messaging.get_transport(self.conf, url='fake:') - - endpoint1 = mock.Mock() - endpoint1.info.return_value = None - endpoint2 = mock.Mock() - endpoint2.info.return_value = None - - targets = [messaging.Target(topic="topic")] - listener1_thread = self._setup_listener(transport, [endpoint1], - targets=targets, pool="pool1") - listener2_thread = self._setup_listener(transport, [endpoint2], - targets=targets, pool="pool2") - - notifier = self._setup_notifier(transport, topic="topic") - notifier.info({'ctxt': '0'}, 'an_event.start', 'test message0') - notifier.info({'ctxt': '1'}, 'an_event.start', 'test message1') - - self.wait_for_messages(2, "pool1") - self.wait_for_messages(2, "pool2") - self.assertFalse(listener2_thread.stop()) - self.assertFalse(listener1_thread.stop()) - - def mocked_endpoint_call(i): - return mock.call({'ctxt': '%d' % i}, 'testpublisher', - 'an_event.start', 'test message%d' % i, - {'timestamp': mock.ANY, 'message_id': mock.ANY}) - - endpoint1.info.assert_has_calls([mocked_endpoint_call(0), - mocked_endpoint_call(1)]) - endpoint2.info.assert_has_calls([mocked_endpoint_call(0), - mocked_endpoint_call(1)]) - - def test_two_pools_three_listener(self): - transport = messaging.get_transport(self.conf, url='fake:') - - endpoint1 = mock.Mock() - endpoint1.info.return_value = None - endpoint2 = mock.Mock() - endpoint2.info.return_value = None - endpoint3 = mock.Mock() - endpoint3.info.return_value = None - - targets = [messaging.Target(topic="topic")] - listener1_thread = self._setup_listener(transport, [endpoint1], - targets=targets, pool="pool1") - listener2_thread = self._setup_listener(transport, [endpoint2], - targets=targets, pool="pool2") - listener3_thread = self._setup_listener(transport, [endpoint3], - targets=targets, pool="pool2") - - def mocked_endpoint_call(i): - return mock.call({'ctxt': '%d' % i}, 'testpublisher', - 'an_event.start', 'test message%d' % i, - {'timestamp': mock.ANY, 'message_id': 
mock.ANY}) - - notifier = self._setup_notifier(transport, topic="topic") - mocked_endpoint1_calls = [] - for i in range(0, 25): - notifier.info({'ctxt': '%d' % i}, 'an_event.start', - 'test message%d' % i) - mocked_endpoint1_calls.append(mocked_endpoint_call(i)) - - self.wait_for_messages(25, 'pool2') - listener2_thread.stop() - - for i in range(0, 25): - notifier.info({'ctxt': '%d' % i}, 'an_event.start', - 'test message%d' % i) - mocked_endpoint1_calls.append(mocked_endpoint_call(i)) - - self.wait_for_messages(50, 'pool2') - listener2_thread.start() - listener3_thread.stop() - - for i in range(0, 25): - notifier.info({'ctxt': '%d' % i}, 'an_event.start', - 'test message%d' % i) - mocked_endpoint1_calls.append(mocked_endpoint_call(i)) - - self.wait_for_messages(75, 'pool2') - listener3_thread.start() - - for i in range(0, 25): - notifier.info({'ctxt': '%d' % i}, 'an_event.start', - 'test message%d' % i) - mocked_endpoint1_calls.append(mocked_endpoint_call(i)) - - self.wait_for_messages(100, 'pool1') - self.wait_for_messages(100, 'pool2') - - self.assertFalse(listener3_thread.stop()) - self.assertFalse(listener2_thread.stop()) - self.assertFalse(listener1_thread.stop()) - - self.assertEqual(100, endpoint1.info.call_count) - endpoint1.info.assert_has_calls(mocked_endpoint1_calls) - - self.assertLessEqual(25, endpoint2.info.call_count) - self.assertLessEqual(25, endpoint3.info.call_count) - - self.assertEqual(100, endpoint2.info.call_count + - endpoint3.info.call_count) - for call in mocked_endpoint1_calls: - self.assertIn(call, endpoint2.info.mock_calls + - endpoint3.info.mock_calls) diff --git a/tests/notify/test_log_handler.py b/tests/notify/test_log_handler.py deleted file mode 100644 index 3adc572e8..000000000 --- a/tests/notify/test_log_handler.py +++ /dev/null @@ -1,57 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import logging - -from oslo import messaging -from oslo.messaging.notify import log_handler -from oslo_messaging.tests.notify import test_notifier -from oslo_messaging.tests import utils as test_utils -from six.moves import mock - - -class PublishErrorsHandlerTestCase(test_utils.BaseTestCase): - """Tests for log.PublishErrorsHandler""" - def setUp(self): - super(PublishErrorsHandlerTestCase, self).setUp() - self.publisherrorshandler = (log_handler. 
- PublishErrorsHandler(logging.ERROR)) - - def test_emit_cfg_log_notifier_in_notifier_drivers(self): - drivers = ['messaging', 'log'] - self.config(notification_driver=drivers) - self.stub_flg = True - - transport = test_notifier._FakeTransport(self.conf) - notifier = messaging.Notifier(transport) - - def fake_notifier(*args, **kwargs): - self.stub_flg = False - - self.stubs.Set(notifier, 'error', fake_notifier) - - logrecord = logging.LogRecord(name='name', level='WARN', - pathname='/tmp', lineno=1, msg='Message', - args=None, exc_info=None) - self.publisherrorshandler.emit(logrecord) - self.assertTrue(self.stub_flg) - - @mock.patch('oslo_messaging.notify.notifier.Notifier._notify') - def test_emit_notification(self, mock_notify): - logrecord = logging.LogRecord(name='name', level='ERROR', - pathname='/tmp', lineno=1, msg='Message', - args=None, exc_info=None) - self.publisherrorshandler.emit(logrecord) - self.assertEqual('error.publisher', - self.publisherrorshandler._notifier.publisher_id) - mock_notify.assert_called_with({}, 'error_notification', - {'error': 'Message'}, 'ERROR') diff --git a/tests/notify/test_logger.py b/tests/notify/test_logger.py deleted file mode 100644 index 06ad82012..000000000 --- a/tests/notify/test_logger.py +++ /dev/null @@ -1,157 +0,0 @@ -# Copyright 2013 eNovance -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import datetime -import logging -import logging.config -import os -import sys - -from oslo_utils import timeutils -import testscenarios -import testtools - -from oslo import messaging -import oslo_messaging -from oslo_messaging.tests.notify import test_notifier -from oslo_messaging.tests import utils as test_utils -from six.moves import mock - - -load_tests = testscenarios.load_tests_apply_scenarios - -# Stolen from openstack.common.logging -logging.AUDIT = logging.INFO + 1 -logging.addLevelName(logging.AUDIT, 'AUDIT') - - -class TestLogNotifier(test_utils.BaseTestCase): - - scenarios = [ - ('debug', dict(priority='debug')), - ('info', dict(priority='info')), - ('warning', dict(priority='warning', queue='WARN')), - ('warn', dict(priority='warn')), - ('error', dict(priority='error')), - ('critical', dict(priority='critical')), - ('audit', dict(priority='audit')), - ] - - def setUp(self): - super(TestLogNotifier, self).setUp() - self.addCleanup(oslo_messaging.notify._impl_test.reset) - self.config(notification_driver=['test']) - # NOTE(jamespage) disable thread information logging for testing - # as this causes test failures when zmq tests monkey_patch via - # eventlet - logging.logThreads = 0 - - @mock.patch('oslo_utils.timeutils.utcnow') - def test_logger(self, mock_utcnow): - with mock.patch('oslo_messaging.transport.get_transport', - return_value=test_notifier._FakeTransport(self.conf)): - self.logger = messaging.LoggingNotificationHandler('test://') - - mock_utcnow.return_value = datetime.datetime.utcnow() - - levelno = getattr(logging, self.priority.upper(), 42) - - record = logging.LogRecord('foo', - levelno, - '/foo/bar', - 42, - 'Something happened', - None, - None) - - self.logger.emit(record) - - context = oslo_messaging.notify._impl_test.NOTIFICATIONS[0][0] - self.assertEqual({}, context) - - n = oslo_messaging.notify._impl_test.NOTIFICATIONS[0][1] - self.assertEqual(getattr(self, 'queue', self.priority.upper()), - n['priority']) - self.assertEqual('logrecord', n['event_type']) - self.assertEqual(str(timeutils.utcnow()), n['timestamp']) - self.assertEqual(None, n['publisher_id']) - self.assertEqual( - {'process': os.getpid(), - 'funcName': None, - 'name': 'foo', - 'thread': None, - 'levelno': levelno, - 'processName': 'MainProcess', - 'pathname': '/foo/bar', - 'lineno': 42, - 'msg': 'Something happened', - 'exc_info': None, - 'levelname': logging.getLevelName(levelno), - 'extra': None}, - n['payload']) - - @testtools.skipUnless(hasattr(logging.config, 'dictConfig'), - "Need logging.config.dictConfig (Python >= 2.7)") - @mock.patch('oslo_utils.timeutils.utcnow') - def test_logging_conf(self, mock_utcnow): - with mock.patch('oslo_messaging.transport.get_transport', - return_value=test_notifier._FakeTransport(self.conf)): - logging.config.dictConfig({ - 'version': 1, - 'handlers': { - 'notification': { - 'class': 'oslo.messaging.LoggingNotificationHandler', - 'level': self.priority.upper(), - 'url': 'test://', - }, - }, - 'loggers': { - 'default': { - 'handlers': ['notification'], - 'level': self.priority.upper(), - }, - }, - }) - - mock_utcnow.return_value = datetime.datetime.utcnow() - - levelno = getattr(logging, self.priority.upper()) - - logger = logging.getLogger('default') - lineno = sys._getframe().f_lineno + 1 - logger.log(levelno, 'foobar') - - n = oslo_messaging.notify._impl_test.NOTIFICATIONS[0][1] - self.assertEqual(getattr(self, 'queue', self.priority.upper()), - n['priority']) - self.assertEqual('logrecord', n['event_type']) - 
self.assertEqual(str(timeutils.utcnow()), n['timestamp']) - self.assertEqual(None, n['publisher_id']) - pathname = __file__ - if pathname.endswith(('.pyc', '.pyo')): - pathname = pathname[:-1] - self.assertDictEqual( - n['payload'], - {'process': os.getpid(), - 'funcName': 'test_logging_conf', - 'name': 'default', - 'thread': None, - 'levelno': levelno, - 'processName': 'MainProcess', - 'pathname': pathname, - 'lineno': lineno, - 'msg': 'foobar', - 'exc_info': None, - 'levelname': logging.getLevelName(levelno), - 'extra': None}) diff --git a/tests/notify/test_middleware.py b/tests/notify/test_middleware.py deleted file mode 100644 index ed81cb0c0..000000000 --- a/tests/notify/test_middleware.py +++ /dev/null @@ -1,190 +0,0 @@ -# Copyright 2013-2014 eNovance -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import uuid - -import webob - -from oslo.messaging.notify import middleware -from oslo_messaging.tests import utils -from six.moves import mock - - -class FakeApp(object): - def __call__(self, env, start_response): - body = 'Some response' - start_response('200 OK', [ - ('Content-Type', 'text/plain'), - ('Content-Length', str(sum(map(len, body)))) - ]) - return [body] - - -class FakeFailingApp(object): - def __call__(self, env, start_response): - raise Exception("It happens!") - - -class NotifierMiddlewareTest(utils.BaseTestCase): - - def test_notification(self): - m = middleware.RequestNotifier(FakeApp()) - req = webob.Request.blank('/foo/bar', - environ={'REQUEST_METHOD': 'GET', - 'HTTP_X_AUTH_TOKEN': uuid.uuid4()}) - with mock.patch( - 'oslo.messaging.notify.notifier.Notifier._notify') as notify: - m(req) - # Check first notification with only 'request' - call_args = notify.call_args_list[0][0] - self.assertEqual(call_args[1], 'http.request') - self.assertEqual(call_args[3], 'INFO') - self.assertEqual(set(call_args[2].keys()), - set(['request'])) - - request = call_args[2]['request'] - self.assertEqual(request['PATH_INFO'], '/foo/bar') - self.assertEqual(request['REQUEST_METHOD'], 'GET') - self.assertIn('HTTP_X_SERVICE_NAME', request) - self.assertNotIn('HTTP_X_AUTH_TOKEN', request) - self.assertFalse(any(map(lambda s: s.startswith('wsgi.'), - request.keys())), - "WSGI fields are filtered out") - - # Check second notification with request + response - call_args = notify.call_args_list[1][0] - self.assertEqual(call_args[1], 'http.response') - self.assertEqual(call_args[3], 'INFO') - self.assertEqual(set(call_args[2].keys()), - set(['request', 'response'])) - - request = call_args[2]['request'] - self.assertEqual(request['PATH_INFO'], '/foo/bar') - self.assertEqual(request['REQUEST_METHOD'], 'GET') - self.assertIn('HTTP_X_SERVICE_NAME', request) - self.assertNotIn('HTTP_X_AUTH_TOKEN', request) - self.assertFalse(any(map(lambda s: s.startswith('wsgi.'), - request.keys())), - "WSGI fields are filtered out") - - response = call_args[2]['response'] - self.assertEqual(response['status'], '200 OK') - self.assertEqual(response['headers']['content-length'], '13') - - def 
test_notification_response_failure(self): - m = middleware.RequestNotifier(FakeFailingApp()) - req = webob.Request.blank('/foo/bar', - environ={'REQUEST_METHOD': 'GET', - 'HTTP_X_AUTH_TOKEN': uuid.uuid4()}) - with mock.patch( - 'oslo.messaging.notify.notifier.Notifier._notify') as notify: - try: - m(req) - self.fail("Application exception has not been re-raised") - except Exception: - pass - # Check first notification with only 'request' - call_args = notify.call_args_list[0][0] - self.assertEqual(call_args[1], 'http.request') - self.assertEqual(call_args[3], 'INFO') - self.assertEqual(set(call_args[2].keys()), - set(['request'])) - - request = call_args[2]['request'] - self.assertEqual(request['PATH_INFO'], '/foo/bar') - self.assertEqual(request['REQUEST_METHOD'], 'GET') - self.assertIn('HTTP_X_SERVICE_NAME', request) - self.assertNotIn('HTTP_X_AUTH_TOKEN', request) - self.assertFalse(any(map(lambda s: s.startswith('wsgi.'), - request.keys())), - "WSGI fields are filtered out") - - # Check second notification with 'request' and 'exception' - call_args = notify.call_args_list[1][0] - self.assertEqual(call_args[1], 'http.response') - self.assertEqual(call_args[3], 'INFO') - self.assertEqual(set(call_args[2].keys()), - set(['request', 'exception'])) - - request = call_args[2]['request'] - self.assertEqual(request['PATH_INFO'], '/foo/bar') - self.assertEqual(request['REQUEST_METHOD'], 'GET') - self.assertIn('HTTP_X_SERVICE_NAME', request) - self.assertNotIn('HTTP_X_AUTH_TOKEN', request) - self.assertFalse(any(map(lambda s: s.startswith('wsgi.'), - request.keys())), - "WSGI fields are filtered out") - - exception = call_args[2]['exception'] - self.assertIn('middleware.py', exception['traceback'][0]) - self.assertIn('It happens!', exception['traceback'][-1]) - self.assertEqual(exception['value'], "Exception('It happens!',)") - - def test_process_request_fail(self): - def notify_error(context, publisher_id, event_type, - priority, payload): - raise Exception('error') - with mock.patch('oslo.messaging.notify.notifier.Notifier._notify', - notify_error): - m = middleware.RequestNotifier(FakeApp()) - req = webob.Request.blank('/foo/bar', - environ={'REQUEST_METHOD': 'GET'}) - m.process_request(req) - - def test_process_response_fail(self): - def notify_error(context, publisher_id, event_type, - priority, payload): - raise Exception('error') - with mock.patch('oslo.messaging.notify.notifier.Notifier._notify', - notify_error): - m = middleware.RequestNotifier(FakeApp()) - req = webob.Request.blank('/foo/bar', - environ={'REQUEST_METHOD': 'GET'}) - m.process_response(req, webob.response.Response()) - - def test_ignore_req_opt(self): - m = middleware.RequestNotifier(FakeApp(), - ignore_req_list='get, PUT') - req = webob.Request.blank('/skip/foo', - environ={'REQUEST_METHOD': 'GET'}) - req1 = webob.Request.blank('/skip/foo', - environ={'REQUEST_METHOD': 'PUT'}) - req2 = webob.Request.blank('/accept/foo', - environ={'REQUEST_METHOD': 'POST'}) - with mock.patch( - 'oslo.messaging.notify.notifier.Notifier._notify') as notify: - # Check GET request does not send notification - m(req) - m(req1) - self.assertEqual(len(notify.call_args_list), 0) - - # Check non-GET request does send notification - m(req2) - self.assertEqual(len(notify.call_args_list), 2) - call_args = notify.call_args_list[0][0] - self.assertEqual(call_args[1], 'http.request') - self.assertEqual(call_args[3], 'INFO') - self.assertEqual(set(call_args[2].keys()), - set(['request'])) - - request = call_args[2]['request'] - 
self.assertEqual(request['PATH_INFO'], '/accept/foo') - self.assertEqual(request['REQUEST_METHOD'], 'POST') - - call_args = notify.call_args_list[1][0] - self.assertEqual(call_args[1], 'http.response') - self.assertEqual(call_args[3], 'INFO') - self.assertEqual(set(call_args[2].keys()), - set(['request', 'response'])) diff --git a/tests/notify/test_notifier.py b/tests/notify/test_notifier.py deleted file mode 100644 index 9cc8ec06b..000000000 --- a/tests/notify/test_notifier.py +++ /dev/null @@ -1,540 +0,0 @@ - -# Copyright 2013 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import datetime -import logging -import sys -import uuid - -import fixtures -from oslo_serialization import jsonutils -from oslo_utils import timeutils -from stevedore import dispatch -from stevedore import extension -import testscenarios -import yaml - -from oslo import messaging -from oslo.messaging.notify import notifier as msg_notifier -from oslo.messaging import serializer as msg_serializer -from oslo_messaging.notify import _impl_log -from oslo_messaging.notify import _impl_messaging -from oslo_messaging.notify import _impl_test -from oslo_messaging.tests import utils as test_utils -from six.moves import mock - -load_tests = testscenarios.load_tests_apply_scenarios - - -class _FakeTransport(object): - - def __init__(self, conf): - self.conf = conf - - def _send_notification(self, target, ctxt, message, version, retry=None): - pass - - -class _ReRaiseLoggedExceptionsFixture(fixtures.Fixture): - - """Record logged exceptions and re-raise in cleanup. - - The notifier just logs notification send errors so, for the sake of - debugging test failures, we record any exceptions logged and re-raise them - during cleanup. 
- """ - - class FakeLogger(object): - - def __init__(self): - self.exceptions = [] - - def exception(self, msg, *args, **kwargs): - self.exceptions.append(sys.exc_info()[1]) - - def setUp(self): - super(_ReRaiseLoggedExceptionsFixture, self).setUp() - - self.logger = self.FakeLogger() - - def reraise_exceptions(): - for ex in self.logger.exceptions: - raise ex - - self.addCleanup(reraise_exceptions) - - -class TestMessagingNotifier(test_utils.BaseTestCase): - - _v1 = [ - ('v1', dict(v1=True)), - ('not_v1', dict(v1=False)), - ] - - _v2 = [ - ('v2', dict(v2=True)), - ('not_v2', dict(v2=False)), - ] - - _publisher_id = [ - ('ctor_pub_id', dict(ctor_pub_id='test', - expected_pub_id='test')), - ('prep_pub_id', dict(prep_pub_id='test.localhost', - expected_pub_id='test.localhost')), - ('override', dict(ctor_pub_id='test', - prep_pub_id='test.localhost', - expected_pub_id='test.localhost')), - ] - - _topics = [ - ('no_topics', dict(topics=[])), - ('single_topic', dict(topics=['notifications'])), - ('multiple_topic2', dict(topics=['foo', 'bar'])), - ] - - _priority = [ - ('audit', dict(priority='audit')), - ('debug', dict(priority='debug')), - ('info', dict(priority='info')), - ('warn', dict(priority='warn')), - ('error', dict(priority='error')), - ('sample', dict(priority='sample')), - ('critical', dict(priority='critical')), - ] - - _payload = [ - ('payload', dict(payload={'foo': 'bar'})), - ] - - _context = [ - ('ctxt', dict(ctxt={'user': 'bob'})), - ] - - _retry = [ - ('unconfigured', dict()), - ('None', dict(retry=None)), - ('0', dict(retry=0)), - ('5', dict(retry=5)), - ] - - @classmethod - def generate_scenarios(cls): - cls.scenarios = testscenarios.multiply_scenarios(cls._v1, - cls._v2, - cls._publisher_id, - cls._topics, - cls._priority, - cls._payload, - cls._context, - cls._retry) - - def setUp(self): - super(TestMessagingNotifier, self).setUp() - - self.logger = self.useFixture(_ReRaiseLoggedExceptionsFixture()).logger - self.stubs.Set(_impl_messaging, 'LOG', self.logger) - self.stubs.Set(msg_notifier, '_LOG', self.logger) - - @mock.patch('oslo_utils.timeutils.utcnow') - def test_notifier(self, mock_utcnow): - drivers = [] - if self.v1: - drivers.append('messaging') - if self.v2: - drivers.append('messagingv2') - - self.config(notification_driver=drivers, - notification_topics=self.topics) - - transport = _FakeTransport(self.conf) - - if hasattr(self, 'ctor_pub_id'): - notifier = messaging.Notifier(transport, - publisher_id=self.ctor_pub_id) - else: - notifier = messaging.Notifier(transport) - - prepare_kwds = {} - if hasattr(self, 'retry'): - prepare_kwds['retry'] = self.retry - if hasattr(self, 'prep_pub_id'): - prepare_kwds['publisher_id'] = self.prep_pub_id - if prepare_kwds: - notifier = notifier.prepare(**prepare_kwds) - - self.mox.StubOutWithMock(transport, '_send_notification') - - message_id = uuid.uuid4() - self.mox.StubOutWithMock(uuid, 'uuid4') - uuid.uuid4().AndReturn(message_id) - - mock_utcnow.return_value = datetime.datetime.utcnow() - - message = { - 'message_id': str(message_id), - 'publisher_id': self.expected_pub_id, - 'event_type': 'test.notify', - 'priority': self.priority.upper(), - 'payload': self.payload, - 'timestamp': str(timeutils.utcnow()), - } - - sends = [] - if self.v1: - sends.append(dict(version=1.0)) - if self.v2: - sends.append(dict(version=2.0)) - - for send_kwargs in sends: - for topic in self.topics: - if hasattr(self, 'retry'): - send_kwargs['retry'] = self.retry - else: - send_kwargs['retry'] = None - target = messaging.Target(topic='%s.%s' % 
(topic, - self.priority)) - transport._send_notification(target, self.ctxt, message, - **send_kwargs).InAnyOrder() - - self.mox.ReplayAll() - - method = getattr(notifier, self.priority) - method(self.ctxt, 'test.notify', self.payload) - - -TestMessagingNotifier.generate_scenarios() - - -class TestSerializer(test_utils.BaseTestCase): - - def setUp(self): - super(TestSerializer, self).setUp() - self.addCleanup(_impl_test.reset) - - @mock.patch('oslo_utils.timeutils.utcnow') - def test_serializer(self, mock_utcnow): - transport = _FakeTransport(self.conf) - - serializer = msg_serializer.NoOpSerializer() - - notifier = messaging.Notifier(transport, - 'test.localhost', - driver='test', - topic='test', - serializer=serializer) - - message_id = uuid.uuid4() - self.mox.StubOutWithMock(uuid, 'uuid4') - uuid.uuid4().AndReturn(message_id) - - mock_utcnow.return_value = datetime.datetime.utcnow() - - self.mox.StubOutWithMock(serializer, 'serialize_context') - self.mox.StubOutWithMock(serializer, 'serialize_entity') - serializer.serialize_context(dict(user='bob')).\ - AndReturn(dict(user='alice')) - serializer.serialize_entity(dict(user='bob'), 'bar').AndReturn('sbar') - - self.mox.ReplayAll() - - notifier.info(dict(user='bob'), 'test.notify', 'bar') - - message = { - 'message_id': str(message_id), - 'publisher_id': 'test.localhost', - 'event_type': 'test.notify', - 'priority': 'INFO', - 'payload': 'sbar', - 'timestamp': str(timeutils.utcnow()), - } - - self.assertEqual([(dict(user='alice'), message, 'INFO', None)], - _impl_test.NOTIFICATIONS) - - -class TestLogNotifier(test_utils.BaseTestCase): - - @mock.patch('oslo_utils.timeutils.utcnow') - def test_notifier(self, mock_utcnow): - self.config(notification_driver=['log']) - - transport = _FakeTransport(self.conf) - - notifier = messaging.Notifier(transport, 'test.localhost') - - message_id = uuid.uuid4() - self.mox.StubOutWithMock(uuid, 'uuid4') - uuid.uuid4().AndReturn(message_id) - - mock_utcnow.return_value = datetime.datetime.utcnow() - - message = { - 'message_id': str(message_id), - 'publisher_id': 'test.localhost', - 'event_type': 'test.notify', - 'priority': 'INFO', - 'payload': 'bar', - 'timestamp': str(timeutils.utcnow()), - } - - logger = self.mox.CreateMockAnything() - - self.mox.StubOutWithMock(logging, 'getLogger') - logging.getLogger('oslo.messaging.notification.test.notify').\ - AndReturn(logger) - - logger.info(jsonutils.dumps(message)) - - self.mox.ReplayAll() - - notifier.info({}, 'test.notify', 'bar') - - def test_sample_priority(self): - # Ensure logger drops sample-level notifications. 
- driver = _impl_log.LogDriver(None, None, None) - - logger = self.mox.CreateMock( - logging.getLogger('oslo.messaging.notification.foo')) - logger.sample = None - self.mox.StubOutWithMock(logging, 'getLogger') - logging.getLogger('oslo.messaging.notification.foo').\ - AndReturn(logger) - - self.mox.ReplayAll() - - msg = {'event_type': 'foo'} - driver.notify(None, msg, "sample", None) - - -class TestRoutingNotifier(test_utils.BaseTestCase): - def setUp(self): - super(TestRoutingNotifier, self).setUp() - self.config(notification_driver=['routing']) - - transport = _FakeTransport(self.conf) - self.notifier = messaging.Notifier(transport) - self.router = self.notifier._driver_mgr['routing'].obj - - def _fake_extension_manager(self, ext): - return extension.ExtensionManager.make_test_instance( - [extension.Extension('test', None, None, ext), ]) - - def _empty_extension_manager(self): - return extension.ExtensionManager.make_test_instance([]) - - def test_should_load_plugin(self): - self.router.used_drivers = set(["zoo", "blah"]) - ext = mock.MagicMock() - ext.name = "foo" - self.assertFalse(self.router._should_load_plugin(ext)) - ext.name = "zoo" - self.assertTrue(self.router._should_load_plugin(ext)) - - def test_load_notifiers_no_config(self): - # default routing_notifier_config="" - self.router._load_notifiers() - self.assertEqual({}, self.router.routing_groups) - self.assertEqual(0, len(self.router.used_drivers)) - - def test_load_notifiers_no_extensions(self): - self.config(routing_notifier_config="routing_notifier.yaml") - routing_config = r"" - config_file = mock.MagicMock() - config_file.return_value = routing_config - - with mock.patch.object(self.router, '_get_notifier_config_file', - config_file): - with mock.patch('stevedore.dispatch.DispatchExtensionManager', - return_value=self._empty_extension_manager()): - with mock.patch('oslo_messaging.notify.' - '_impl_routing.LOG') as mylog: - self.router._load_notifiers() - self.assertFalse(mylog.debug.called) - self.assertEqual({}, self.router.routing_groups) - - def test_load_notifiers_config(self): - self.config(routing_notifier_config="routing_notifier.yaml") - routing_config = r""" -group_1: - rpc : foo -group_2: - rpc : blah - """ - - config_file = mock.MagicMock() - config_file.return_value = routing_config - - with mock.patch.object(self.router, '_get_notifier_config_file', - config_file): - with mock.patch('stevedore.dispatch.DispatchExtensionManager', - return_value=self._fake_extension_manager( - mock.MagicMock())): - self.router._load_notifiers() - groups = list(self.router.routing_groups.keys()) - groups.sort() - self.assertEqual(['group_1', 'group_2'], groups) - - def test_get_drivers_for_message_accepted_events(self): - config = r""" -group_1: - rpc: - accepted_events: - - foo.* - - blah.zoo.* - - zip - """ - groups = yaml.load(config) - group = groups['group_1'] - - # No matching event ... - self.assertEqual([], - self.router._get_drivers_for_message( - group, "unknown", "info")) - - # Child of foo ... - self.assertEqual(['rpc'], - self.router._get_drivers_for_message( - group, "foo.1", "info")) - - # Foo itself ... 
- self.assertEqual([], - self.router._get_drivers_for_message( - group, "foo", "info")) - - # Child of blah.zoo - self.assertEqual(['rpc'], - self.router._get_drivers_for_message( - group, "blah.zoo.zing", "info")) - - def test_get_drivers_for_message_accepted_priorities(self): - config = r""" -group_1: - rpc: - accepted_priorities: - - info - - error - """ - groups = yaml.load(config) - group = groups['group_1'] - - # No matching priority - self.assertEqual([], - self.router._get_drivers_for_message( - group, None, "unknown")) - - # Info ... - self.assertEqual(['rpc'], - self.router._get_drivers_for_message( - group, None, "info")) - - # Error (to make sure the list is getting processed) ... - self.assertEqual(['rpc'], - self.router._get_drivers_for_message( - group, None, "error")) - - def test_get_drivers_for_message_both(self): - config = r""" -group_1: - rpc: - accepted_priorities: - - info - accepted_events: - - foo.* - driver_1: - accepted_priorities: - - info - driver_2: - accepted_events: - - foo.* - """ - groups = yaml.load(config) - group = groups['group_1'] - - # Valid event, but no matching priority - self.assertEqual(['driver_2'], - self.router._get_drivers_for_message( - group, 'foo.blah', "unknown")) - - # Valid priority, but no matching event - self.assertEqual(['driver_1'], - self.router._get_drivers_for_message( - group, 'unknown', "info")) - - # Happy day ... - x = self.router._get_drivers_for_message(group, 'foo.blah', "info") - x.sort() - self.assertEqual(['driver_1', 'driver_2', 'rpc'], x) - - def test_filter_func(self): - ext = mock.MagicMock() - ext.name = "rpc" - - # Good ... - self.assertTrue(self.router._filter_func(ext, {}, {}, 'info', - None, ['foo', 'rpc'])) - - # Bad - self.assertFalse(self.router._filter_func(ext, {}, {}, 'info', - None, ['foo'])) - - def test_notify(self): - self.router.routing_groups = {'group_1': None, 'group_2': None} - drivers_mock = mock.MagicMock() - drivers_mock.side_effect = [['rpc'], ['foo']] - - with mock.patch.object(self.router, 'plugin_manager') as pm: - with mock.patch.object(self.router, '_get_drivers_for_message', - drivers_mock): - self.notifier.info({}, 'my_event', {}) - self.assertEqual(sorted(['rpc', 'foo']), - sorted(pm.map.call_args[0][6])) - - def test_notify_filtered(self): - self.config(routing_notifier_config="routing_notifier.yaml") - routing_config = r""" -group_1: - rpc: - accepted_events: - - my_event - rpc2: - accepted_priorities: - - info - bar: - accepted_events: - - nothing - """ - config_file = mock.MagicMock() - config_file.return_value = routing_config - - rpc_driver = mock.Mock() - rpc2_driver = mock.Mock() - bar_driver = mock.Mock() - - pm = dispatch.DispatchExtensionManager.make_test_instance( - [extension.Extension('rpc', None, None, rpc_driver), - extension.Extension('rpc2', None, None, rpc2_driver), - extension.Extension('bar', None, None, bar_driver)], - ) - - with mock.patch.object(self.router, '_get_notifier_config_file', - config_file): - with mock.patch('stevedore.dispatch.DispatchExtensionManager', - return_value=pm): - self.notifier.info({}, 'my_event', {}) - self.assertFalse(bar_driver.info.called) - rpc_driver.notify.assert_called_once_with( - {}, mock.ANY, 'INFO', None) - rpc2_driver.notify.assert_called_once_with( - {}, mock.ANY, 'INFO', None) diff --git a/tests/rpc/__init__.py b/tests/rpc/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/rpc/test_client.py b/tests/rpc/test_client.py deleted file mode 100644 index 65c4f6752..000000000 --- 
a/tests/rpc/test_client.py +++ /dev/null @@ -1,519 +0,0 @@ - -# Copyright 2013 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import testscenarios - -from oslo import messaging -from oslo.messaging import exceptions -from oslo.messaging import serializer as msg_serializer -from oslo_config import cfg -from oslo_messaging.tests import utils as test_utils - -load_tests = testscenarios.load_tests_apply_scenarios - - -class _FakeTransport(object): - - def __init__(self, conf): - self.conf = conf - - def _send(self, *args, **kwargs): - pass - - -class TestCastCall(test_utils.BaseTestCase): - - scenarios = [ - ('cast_no_ctxt_no_args', dict(call=False, ctxt={}, args={})), - ('call_no_ctxt_no_args', dict(call=True, ctxt={}, args={})), - ('cast_ctxt_and_args', - dict(call=False, - ctxt=dict(user='testuser', project='testtenant'), - args=dict(bar='blaa', foobar=11.01))), - ('call_ctxt_and_args', - dict(call=True, - ctxt=dict(user='testuser', project='testtenant'), - args=dict(bar='blaa', foobar=11.01))), - ] - - def test_cast_call(self): - self.config(rpc_response_timeout=None) - - transport = _FakeTransport(self.conf) - client = messaging.RPCClient(transport, messaging.Target()) - - self.mox.StubOutWithMock(transport, '_send') - - msg = dict(method='foo', args=self.args) - kwargs = {'retry': None} - if self.call: - kwargs['wait_for_reply'] = True - kwargs['timeout'] = None - - transport._send(messaging.Target(), self.ctxt, msg, **kwargs) - self.mox.ReplayAll() - - method = client.call if self.call else client.cast - method(self.ctxt, 'foo', **self.args) - - -class TestCastToTarget(test_utils.BaseTestCase): - - _base = [ - ('all_none', dict(ctor={}, prepare={}, expect={})), - ('ctor_exchange', - dict(ctor=dict(exchange='testexchange'), - prepare={}, - expect=dict(exchange='testexchange'))), - ('prepare_exchange', - dict(ctor={}, - prepare=dict(exchange='testexchange'), - expect=dict(exchange='testexchange'))), - ('prepare_exchange_none', - dict(ctor=dict(exchange='testexchange'), - prepare=dict(exchange=None), - expect={})), - ('both_exchange', - dict(ctor=dict(exchange='ctorexchange'), - prepare=dict(exchange='testexchange'), - expect=dict(exchange='testexchange'))), - ('ctor_topic', - dict(ctor=dict(topic='testtopic'), - prepare={}, - expect=dict(topic='testtopic'))), - ('prepare_topic', - dict(ctor={}, - prepare=dict(topic='testtopic'), - expect=dict(topic='testtopic'))), - ('prepare_topic_none', - dict(ctor=dict(topic='testtopic'), - prepare=dict(topic=None), - expect={})), - ('both_topic', - dict(ctor=dict(topic='ctortopic'), - prepare=dict(topic='testtopic'), - expect=dict(topic='testtopic'))), - ('ctor_namespace', - dict(ctor=dict(namespace='testnamespace'), - prepare={}, - expect=dict(namespace='testnamespace'))), - ('prepare_namespace', - dict(ctor={}, - prepare=dict(namespace='testnamespace'), - expect=dict(namespace='testnamespace'))), - ('prepare_namespace_none', - dict(ctor=dict(namespace='testnamespace'), - prepare=dict(namespace=None), - expect={})), - ('both_namespace', 
- dict(ctor=dict(namespace='ctornamespace'), - prepare=dict(namespace='testnamespace'), - expect=dict(namespace='testnamespace'))), - ('ctor_version', - dict(ctor=dict(version='testversion'), - prepare={}, - expect=dict(version='testversion'))), - ('prepare_version', - dict(ctor={}, - prepare=dict(version='testversion'), - expect=dict(version='testversion'))), - ('prepare_version_none', - dict(ctor=dict(version='testversion'), - prepare=dict(version=None), - expect={})), - ('both_version', - dict(ctor=dict(version='ctorversion'), - prepare=dict(version='testversion'), - expect=dict(version='testversion'))), - ('ctor_server', - dict(ctor=dict(server='testserver'), - prepare={}, - expect=dict(server='testserver'))), - ('prepare_server', - dict(ctor={}, - prepare=dict(server='testserver'), - expect=dict(server='testserver'))), - ('prepare_server_none', - dict(ctor=dict(server='testserver'), - prepare=dict(server=None), - expect={})), - ('both_server', - dict(ctor=dict(server='ctorserver'), - prepare=dict(server='testserver'), - expect=dict(server='testserver'))), - ('ctor_fanout', - dict(ctor=dict(fanout=True), - prepare={}, - expect=dict(fanout=True))), - ('prepare_fanout', - dict(ctor={}, - prepare=dict(fanout=True), - expect=dict(fanout=True))), - ('prepare_fanout_none', - dict(ctor=dict(fanout=True), - prepare=dict(fanout=None), - expect={})), - ('both_fanout', - dict(ctor=dict(fanout=True), - prepare=dict(fanout=False), - expect=dict(fanout=False))), - ] - - _prepare = [ - ('single_prepare', dict(double_prepare=False)), - ('double_prepare', dict(double_prepare=True)), - ] - - @classmethod - def generate_scenarios(cls): - cls.scenarios = testscenarios.multiply_scenarios(cls._base, - cls._prepare) - - def setUp(self): - super(TestCastToTarget, self).setUp(conf=cfg.ConfigOpts()) - - def test_cast_to_target(self): - target = messaging.Target(**self.ctor) - expect_target = messaging.Target(**self.expect) - - transport = _FakeTransport(self.conf) - client = messaging.RPCClient(transport, target) - - self.mox.StubOutWithMock(transport, '_send') - - msg = dict(method='foo', args={}) - if 'namespace' in self.expect: - msg['namespace'] = self.expect['namespace'] - if 'version' in self.expect: - msg['version'] = self.expect['version'] - transport._send(expect_target, {}, msg, retry=None) - - self.mox.ReplayAll() - - if self.prepare: - client = client.prepare(**self.prepare) - if self.double_prepare: - client = client.prepare(**self.prepare) - client.cast({}, 'foo') - - -TestCastToTarget.generate_scenarios() - - -_notset = object() - - -class TestCallTimeout(test_utils.BaseTestCase): - - scenarios = [ - ('all_none', - dict(confval=None, ctor=None, prepare=_notset, expect=None)), - ('confval', - dict(confval=21.1, ctor=None, prepare=_notset, expect=21.1)), - ('ctor', - dict(confval=None, ctor=21.1, prepare=_notset, expect=21.1)), - ('ctor_zero', - dict(confval=None, ctor=0, prepare=_notset, expect=0)), - ('prepare', - dict(confval=None, ctor=None, prepare=21.1, expect=21.1)), - ('prepare_override', - dict(confval=None, ctor=10.1, prepare=21.1, expect=21.1)), - ('prepare_zero', - dict(confval=None, ctor=None, prepare=0, expect=0)), - ] - - def test_call_timeout(self): - self.config(rpc_response_timeout=self.confval) - - transport = _FakeTransport(self.conf) - client = messaging.RPCClient(transport, messaging.Target(), - timeout=self.ctor) - - self.mox.StubOutWithMock(transport, '_send') - - msg = dict(method='foo', args={}) - kwargs = dict(wait_for_reply=True, timeout=self.expect, retry=None) - 
transport._send(messaging.Target(), {}, msg, **kwargs) - - self.mox.ReplayAll() - - if self.prepare is not _notset: - client = client.prepare(timeout=self.prepare) - client.call({}, 'foo') - - -class TestCallRetry(test_utils.BaseTestCase): - - scenarios = [ - ('all_none', dict(ctor=None, prepare=_notset, expect=None)), - ('ctor', dict(ctor=21, prepare=_notset, expect=21)), - ('ctor_zero', dict(ctor=0, prepare=_notset, expect=0)), - ('prepare', dict(ctor=None, prepare=21, expect=21)), - ('prepare_override', dict(ctor=10, prepare=21, expect=21)), - ('prepare_zero', dict(ctor=None, prepare=0, expect=0)), - ] - - def test_call_retry(self): - transport = _FakeTransport(self.conf) - client = messaging.RPCClient(transport, messaging.Target(), - retry=self.ctor) - - self.mox.StubOutWithMock(transport, '_send') - - msg = dict(method='foo', args={}) - kwargs = dict(wait_for_reply=True, timeout=60, - retry=self.expect) - transport._send(messaging.Target(), {}, msg, **kwargs) - - self.mox.ReplayAll() - - if self.prepare is not _notset: - client = client.prepare(retry=self.prepare) - client.call({}, 'foo') - - -class TestCallFanout(test_utils.BaseTestCase): - - scenarios = [ - ('target', dict(prepare=_notset, target={'fanout': True})), - ('prepare', dict(prepare={'fanout': True}, target={})), - ('both', dict(prepare={'fanout': True}, target={'fanout': True})), - ] - - def test_call_fanout(self): - transport = _FakeTransport(self.conf) - client = messaging.RPCClient(transport, - messaging.Target(**self.target)) - - if self.prepare is not _notset: - client = client.prepare(**self.prepare) - - self.assertRaises(exceptions.InvalidTarget, - client.call, {}, 'foo') - - -class TestSerializer(test_utils.BaseTestCase): - - scenarios = [ - ('cast', - dict(call=False, - ctxt=dict(user='bob'), - args=dict(a='a', b='b', c='c'), - retval=None)), - ('call', - dict(call=True, - ctxt=dict(user='bob'), - args=dict(a='a', b='b', c='c'), - retval='d')), - ] - - def test_call_serializer(self): - self.config(rpc_response_timeout=None) - - transport = _FakeTransport(self.conf) - serializer = msg_serializer.NoOpSerializer() - - client = messaging.RPCClient(transport, messaging.Target(), - serializer=serializer) - - self.mox.StubOutWithMock(transport, '_send') - - msg = dict(method='foo', - args=dict([(k, 's' + v) for k, v in self.args.items()])) - kwargs = dict(wait_for_reply=True, timeout=None) if self.call else {} - kwargs['retry'] = None - transport._send(messaging.Target(), - dict(user='alice'), - msg, - **kwargs).AndReturn(self.retval) - - self.mox.StubOutWithMock(serializer, 'serialize_entity') - self.mox.StubOutWithMock(serializer, 'deserialize_entity') - self.mox.StubOutWithMock(serializer, 'serialize_context') - - for arg in self.args: - serializer.serialize_entity(self.ctxt, arg).AndReturn('s' + arg) - - if self.call: - serializer.deserialize_entity(self.ctxt, self.retval).\ - AndReturn('d' + self.retval) - - serializer.serialize_context(self.ctxt).AndReturn(dict(user='alice')) - - self.mox.ReplayAll() - - method = client.call if self.call else client.cast - retval = method(self.ctxt, 'foo', **self.args) - if self.retval is not None: - self.assertEqual('d' + self.retval, retval) - - -class TestVersionCap(test_utils.BaseTestCase): - - _call_vs_cast = [ - ('call', dict(call=True)), - ('cast', dict(call=False)), - ] - - _cap_scenarios = [ - ('all_none', - dict(cap=None, prepare_cap=_notset, - version=None, prepare_version=_notset, - success=True)), - ('ctor_cap_ok', - dict(cap='1.1', prepare_cap=_notset, - 
version='1.0', prepare_version=_notset, - success=True)), - ('ctor_cap_override_ok', - dict(cap='2.0', prepare_cap='1.1', - version='1.0', prepare_version='1.0', - success=True)), - ('ctor_cap_override_none_ok', - dict(cap='1.1', prepare_cap=None, - version='1.0', prepare_version=_notset, - success=True)), - ('ctor_cap_minor_fail', - dict(cap='1.0', prepare_cap=_notset, - version='1.1', prepare_version=_notset, - success=False)), - ('ctor_cap_major_fail', - dict(cap='2.0', prepare_cap=_notset, - version=None, prepare_version='1.0', - success=False)), - ] - - @classmethod - def generate_scenarios(cls): - cls.scenarios = ( - testscenarios.multiply_scenarios(cls._call_vs_cast, - cls._cap_scenarios)) - - def test_version_cap(self): - self.config(rpc_response_timeout=None) - - transport = _FakeTransport(self.conf) - - target = messaging.Target(version=self.version) - client = messaging.RPCClient(transport, target, - version_cap=self.cap) - - if self.success: - self.mox.StubOutWithMock(transport, '_send') - - if self.prepare_version is not _notset: - target = target(version=self.prepare_version) - - msg = dict(method='foo', args={}) - if target.version is not None: - msg['version'] = target.version - - kwargs = {'retry': None} - if self.call: - kwargs['wait_for_reply'] = True - kwargs['timeout'] = None - - transport._send(target, {}, msg, **kwargs) - - self.mox.ReplayAll() - - prep_kwargs = {} - if self.prepare_cap is not _notset: - prep_kwargs['version_cap'] = self.prepare_cap - if self.prepare_version is not _notset: - prep_kwargs['version'] = self.prepare_version - if prep_kwargs: - client = client.prepare(**prep_kwargs) - - method = client.call if self.call else client.cast - try: - method({}, 'foo') - except Exception as ex: - self.assertIsInstance(ex, messaging.RPCVersionCapError, ex) - self.assertFalse(self.success) - else: - self.assertTrue(self.success) - - -TestVersionCap.generate_scenarios() - - -class TestCanSendVersion(test_utils.BaseTestCase): - - scenarios = [ - ('all_none', - dict(cap=None, prepare_cap=_notset, - version=None, prepare_version=_notset, - can_send_version=_notset, - can_send=True)), - ('ctor_cap_ok', - dict(cap='1.1', prepare_cap=_notset, - version='1.0', prepare_version=_notset, - can_send_version=_notset, - can_send=True)), - ('ctor_cap_override_ok', - dict(cap='2.0', prepare_cap='1.1', - version='1.0', prepare_version='1.0', - can_send_version=_notset, - can_send=True)), - ('ctor_cap_override_none_ok', - dict(cap='1.1', prepare_cap=None, - version='1.0', prepare_version=_notset, - can_send_version=_notset, - can_send=True)), - ('ctor_cap_can_send_ok', - dict(cap='1.1', prepare_cap=None, - version='1.0', prepare_version=_notset, - can_send_version='1.1', - can_send=True)), - ('ctor_cap_can_send_none_ok', - dict(cap='1.1', prepare_cap=None, - version='1.0', prepare_version=_notset, - can_send_version=None, - can_send=True)), - ('ctor_cap_minor_fail', - dict(cap='1.0', prepare_cap=_notset, - version='1.1', prepare_version=_notset, - can_send_version=_notset, - can_send=False)), - ('ctor_cap_major_fail', - dict(cap='2.0', prepare_cap=_notset, - version=None, prepare_version='1.0', - can_send_version=_notset, - can_send=False)), - ] - - def test_version_cap(self): - self.config(rpc_response_timeout=None) - - transport = _FakeTransport(self.conf) - - target = messaging.Target(version=self.version) - client = messaging.RPCClient(transport, target, - version_cap=self.cap) - - prep_kwargs = {} - if self.prepare_cap is not _notset: - prep_kwargs['version_cap'] = 
self.prepare_cap - if self.prepare_version is not _notset: - prep_kwargs['version'] = self.prepare_version - if prep_kwargs: - client = client.prepare(**prep_kwargs) - - if self.can_send_version is not _notset: - can_send = client.can_send_version(version=self.can_send_version) - else: - can_send = client.can_send_version() - - self.assertEqual(self.can_send, can_send) diff --git a/tests/rpc/test_dispatcher.py b/tests/rpc/test_dispatcher.py deleted file mode 100644 index 64181f026..000000000 --- a/tests/rpc/test_dispatcher.py +++ /dev/null @@ -1,178 +0,0 @@ - -# Copyright 2013 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import testscenarios - -from oslo import messaging -from oslo.messaging import serializer as msg_serializer -from oslo_messaging.tests import utils as test_utils -from six.moves import mock - -load_tests = testscenarios.load_tests_apply_scenarios - - -class _FakeEndpoint(object): - - def __init__(self, target=None): - self.target = target - - def foo(self, ctxt, **kwargs): - pass - - def bar(self, ctxt, **kwargs): - pass - - -class TestDispatcher(test_utils.BaseTestCase): - - scenarios = [ - ('no_endpoints', - dict(endpoints=[], - dispatch_to=None, - ctxt={}, msg=dict(method='foo'), - success=False, ex=messaging.UnsupportedVersion)), - ('default_target', - dict(endpoints=[{}], - dispatch_to=dict(endpoint=0, method='foo'), - ctxt={}, msg=dict(method='foo'), - success=True, ex=None)), - ('default_target_ctxt_and_args', - dict(endpoints=[{}], - dispatch_to=dict(endpoint=0, method='bar'), - ctxt=dict(user='bob'), msg=dict(method='bar', - args=dict(blaa=True)), - success=True, ex=None)), - ('default_target_namespace', - dict(endpoints=[{}], - dispatch_to=dict(endpoint=0, method='foo'), - ctxt={}, msg=dict(method='foo', namespace=None), - success=True, ex=None)), - ('default_target_version', - dict(endpoints=[{}], - dispatch_to=dict(endpoint=0, method='foo'), - ctxt={}, msg=dict(method='foo', version='1.0'), - success=True, ex=None)), - ('default_target_no_such_method', - dict(endpoints=[{}], - dispatch_to=None, - ctxt={}, msg=dict(method='foobar'), - success=False, ex=messaging.NoSuchMethod)), - ('namespace', - dict(endpoints=[{}, dict(namespace='testns')], - dispatch_to=dict(endpoint=1, method='foo'), - ctxt={}, msg=dict(method='foo', namespace='testns'), - success=True, ex=None)), - ('namespace_mismatch', - dict(endpoints=[{}, dict(namespace='testns')], - dispatch_to=None, - ctxt={}, msg=dict(method='foo', namespace='nstest'), - success=False, ex=messaging.UnsupportedVersion)), - ('version', - dict(endpoints=[dict(version='1.5'), dict(version='3.4')], - dispatch_to=dict(endpoint=1, method='foo'), - ctxt={}, msg=dict(method='foo', version='3.2'), - success=True, ex=None)), - ('version_mismatch', - dict(endpoints=[dict(version='1.5'), dict(version='3.0')], - dispatch_to=None, - ctxt={}, msg=dict(method='foo', version='3.2'), - success=False, ex=messaging.UnsupportedVersion)), - ] - - def test_dispatcher(self): - endpoints = [mock.Mock(spec=_FakeEndpoint, - 
target=messaging.Target(**e)) - for e in self.endpoints] - - serializer = None - target = messaging.Target() - dispatcher = messaging.RPCDispatcher(target, endpoints, serializer) - - def check_reply(reply=None, failure=None, log_failure=True): - if self.ex and failure is not None: - ex = failure[1] - self.assertFalse(self.success, ex) - self.assertIsNotNone(self.ex, ex) - self.assertIsInstance(ex, self.ex, ex) - if isinstance(ex, messaging.NoSuchMethod): - self.assertEqual(self.msg.get('method'), ex.method) - elif isinstance(ex, messaging.UnsupportedVersion): - self.assertEqual(self.msg.get('version', '1.0'), - ex.version) - if ex.method: - self.assertEqual(self.msg.get('method'), ex.method) - else: - self.assertTrue(self.success, failure) - self.assertIsNone(failure) - - incoming = mock.Mock(ctxt=self.ctxt, message=self.msg) - incoming.reply.side_effect = check_reply - - with dispatcher(incoming) as callback: - callback() - - for n, endpoint in enumerate(endpoints): - for method_name in ['foo', 'bar']: - method = getattr(endpoint, method_name) - if self.dispatch_to and n == self.dispatch_to['endpoint'] and \ - method_name == self.dispatch_to['method']: - method.assert_called_once_with( - self.ctxt, **self.msg.get('args', {})) - else: - self.assertEqual(0, method.call_count) - - self.assertEqual(1, incoming.reply.call_count) - - -class TestSerializer(test_utils.BaseTestCase): - - scenarios = [ - ('no_args_or_retval', - dict(ctxt={}, dctxt={}, args={}, retval=None)), - ('args_and_retval', - dict(ctxt=dict(user='bob'), - dctxt=dict(user='alice'), - args=dict(a='a', b='b', c='c'), - retval='d')), - ] - - def test_serializer(self): - endpoint = _FakeEndpoint() - serializer = msg_serializer.NoOpSerializer() - target = messaging.Target() - dispatcher = messaging.RPCDispatcher(target, [endpoint], serializer) - - self.mox.StubOutWithMock(endpoint, 'foo') - args = dict([(k, 'd' + v) for k, v in self.args.items()]) - endpoint.foo(self.dctxt, **args).AndReturn(self.retval) - - self.mox.StubOutWithMock(serializer, 'serialize_entity') - self.mox.StubOutWithMock(serializer, 'deserialize_entity') - self.mox.StubOutWithMock(serializer, 'deserialize_context') - - serializer.deserialize_context(self.ctxt).AndReturn(self.dctxt) - - for arg in self.args: - serializer.deserialize_entity(self.dctxt, arg).AndReturn('d' + arg) - - serializer.serialize_entity(self.dctxt, self.retval).\ - AndReturn('s' + self.retval if self.retval else None) - - self.mox.ReplayAll() - - retval = dispatcher._dispatch(self.ctxt, dict(method='foo', - args=self.args)) - if self.retval is not None: - self.assertEqual('s' + self.retval, retval) diff --git a/tests/rpc/test_server.py b/tests/rpc/test_server.py deleted file mode 100644 index 6e1ae1603..000000000 --- a/tests/rpc/test_server.py +++ /dev/null @@ -1,503 +0,0 @@ - -# Copyright 2013 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import threading - -import testscenarios - -from oslo import messaging -from oslo_config import cfg -from oslo_messaging.tests import utils as test_utils -from six.moves import mock - -load_tests = testscenarios.load_tests_apply_scenarios - - -class ServerSetupMixin(object): - - class Server(object): - def __init__(self, transport, topic, server, endpoint, serializer): - target = messaging.Target(topic=topic, server=server) - self._server = messaging.get_rpc_server(transport, - target, - [endpoint, self], - serializer=serializer) - - def stop(self, ctxt): - # Check start() does nothing with a running server - self._server.start() - self._server.stop() - self._server.wait() - - def start(self): - self._server.start() - - class TestSerializer(object): - - def serialize_entity(self, ctxt, entity): - return ('s' + entity) if entity else entity - - def deserialize_entity(self, ctxt, entity): - return ('d' + entity) if entity else entity - - def serialize_context(self, ctxt): - return dict([(k, 's' + v) for k, v in ctxt.items()]) - - def deserialize_context(self, ctxt): - return dict([(k, 'd' + v) for k, v in ctxt.items()]) - - def __init__(self): - self.serializer = self.TestSerializer() - - def _setup_server(self, transport, endpoint, topic=None, server=None): - server = self.Server(transport, - topic=topic or 'testtopic', - server=server or 'testserver', - endpoint=endpoint, - serializer=self.serializer) - - thread = threading.Thread(target=server.start) - thread.daemon = True - thread.start() - - return thread - - def _stop_server(self, client, server_thread, topic=None): - if topic is not None: - client = client.prepare(topic=topic) - client.cast({}, 'stop') - server_thread.join(timeout=30) - - def _setup_client(self, transport, topic='testtopic'): - return messaging.RPCClient(transport, - messaging.Target(topic=topic), - serializer=self.serializer) - - -class TestRPCServer(test_utils.BaseTestCase, ServerSetupMixin): - - def __init__(self, *args): - super(TestRPCServer, self).__init__(*args) - ServerSetupMixin.__init__(self) - - def setUp(self): - super(TestRPCServer, self).setUp(conf=cfg.ConfigOpts()) - - def test_constructor(self): - transport = messaging.get_transport(self.conf, url='fake:') - target = messaging.Target(topic='foo', server='bar') - endpoints = [object()] - serializer = object() - - server = messaging.get_rpc_server(transport, target, endpoints, - serializer=serializer) - - self.assertIs(server.conf, self.conf) - self.assertIs(server.transport, transport) - self.assertIsInstance(server.dispatcher, messaging.RPCDispatcher) - self.assertIs(server.dispatcher.endpoints, endpoints) - self.assertIs(server.dispatcher.serializer, serializer) - self.assertEqual('blocking', server.executor) - - def test_server_wait_method(self): - transport = messaging.get_transport(self.conf, url='fake:') - target = messaging.Target(topic='foo', server='bar') - endpoints = [object()] - serializer = object() - - server = messaging.get_rpc_server(transport, target, endpoints, - serializer=serializer) - # Mocking executor - server._executor = mock.Mock() - # Here assigning executor's listener object to listener variable - # before calling wait method, beacuse in wait method we are - # setting executor to None. 
- listener = server._executor.listener - # call server wait method - server.wait() - self.assertIsNone(server._executor) - self.assertEqual(1, listener.cleanup.call_count) - - def test_no_target_server(self): - transport = messaging.get_transport(self.conf, url='fake:') - - server = messaging.get_rpc_server(transport, - messaging.Target(topic='testtopic'), - []) - try: - server.start() - except Exception as ex: - self.assertIsInstance(ex, messaging.InvalidTarget, ex) - self.assertEqual('testtopic', ex.target.topic) - else: - self.assertTrue(False) - - def test_no_server_topic(self): - transport = messaging.get_transport(self.conf, url='fake:') - target = messaging.Target(server='testserver') - server = messaging.get_rpc_server(transport, target, []) - try: - server.start() - except Exception as ex: - self.assertIsInstance(ex, messaging.InvalidTarget, ex) - self.assertEqual('testserver', ex.target.server) - else: - self.assertTrue(False) - - def _test_no_client_topic(self, call=True): - transport = messaging.get_transport(self.conf, url='fake:') - - client = self._setup_client(transport, topic=None) - - method = client.call if call else client.cast - - try: - method({}, 'ping', arg='foo') - except Exception as ex: - self.assertIsInstance(ex, messaging.InvalidTarget, ex) - self.assertIsNotNone(ex.target) - else: - self.assertTrue(False) - - def test_no_client_topic_call(self): - self._test_no_client_topic(call=True) - - def test_no_client_topic_cast(self): - self._test_no_client_topic(call=False) - - def test_client_call_timeout(self): - transport = messaging.get_transport(self.conf, url='fake:') - - finished = False - wait = threading.Condition() - - class TestEndpoint(object): - def ping(self, ctxt, arg): - with wait: - if not finished: - wait.wait() - - server_thread = self._setup_server(transport, TestEndpoint()) - client = self._setup_client(transport) - - try: - client.prepare(timeout=0).call({}, 'ping', arg='foo') - except Exception as ex: - self.assertIsInstance(ex, messaging.MessagingTimeout, ex) - else: - self.assertTrue(False) - - with wait: - finished = True - wait.notify() - - self._stop_server(client, server_thread) - - def test_unknown_executor(self): - transport = messaging.get_transport(self.conf, url='fake:') - - try: - messaging.get_rpc_server(transport, None, [], executor='foo') - except Exception as ex: - self.assertIsInstance(ex, messaging.ExecutorLoadFailure) - self.assertEqual('foo', ex.executor) - else: - self.assertTrue(False) - - def test_cast(self): - transport = messaging.get_transport(self.conf, url='fake:') - - class TestEndpoint(object): - def __init__(self): - self.pings = [] - - def ping(self, ctxt, arg): - self.pings.append(arg) - - endpoint = TestEndpoint() - server_thread = self._setup_server(transport, endpoint) - client = self._setup_client(transport) - - client.cast({}, 'ping', arg='foo') - client.cast({}, 'ping', arg='bar') - - self._stop_server(client, server_thread) - - self.assertEqual(['dsfoo', 'dsbar'], endpoint.pings) - - def test_call(self): - transport = messaging.get_transport(self.conf, url='fake:') - - class TestEndpoint(object): - def ping(self, ctxt, arg): - return arg - - server_thread = self._setup_server(transport, TestEndpoint()) - client = self._setup_client(transport) - - self.assertIsNone(client.call({}, 'ping', arg=None)) - self.assertEqual(0, client.call({}, 'ping', arg=0)) - self.assertEqual(False, client.call({}, 'ping', arg=False)) - self.assertEqual([], client.call({}, 'ping', arg=[])) - self.assertEqual({}, client.call({}, 
'ping', arg={})) - self.assertEqual('dsdsfoo', client.call({}, 'ping', arg='foo')) - - self._stop_server(client, server_thread) - - def test_direct_call(self): - transport = messaging.get_transport(self.conf, url='fake:') - - class TestEndpoint(object): - def ping(self, ctxt, arg): - return arg - - server_thread = self._setup_server(transport, TestEndpoint()) - client = self._setup_client(transport) - - direct = client.prepare(server='testserver') - self.assertIsNone(direct.call({}, 'ping', arg=None)) - self.assertEqual(0, client.call({}, 'ping', arg=0)) - self.assertEqual(False, client.call({}, 'ping', arg=False)) - self.assertEqual([], client.call({}, 'ping', arg=[])) - self.assertEqual({}, client.call({}, 'ping', arg={})) - self.assertEqual('dsdsfoo', direct.call({}, 'ping', arg='foo')) - - self._stop_server(client, server_thread) - - def test_context(self): - transport = messaging.get_transport(self.conf, url='fake:') - - class TestEndpoint(object): - def ctxt_check(self, ctxt, key): - return ctxt[key] - - server_thread = self._setup_server(transport, TestEndpoint()) - client = self._setup_client(transport) - - self.assertEqual('dsdsb', - client.call({'dsa': 'b'}, - 'ctxt_check', - key='a')) - - self._stop_server(client, server_thread) - - def test_failure(self): - transport = messaging.get_transport(self.conf, url='fake:') - - class TestEndpoint(object): - def ping(self, ctxt, arg): - raise ValueError(arg) - - server_thread = self._setup_server(transport, TestEndpoint()) - client = self._setup_client(transport) - - try: - client.call({}, 'ping', arg='foo') - except Exception as ex: - self.assertIsInstance(ex, ValueError) - self.assertEqual('dsfoo', str(ex)) - else: - self.assertTrue(False) - - self._stop_server(client, server_thread) - - def test_expected_failure(self): - transport = messaging.get_transport(self.conf, url='fake:') - - class TestEndpoint(object): - @messaging.expected_exceptions(ValueError) - def ping(self, ctxt, arg): - raise ValueError(arg) - - server_thread = self._setup_server(transport, TestEndpoint()) - client = self._setup_client(transport) - - try: - client.call({}, 'ping', arg='foo') - except Exception as ex: - self.assertIsInstance(ex, ValueError) - self.assertEqual('dsfoo', str(ex)) - else: - self.assertTrue(False) - - self._stop_server(client, server_thread) - - -class TestMultipleServers(test_utils.BaseTestCase, ServerSetupMixin): - - _exchanges = [ - ('same_exchange', dict(exchange1=None, exchange2=None)), - ('diff_exchange', dict(exchange1='x1', exchange2='x2')), - ] - - _topics = [ - ('same_topic', dict(topic1='t', topic2='t')), - ('diff_topic', dict(topic1='t1', topic2='t2')), - ] - - _server = [ - ('same_server', dict(server1=None, server2=None)), - ('diff_server', dict(server1='s1', server2='s2')), - ] - - _fanout = [ - ('not_fanout', dict(fanout1=None, fanout2=None)), - ('fanout', dict(fanout1=True, fanout2=True)), - ] - - _method = [ - ('call', dict(call1=True, call2=True)), - ('cast', dict(call1=False, call2=False)), - ] - - _endpoints = [ - ('one_endpoint', - dict(multi_endpoints=False, - expect1=['ds1', 'ds2'], - expect2=['ds1', 'ds2'])), - ('two_endpoints', - dict(multi_endpoints=True, - expect1=['ds1'], - expect2=['ds2'])), - ] - - @classmethod - def generate_scenarios(cls): - cls.scenarios = testscenarios.multiply_scenarios(cls._exchanges, - cls._topics, - cls._server, - cls._fanout, - cls._method, - cls._endpoints) - - # fanout call not supported - def filter_fanout_call(scenario): - params = scenario[1] - fanout = params['fanout1'] or 
params['fanout2'] - call = params['call1'] or params['call2'] - return not (call and fanout) - - # listening multiple times on same topic/server pair not supported - def filter_same_topic_and_server(scenario): - params = scenario[1] - single_topic = params['topic1'] == params['topic2'] - single_server = params['server1'] == params['server2'] - return not (single_topic and single_server) - - # fanout to multiple servers on same topic and exchange - # each endpoint will receive both messages - def fanout_to_servers(scenario): - params = scenario[1] - fanout = params['fanout1'] or params['fanout2'] - single_exchange = params['exchange1'] == params['exchange2'] - single_topic = params['topic1'] == params['topic2'] - multi_servers = params['server1'] != params['server2'] - if fanout and single_exchange and single_topic and multi_servers: - params['expect1'] = params['expect1'][:] + params['expect1'] - params['expect2'] = params['expect2'][:] + params['expect2'] - return scenario - - # multiple endpoints on same topic and exchange - # either endpoint can get either message - def single_topic_multi_endpoints(scenario): - params = scenario[1] - single_exchange = params['exchange1'] == params['exchange2'] - single_topic = params['topic1'] == params['topic2'] - if single_topic and single_exchange and params['multi_endpoints']: - params['expect_either'] = (params['expect1'] + - params['expect2']) - params['expect1'] = params['expect2'] = [] - else: - params['expect_either'] = [] - return scenario - - for f in [filter_fanout_call, filter_same_topic_and_server]: - cls.scenarios = filter(f, cls.scenarios) - for m in [fanout_to_servers, single_topic_multi_endpoints]: - cls.scenarios = map(m, cls.scenarios) - - def __init__(self, *args): - super(TestMultipleServers, self).__init__(*args) - ServerSetupMixin.__init__(self) - - def setUp(self): - super(TestMultipleServers, self).setUp(conf=cfg.ConfigOpts()) - - def test_multiple_servers(self): - url1 = 'fake:///' + (self.exchange1 or '') - url2 = 'fake:///' + (self.exchange2 or '') - - transport1 = messaging.get_transport(self.conf, url=url1) - if url1 != url2: - transport2 = messaging.get_transport(self.conf, url=url1) - else: - transport2 = transport1 - - class TestEndpoint(object): - def __init__(self): - self.pings = [] - - def ping(self, ctxt, arg): - self.pings.append(arg) - - def alive(self, ctxt): - return 'alive' - - if self.multi_endpoints: - endpoint1, endpoint2 = TestEndpoint(), TestEndpoint() - else: - endpoint1 = endpoint2 = TestEndpoint() - - thread1 = self._setup_server(transport1, endpoint1, - topic=self.topic1, server=self.server1) - thread2 = self._setup_server(transport2, endpoint2, - topic=self.topic2, server=self.server2) - - client1 = self._setup_client(transport1, topic=self.topic1) - client2 = self._setup_client(transport2, topic=self.topic2) - - client1 = client1.prepare(server=self.server1) - client2 = client2.prepare(server=self.server2) - - if self.fanout1: - client1.call({}, 'alive') - client1 = client1.prepare(fanout=True) - if self.fanout2: - client2.call({}, 'alive') - client2 = client2.prepare(fanout=True) - - (client1.call if self.call1 else client1.cast)({}, 'ping', arg='1') - (client2.call if self.call2 else client2.cast)({}, 'ping', arg='2') - - self.assertTrue(thread1.isAlive()) - self._stop_server(client1.prepare(fanout=None), - thread1, topic=self.topic1) - self.assertTrue(thread2.isAlive()) - self._stop_server(client2.prepare(fanout=None), - thread2, topic=self.topic2) - - def check(pings, expect): - 
self.assertEqual(len(expect), len(pings)) - for a in expect: - self.assertIn(a, pings) - - if self.expect_either: - check(endpoint1.pings + endpoint2.pings, self.expect_either) - else: - check(endpoint1.pings, self.expect1) - check(endpoint2.pings, self.expect2) - - -TestMultipleServers.generate_scenarios() diff --git a/tests/test_amqp_driver.py b/tests/test_amqp_driver.py deleted file mode 100644 index df0a76952..000000000 --- a/tests/test_amqp_driver.py +++ /dev/null @@ -1,738 +0,0 @@ -# Copyright (C) 2014 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import logging -import os -import select -import socket -import threading -import time -import uuid - -import six -from six import moves -import testtools - -from oslo import messaging -from oslo_messaging.tests import utils as test_utils - -if six.PY2: - # NOTE(flaper87): pyngus currently doesn't support py34. It's - # on the works, though. - from oslo_messaging._drivers.protocols.amqp import driver as amqp_driver - import pyngus - - -LOG = logging.getLogger(__name__) - - -class _ListenerThread(threading.Thread): - """Run a blocking listener in a thread.""" - def __init__(self, listener, msg_count): - super(_ListenerThread, self).__init__() - self.listener = listener - self.msg_count = msg_count - self.messages = moves.queue.Queue() - self.daemon = True - self.start() - - def run(self): - LOG.debug("Listener started") - while self.msg_count > 0: - in_msg = self.listener.poll() - self.messages.put(in_msg) - self.msg_count -= 1 - if in_msg.message.get('method') == 'echo': - in_msg.reply(reply={'correlation-id': - in_msg.message.get('id')}) - LOG.debug("Listener stopped") - - def get_messages(self): - """Returns a list of all received messages.""" - msgs = [] - try: - while True: - m = self.messages.get(False) - msgs.append(m) - except moves.queue.Empty: - pass - return msgs - - -@testtools.skipUnless(six.PY2, "No Py3K support yet") -class TestProtonDriverLoad(test_utils.BaseTestCase): - - def setUp(self): - super(TestProtonDriverLoad, self).setUp() - self.messaging_conf.transport_driver = 'amqp' - - def test_driver_load(self): - transport = messaging.get_transport(self.conf) - self.assertIsInstance(transport._driver, - amqp_driver.ProtonDriver) - - -class _AmqpBrokerTestCase(test_utils.BaseTestCase): - - @testtools.skipUnless(six.PY2, "No Py3K support yet") - def setUp(self): - super(_AmqpBrokerTestCase, self).setUp() - self._broker = FakeBroker() - self._broker_addr = "amqp://%s:%d" % (self._broker.host, - self._broker.port) - self._broker_url = messaging.TransportURL.parse(self.conf, - self._broker_addr) - self._broker.start() - - def tearDown(self): - super(_AmqpBrokerTestCase, self).tearDown() - self._broker.stop() - - -class TestAmqpSend(_AmqpBrokerTestCase): - """Test sending and receiving messages.""" - - def test_driver_unconnected_cleanup(self): - """Verify the driver can cleanly shutdown even if never connected.""" - driver = amqp_driver.ProtonDriver(self.conf, self._broker_url) - driver.cleanup() - - def 
test_listener_cleanup(self): - """Verify unused listener can cleanly shutdown.""" - driver = amqp_driver.ProtonDriver(self.conf, self._broker_url) - target = messaging.Target(topic="test-topic") - listener = driver.listen(target) - self.assertIsInstance(listener, amqp_driver.ProtonListener) - driver.cleanup() - - def test_send_no_reply(self): - driver = amqp_driver.ProtonDriver(self.conf, self._broker_url) - target = messaging.Target(topic="test-topic") - listener = _ListenerThread(driver.listen(target), 1) - rc = driver.send(target, {"context": True}, - {"msg": "value"}, wait_for_reply=False) - self.assertIsNone(rc) - listener.join(timeout=30) - self.assertFalse(listener.isAlive()) - self.assertEqual(listener.messages.get().message, {"msg": "value"}) - driver.cleanup() - - def test_send_exchange_with_reply(self): - driver = amqp_driver.ProtonDriver(self.conf, self._broker_url) - target1 = messaging.Target(topic="test-topic", exchange="e1") - listener1 = _ListenerThread(driver.listen(target1), 1) - target2 = messaging.Target(topic="test-topic", exchange="e2") - listener2 = _ListenerThread(driver.listen(target2), 1) - - rc = driver.send(target1, {"context": "whatever"}, - {"method": "echo", "id": "e1"}, - wait_for_reply=True, - timeout=30) - self.assertIsNotNone(rc) - self.assertEqual(rc.get('correlation-id'), 'e1') - - rc = driver.send(target2, {"context": "whatever"}, - {"method": "echo", "id": "e2"}, - wait_for_reply=True, - timeout=30) - self.assertIsNotNone(rc) - self.assertEqual(rc.get('correlation-id'), 'e2') - - listener1.join(timeout=30) - self.assertFalse(listener1.isAlive()) - listener2.join(timeout=30) - self.assertFalse(listener2.isAlive()) - driver.cleanup() - - def test_messaging_patterns(self): - """Verify the direct, shared, and fanout message patterns work.""" - driver = amqp_driver.ProtonDriver(self.conf, self._broker_url) - target1 = messaging.Target(topic="test-topic", server="server1") - listener1 = _ListenerThread(driver.listen(target1), 4) - target2 = messaging.Target(topic="test-topic", server="server2") - listener2 = _ListenerThread(driver.listen(target2), 3) - - shared_target = messaging.Target(topic="test-topic") - fanout_target = messaging.Target(topic="test-topic", - fanout=True) - # this should go to only one server: - driver.send(shared_target, {"context": "whatever"}, - {"method": "echo", "id": "either-1"}, - wait_for_reply=True) - self.assertEqual(self._broker.topic_count, 1) - self.assertEqual(self._broker.direct_count, 1) # reply - - # this should go to the other server: - driver.send(shared_target, {"context": "whatever"}, - {"method": "echo", "id": "either-2"}, - wait_for_reply=True) - self.assertEqual(self._broker.topic_count, 2) - self.assertEqual(self._broker.direct_count, 2) # reply - - # these should only go to listener1: - driver.send(target1, {"context": "whatever"}, - {"method": "echo", "id": "server1-1"}, - wait_for_reply=True) - - driver.send(target1, {"context": "whatever"}, - {"method": "echo", "id": "server1-2"}, - wait_for_reply=True) - self.assertEqual(self._broker.direct_count, 6) # 2X(send+reply) - - # this should only go to listener2: - driver.send(target2, {"context": "whatever"}, - {"method": "echo", "id": "server2"}, - wait_for_reply=True) - self.assertEqual(self._broker.direct_count, 8) - - # both listeners should get a copy: - driver.send(fanout_target, {"context": "whatever"}, - {"method": "echo", "id": "fanout"}) - - listener1.join(timeout=30) - self.assertFalse(listener1.isAlive()) - listener2.join(timeout=30) - 
self.assertFalse(listener2.isAlive()) - self.assertEqual(self._broker.fanout_count, 1) - - listener1_ids = [x.message.get('id') for x in listener1.get_messages()] - listener2_ids = [x.message.get('id') for x in listener2.get_messages()] - - self.assertTrue('fanout' in listener1_ids and - 'fanout' in listener2_ids) - self.assertTrue('server1-1' in listener1_ids and - 'server1-1' not in listener2_ids) - self.assertTrue('server1-2' in listener1_ids and - 'server1-2' not in listener2_ids) - self.assertTrue('server2' in listener2_ids and - 'server2' not in listener1_ids) - if 'either-1' in listener1_ids: - self.assertTrue('either-2' in listener2_ids and - 'either-2' not in listener1_ids and - 'either-1' not in listener2_ids) - else: - self.assertTrue('either-2' in listener1_ids and - 'either-2' not in listener2_ids and - 'either-1' in listener2_ids) - driver.cleanup() - - def test_send_timeout(self): - """Verify send timeout.""" - driver = amqp_driver.ProtonDriver(self.conf, self._broker_url) - target = messaging.Target(topic="test-topic") - listener = _ListenerThread(driver.listen(target), 1) - - # the listener will drop this message: - try: - driver.send(target, - {"context": "whatever"}, - {"method": "drop"}, - wait_for_reply=True, - timeout=1.0) - except Exception as ex: - self.assertIsInstance(ex, messaging.MessagingTimeout, ex) - else: - self.assertTrue(False, "No Exception raised!") - listener.join(timeout=30) - self.assertFalse(listener.isAlive()) - driver.cleanup() - - -class TestAmqpNotification(_AmqpBrokerTestCase): - """Test sending and receiving notifications.""" - - def test_notification(self): - driver = amqp_driver.ProtonDriver(self.conf, self._broker_url) - notifications = [(messaging.Target(topic="topic-1"), 'info'), - (messaging.Target(topic="topic-1"), 'error'), - (messaging.Target(topic="topic-2"), 'debug')] - nl = driver.listen_for_notifications(notifications, None) - - # send one for each support version: - msg_count = len(notifications) * 2 - listener = _ListenerThread(nl, msg_count) - targets = ['topic-1.info', - 'topic-1.bad', # will raise MessagingDeliveryFailure - 'bad-topic.debug', # will raise MessagingDeliveryFailure - 'topic-1.error', - 'topic-2.debug'] - - excepted_targets = [] - exception_count = 0 - for version in (1.0, 2.0): - for t in targets: - try: - driver.send_notification(messaging.Target(topic=t), - "context", {'target': t}, - version) - except messaging.MessageDeliveryFailure: - exception_count += 1 - excepted_targets.append(t) - - listener.join(timeout=30) - self.assertFalse(listener.isAlive()) - topics = [x.message.get('target') for x in listener.get_messages()] - self.assertEqual(len(topics), msg_count) - self.assertEqual(topics.count('topic-1.info'), 2) - self.assertEqual(topics.count('topic-1.error'), 2) - self.assertEqual(topics.count('topic-2.debug'), 2) - self.assertEqual(self._broker.dropped_count, 4) - self.assertEqual(exception_count, 4) - self.assertEqual(excepted_targets.count('topic-1.bad'), 2) - self.assertEqual(excepted_targets.count('bad-topic.debug'), 2) - driver.cleanup() - - -@testtools.skipUnless(six.PY2, "No Py3K support yet") -class TestAuthentication(test_utils.BaseTestCase): - - def setUp(self): - super(TestAuthentication, self).setUp() - # for simplicity, encode the credentials as they would appear 'on the - # wire' in a SASL frame - username and password prefixed by zero. 
- user_credentials = ["\0joe\0secret"] - self._broker = FakeBroker(sasl_mechanisms="PLAIN", - user_credentials=user_credentials) - self._broker.start() - - def tearDown(self): - super(TestAuthentication, self).tearDown() - self._broker.stop() - - def test_authentication_ok(self): - """Verify that username and password given in TransportHost are - accepted by the broker. - """ - - addr = "amqp://joe:secret@%s:%d" % (self._broker.host, - self._broker.port) - url = messaging.TransportURL.parse(self.conf, addr) - driver = amqp_driver.ProtonDriver(self.conf, url) - target = messaging.Target(topic="test-topic") - listener = _ListenerThread(driver.listen(target), 1) - rc = driver.send(target, {"context": True}, - {"method": "echo"}, wait_for_reply=True) - self.assertIsNotNone(rc) - listener.join(timeout=30) - self.assertFalse(listener.isAlive()) - driver.cleanup() - - def test_authentication_failure(self): - """Verify that a bad password given in TransportHost is - rejected by the broker. - """ - - addr = "amqp://joe:badpass@%s:%d" % (self._broker.host, - self._broker.port) - url = messaging.TransportURL.parse(self.conf, addr) - driver = amqp_driver.ProtonDriver(self.conf, url) - target = messaging.Target(topic="test-topic") - _ListenerThread(driver.listen(target), 1) - self.assertRaises(messaging.MessagingTimeout, - driver.send, - target, {"context": True}, - {"method": "echo"}, - wait_for_reply=True, - timeout=2.0) - driver.cleanup() - - -@testtools.skipUnless(six.PY2, "No Py3K support yet") -class TestFailover(test_utils.BaseTestCase): - - def setUp(self): - super(TestFailover, self).setUp() - self._brokers = [FakeBroker(), FakeBroker()] - hosts = [] - for broker in self._brokers: - hosts.append(messaging.TransportHost(hostname=broker.host, - port=broker.port)) - self._broker_url = messaging.TransportURL(self.conf, - transport="amqp", - hosts=hosts) - - def tearDown(self): - super(TestFailover, self).tearDown() - for broker in self._brokers: - if broker.isAlive(): - broker.stop() - - def test_broker_failover(self): - """Simulate failover of one broker to another.""" - self._brokers[0].start() - driver = amqp_driver.ProtonDriver(self.conf, self._broker_url) - - target = messaging.Target(topic="my-topic") - listener = _ListenerThread(driver.listen(target), 2) - - rc = driver.send(target, {"context": "whatever"}, - {"method": "echo", "id": "echo-1"}, - wait_for_reply=True, - timeout=30) - self.assertIsNotNone(rc) - self.assertEqual(rc.get('correlation-id'), 'echo-1') - # 1 request msg, 1 response: - self.assertEqual(self._brokers[0].topic_count, 1) - self.assertEqual(self._brokers[0].direct_count, 1) - - # fail broker 0 and start broker 1: - self._brokers[0].stop() - self._brokers[1].start() - deadline = time.time() + 30 - responded = False - sequence = 2 - while deadline > time.time() and not responded: - if not listener.isAlive(): - # listener may have exited after replying to an old correlation - # id: restart new listener - listener = _ListenerThread(driver.listen(target), 1) - try: - rc = driver.send(target, {"context": "whatever"}, - {"method": "echo", - "id": "echo-%d" % sequence}, - wait_for_reply=True, - timeout=2) - self.assertIsNotNone(rc) - self.assertEqual(rc.get('correlation-id'), - 'echo-%d' % sequence) - responded = True - except messaging.MessagingTimeout: - sequence += 1 - - self.assertTrue(responded) - listener.join(timeout=30) - self.assertFalse(listener.isAlive()) - - # note: stopping the broker first tests cleaning up driver without a - # connection active - 
self._brokers[1].stop() - driver.cleanup() - - -class FakeBroker(threading.Thread): - """A test AMQP message 'broker'.""" - - if six.PY2: - class Connection(pyngus.ConnectionEventHandler): - """A single AMQP connection.""" - - def __init__(self, server, socket_, name, - sasl_mechanisms, user_credentials): - """Create a Connection using socket_.""" - self.socket = socket_ - self.name = name - self.server = server - self.connection = server.container.create_connection(name, - self) - self.connection.user_context = self - self.sasl_mechanisms = sasl_mechanisms - self.user_credentials = user_credentials - if sasl_mechanisms: - self.connection.pn_sasl.mechanisms(sasl_mechanisms) - self.connection.pn_sasl.server() - self.connection.open() - self.sender_links = set() - self.closed = False - - def destroy(self): - """Destroy the test connection.""" - while self.sender_links: - link = self.sender_links.pop() - link.destroy() - self.connection.destroy() - self.connection = None - self.socket.close() - - def fileno(self): - """Allows use of this in a select() call.""" - return self.socket.fileno() - - def process_input(self): - """Called when socket is read-ready.""" - try: - pyngus.read_socket_input(self.connection, self.socket) - except socket.error: - pass - self.connection.process(time.time()) - - def send_output(self): - """Called when socket is write-ready.""" - try: - pyngus.write_socket_output(self.connection, - self.socket) - except socket.error: - pass - self.connection.process(time.time()) - - # Pyngus ConnectionEventHandler callbacks: - - def connection_remote_closed(self, connection, reason): - """Peer has closed the connection.""" - self.connection.close() - - def connection_closed(self, connection): - """Connection close completed.""" - self.closed = True # main loop will destroy - - def connection_failed(self, connection, error): - """Connection failure detected.""" - self.connection_closed(connection) - - def sender_requested(self, connection, link_handle, - name, requested_source, properties): - """Create a new message source.""" - addr = requested_source or "source-" + uuid.uuid4().hex - link = FakeBroker.SenderLink(self.server, self, - link_handle, addr) - self.sender_links.add(link) - - def receiver_requested(self, connection, link_handle, - name, requested_target, properties): - """Create a new message consumer.""" - addr = requested_target or "target-" + uuid.uuid4().hex - FakeBroker.ReceiverLink(self.server, self, - link_handle, addr) - - def sasl_step(self, connection, pn_sasl): - if self.sasl_mechanisms == 'PLAIN': - credentials = pn_sasl.recv() - if not credentials: - return # wait until some arrives - if credentials not in self.user_credentials: - # failed - return pn_sasl.done(pn_sasl.AUTH) - pn_sasl.done(pn_sasl.OK) - - class SenderLink(pyngus.SenderEventHandler): - """An AMQP sending link.""" - def __init__(self, server, conn, handle, src_addr=None): - self.server = server - cnn = conn.connection - self.link = cnn.accept_sender(handle, - source_override=src_addr, - event_handler=self) - self.link.open() - self.routed = False - - def destroy(self): - """Destroy the link.""" - self._cleanup() - if self.link: - self.link.destroy() - self.link = None - - def send_message(self, message): - """Send a message over this link.""" - self.link.send(message) - - def _cleanup(self): - if self.routed: - self.server.remove_route(self.link.source_address, - self) - self.routed = False - - # Pyngus SenderEventHandler callbacks: - - def sender_active(self, sender_link): - 
self.server.add_route(self.link.source_address, self) - self.routed = True - - def sender_remote_closed(self, sender_link, error): - self._cleanup() - self.link.close() - - def sender_closed(self, sender_link): - self.destroy() - - class ReceiverLink(pyngus.ReceiverEventHandler): - """An AMQP Receiving link.""" - def __init__(self, server, conn, handle, addr=None): - self.server = server - cnn = conn.connection - self.link = cnn.accept_receiver(handle, - target_override=addr, - event_handler=self) - self.link.open() - self.link.add_capacity(10) - - # ReceiverEventHandler callbacks: - - def receiver_remote_closed(self, receiver_link, error): - self.link.close() - - def receiver_closed(self, receiver_link): - self.link.destroy() - self.link = None - - def message_received(self, receiver_link, message, handle): - """Forward this message out the proper sending link.""" - if self.server.forward_message(message): - self.link.message_accepted(handle) - else: - self.link.message_rejected(handle) - - if self.link.capacity < 1: - self.link.add_capacity(10) - - def __init__(self, server_prefix="exclusive", - broadcast_prefix="broadcast", - group_prefix="unicast", - address_separator=".", - sock_addr="", sock_port=0, - sasl_mechanisms="ANONYMOUS", - user_credentials=None): - """Create a fake broker listening on sock_addr:sock_port.""" - if not pyngus: - raise AssertionError("pyngus module not present") - threading.Thread.__init__(self) - self._server_prefix = server_prefix + address_separator - self._broadcast_prefix = broadcast_prefix + address_separator - self._group_prefix = group_prefix + address_separator - self._address_separator = address_separator - self._sasl_mechanisms = sasl_mechanisms - self._user_credentials = user_credentials - self._wakeup_pipe = os.pipe() - self._my_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - self._my_socket.bind((sock_addr, sock_port)) - self.host, self.port = self._my_socket.getsockname() - self.container = pyngus.Container("test_server_%s:%d" - % (self.host, self.port)) - self._connections = {} - self._sources = {} - # count of messages forwarded, by messaging pattern - self.direct_count = 0 - self.topic_count = 0 - self.fanout_count = 0 - self.dropped_count = 0 - - def start(self): - """Start the server.""" - LOG.debug("Starting Test Broker on %s:%d", self.host, self.port) - self._shutdown = False - self.daemon = True - self._my_socket.listen(10) - super(FakeBroker, self).start() - - def stop(self): - """Shutdown the server.""" - LOG.debug("Stopping test Broker %s:%d", self.host, self.port) - self._shutdown = True - os.write(self._wakeup_pipe[1], "!") - self.join() - LOG.debug("Test Broker %s:%d stopped", self.host, self.port) - - def run(self): - """Process I/O and timer events until the broker is stopped.""" - LOG.debug("Test Broker on %s:%d started", self.host, self.port) - while not self._shutdown: - readers, writers, timers = self.container.need_processing() - - # map pyngus Connections back to _TestConnections: - readfd = [c.user_context for c in readers] - readfd.extend([self._my_socket, self._wakeup_pipe[0]]) - writefd = [c.user_context for c in writers] - - timeout = None - if timers: - # [0] == next expiring timer - deadline = timers[0].next_tick - now = time.time() - timeout = 0 if deadline <= now else deadline - now - - readable, writable, ignore = select.select(readfd, - writefd, - [], - timeout) - worked = set() - for r in readable: - if r is self._my_socket: - # new inbound connection request received, - # create a new Connection for 
it: - client_socket, client_address = self._my_socket.accept() - name = str(client_address) - conn = FakeBroker.Connection(self, client_socket, name, - self._sasl_mechanisms, - self._user_credentials) - self._connections[conn.name] = conn - elif r is self._wakeup_pipe[0]: - os.read(self._wakeup_pipe[0], 512) - else: - r.process_input() - worked.add(r) - - for t in timers: - now = time.time() - if t.next_tick > now: - break - t.process(now) - conn = t.user_context - worked.add(conn) - - for w in writable: - w.send_output() - worked.add(w) - - # clean up any closed connections: - while worked: - conn = worked.pop() - if conn.closed: - del self._connections[conn.name] - conn.destroy() - - # Shutting down - self._my_socket.close() - for conn in self._connections.itervalues(): - conn.destroy() - return 0 - - def add_route(self, address, link): - # route from address -> link[, link ...] - if address not in self._sources: - self._sources[address] = [link] - elif link not in self._sources[address]: - self._sources[address].append(link) - - def remove_route(self, address, link): - if address in self._sources: - if link in self._sources[address]: - self._sources[address].remove(link) - if not self._sources[address]: - del self._sources[address] - - def forward_message(self, message): - # returns True if message was routed - dest = message.address - if dest not in self._sources: - self.dropped_count += 1 - return False - LOG.debug("Forwarding [%s]", dest) - # route "behavior" determined by prefix: - if dest.startswith(self._broadcast_prefix): - self.fanout_count += 1 - for link in self._sources[dest]: - LOG.debug("Broadcast to %s", dest) - link.send_message(message) - elif dest.startswith(self._group_prefix): - # round-robin: - self.topic_count += 1 - link = self._sources[dest].pop(0) - link.send_message(message) - LOG.debug("Send to %s", dest) - self._sources[dest].append(link) - else: - # unicast: - self.direct_count += 1 - LOG.debug("Unicast to %s", dest) - self._sources[dest][0].send_message(message) - return True diff --git a/tests/test_exception_serialization.py b/tests/test_exception_serialization.py deleted file mode 100644 index baa2b79c3..000000000 --- a/tests/test_exception_serialization.py +++ /dev/null @@ -1,308 +0,0 @@ - -# Copyright 2013 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import sys - -import six -import testscenarios - -from oslo import messaging - -from oslo_messaging._drivers import common as exceptions -from oslo_messaging.tests import utils as test_utils -from oslo_serialization import jsonutils - -load_tests = testscenarios.load_tests_apply_scenarios - -EXCEPTIONS_MODULE = 'exceptions' if six.PY2 else 'builtins' - - -class NovaStyleException(Exception): - - format = 'I am Nova' - - def __init__(self, message=None, **kwargs): - self.kwargs = kwargs - if not message: - message = self.format % kwargs - super(NovaStyleException, self).__init__(message) - - -class KwargsStyleException(NovaStyleException): - - format = 'I am %(who)s' - - -def add_remote_postfix(ex): - ex_type = type(ex) - message = str(ex) - str_override = lambda self: message - new_ex_type = type(ex_type.__name__ + "_Remote", (ex_type,), - {'__str__': str_override, - '__unicode__': str_override}) - new_ex_type.__module__ = '%s_Remote' % ex.__class__.__module__ - try: - ex.__class__ = new_ex_type - except TypeError: - ex.args = (message,) + ex.args[1:] - return ex - - -class SerializeRemoteExceptionTestCase(test_utils.BaseTestCase): - - _log_failure = [ - ('log_failure', dict(log_failure=True)), - ('do_not_log_failure', dict(log_failure=False)), - ] - - _add_remote = [ - ('add_remote', dict(add_remote=True)), - ('do_not_add_remote', dict(add_remote=False)), - ] - - _exception_types = [ - ('bog_standard', dict(cls=Exception, - args=['test'], - kwargs={}, - clsname='Exception', - modname=EXCEPTIONS_MODULE, - msg='test')), - ('nova_style', dict(cls=NovaStyleException, - args=[], - kwargs={}, - clsname='NovaStyleException', - modname=__name__, - msg='I am Nova')), - ('nova_style_with_msg', dict(cls=NovaStyleException, - args=['testing'], - kwargs={}, - clsname='NovaStyleException', - modname=__name__, - msg='testing')), - ('kwargs_style', dict(cls=KwargsStyleException, - args=[], - kwargs={'who': 'Oslo'}, - clsname='KwargsStyleException', - modname=__name__, - msg='I am Oslo')), - ] - - @classmethod - def generate_scenarios(cls): - cls.scenarios = testscenarios.multiply_scenarios(cls._log_failure, - cls._add_remote, - cls._exception_types) - - def setUp(self): - super(SerializeRemoteExceptionTestCase, self).setUp() - - def test_serialize_remote_exception(self): - errors = [] - - def stub_error(msg, *a, **kw): - if (a and len(a) == 1 and isinstance(a[0], dict) and a[0]): - a = a[0] - errors.append(str(msg) % a) - - self.stubs.Set(exceptions.LOG, 'error', stub_error) - - try: - try: - raise self.cls(*self.args, **self.kwargs) - except Exception as ex: - cls_error = ex - if self.add_remote: - ex = add_remote_postfix(ex) - raise ex - except Exception: - exc_info = sys.exc_info() - - serialized = exceptions.serialize_remote_exception( - exc_info, log_failure=self.log_failure) - - failure = jsonutils.loads(serialized) - - self.assertEqual(self.clsname, failure['class'], failure) - self.assertEqual(self.modname, failure['module']) - self.assertEqual(self.msg, failure['message']) - self.assertEqual([self.msg], failure['args']) - self.assertEqual(self.kwargs, failure['kwargs']) - - # Note: _Remote prefix not stripped from tracebacks - tb = cls_error.__class__.__name__ + ': ' + self.msg - self.assertIn(tb, ''.join(failure['tb'])) - - if self.log_failure: - self.assertTrue(len(errors) > 0, errors) - else: - self.assertEqual(0, len(errors), errors) - - -SerializeRemoteExceptionTestCase.generate_scenarios() - - -class DeserializeRemoteExceptionTestCase(test_utils.BaseTestCase): - - _standard_allowed = 
[__name__] - - scenarios = [ - ('bog_standard', - dict(allowed=_standard_allowed, - clsname='Exception', - modname=EXCEPTIONS_MODULE, - cls=Exception, - args=['test'], - kwargs={}, - str='test\ntraceback\ntraceback\n', - remote_name='Exception', - remote_args=('test\ntraceback\ntraceback\n', ), - remote_kwargs={})), - ('nova_style', - dict(allowed=_standard_allowed, - clsname='NovaStyleException', - modname=__name__, - cls=NovaStyleException, - args=[], - kwargs={}, - str='test\ntraceback\ntraceback\n', - remote_name='NovaStyleException_Remote', - remote_args=('I am Nova', ), - remote_kwargs={})), - ('nova_style_with_msg', - dict(allowed=_standard_allowed, - clsname='NovaStyleException', - modname=__name__, - cls=NovaStyleException, - args=['testing'], - kwargs={}, - str='test\ntraceback\ntraceback\n', - remote_name='NovaStyleException_Remote', - remote_args=('testing', ), - remote_kwargs={})), - ('kwargs_style', - dict(allowed=_standard_allowed, - clsname='KwargsStyleException', - modname=__name__, - cls=KwargsStyleException, - args=[], - kwargs={'who': 'Oslo'}, - str='test\ntraceback\ntraceback\n', - remote_name='KwargsStyleException_Remote', - remote_args=('I am Oslo', ), - remote_kwargs={})), - ('not_allowed', - dict(allowed=[], - clsname='NovaStyleException', - modname=__name__, - cls=messaging.RemoteError, - args=[], - kwargs={}, - str=("Remote error: NovaStyleException test\n" - "[%r]." % u'traceback\ntraceback\n'), - msg=("Remote error: NovaStyleException test\n" - "[%r]." % u'traceback\ntraceback\n'), - remote_name='RemoteError', - remote_args=(), - remote_kwargs={'exc_type': 'NovaStyleException', - 'value': 'test', - 'traceback': 'traceback\ntraceback\n'})), - ('unknown_module', - dict(allowed=['notexist'], - clsname='Exception', - modname='notexist', - cls=messaging.RemoteError, - args=[], - kwargs={}, - str=("Remote error: Exception test\n" - "[%r]." % u'traceback\ntraceback\n'), - msg=("Remote error: Exception test\n" - "[%r]." % u'traceback\ntraceback\n'), - remote_name='RemoteError', - remote_args=(), - remote_kwargs={'exc_type': 'Exception', - 'value': 'test', - 'traceback': 'traceback\ntraceback\n'})), - ('unknown_exception', - dict(allowed=[], - clsname='FarcicalError', - modname=EXCEPTIONS_MODULE, - cls=messaging.RemoteError, - args=[], - kwargs={}, - str=("Remote error: FarcicalError test\n" - "[%r]." % u'traceback\ntraceback\n'), - msg=("Remote error: FarcicalError test\n" - "[%r]." % u'traceback\ntraceback\n'), - remote_name='RemoteError', - remote_args=(), - remote_kwargs={'exc_type': 'FarcicalError', - 'value': 'test', - 'traceback': 'traceback\ntraceback\n'})), - ('unknown_kwarg', - dict(allowed=[], - clsname='Exception', - modname=EXCEPTIONS_MODULE, - cls=messaging.RemoteError, - args=[], - kwargs={'foobar': 'blaa'}, - str=("Remote error: Exception test\n" - "[%r]." % u'traceback\ntraceback\n'), - msg=("Remote error: Exception test\n" - "[%r]." % u'traceback\ntraceback\n'), - remote_name='RemoteError', - remote_args=(), - remote_kwargs={'exc_type': 'Exception', - 'value': 'test', - 'traceback': 'traceback\ntraceback\n'})), - ('system_exit', - dict(allowed=[], - clsname='SystemExit', - modname=EXCEPTIONS_MODULE, - cls=messaging.RemoteError, - args=[], - kwargs={}, - str=("Remote error: SystemExit test\n" - "[%r]." % u'traceback\ntraceback\n'), - msg=("Remote error: SystemExit test\n" - "[%r]." 
% u'traceback\ntraceback\n'), - remote_name='RemoteError', - remote_args=(), - remote_kwargs={'exc_type': 'SystemExit', - 'value': 'test', - 'traceback': 'traceback\ntraceback\n'})), - ] - - def test_deserialize_remote_exception(self): - failure = { - 'class': self.clsname, - 'module': self.modname, - 'message': 'test', - 'tb': ['traceback\ntraceback\n'], - 'args': self.args, - 'kwargs': self.kwargs, - } - - serialized = jsonutils.dumps(failure) - - ex = exceptions.deserialize_remote_exception(serialized, self.allowed) - - self.assertIsInstance(ex, self.cls) - self.assertEqual(self.remote_name, ex.__class__.__name__) - self.assertEqual(self.str, six.text_type(ex)) - if hasattr(self, 'msg'): - self.assertEqual(self.msg, six.text_type(ex)) - self.assertEqual((self.msg,) + self.remote_args, ex.args) - else: - self.assertEqual(self.remote_args, ex.args) diff --git a/tests/test_expected_exceptions.py b/tests/test_expected_exceptions.py deleted file mode 100644 index 702f3a2b3..000000000 --- a/tests/test_expected_exceptions.py +++ /dev/null @@ -1,66 +0,0 @@ - -# Copyright 2012 OpenStack Foundation -# Copyright 2013 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo import messaging -from oslo_messaging.tests import utils as test_utils - - -class TestExpectedExceptions(test_utils.BaseTestCase): - - def test_exception(self): - e = None - try: - try: - raise ValueError() - except Exception: - raise messaging.ExpectedException() - except messaging.ExpectedException as e: - self.assertIsInstance(e, messaging.ExpectedException) - self.assertTrue(hasattr(e, 'exc_info')) - self.assertIsInstance(e.exc_info[1], ValueError) - - def test_decorator_expected(self): - class FooException(Exception): - pass - - @messaging.expected_exceptions(FooException) - def naughty(): - raise FooException() - - self.assertRaises(messaging.ExpectedException, naughty) - - def test_decorator_expected_subclass(self): - class FooException(Exception): - pass - - class BarException(FooException): - pass - - @messaging.expected_exceptions(FooException) - def naughty(): - raise BarException() - - self.assertRaises(messaging.ExpectedException, naughty) - - def test_decorator_unexpected(self): - class FooException(Exception): - pass - - @messaging.expected_exceptions(FooException) - def really_naughty(): - raise ValueError() - - self.assertRaises(ValueError, really_naughty) diff --git a/tests/test_target.py b/tests/test_target.py deleted file mode 100644 index 68f98f4d7..000000000 --- a/tests/test_target.py +++ /dev/null @@ -1,177 +0,0 @@ - -# Copyright 2013 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - -import testscenarios - -from oslo import messaging -from oslo_messaging.tests import utils as test_utils - -load_tests = testscenarios.load_tests_apply_scenarios - - -class TargetConstructorTestCase(test_utils.BaseTestCase): - - scenarios = [ - ('all_none', dict(kwargs=dict())), - ('exchange', dict(kwargs=dict(exchange='testexchange'))), - ('topic', dict(kwargs=dict(topic='testtopic'))), - ('namespace', dict(kwargs=dict(namespace='testnamespace'))), - ('version', dict(kwargs=dict(version='3.4'))), - ('server', dict(kwargs=dict(server='testserver'))), - ('fanout', dict(kwargs=dict(fanout=True))), - ] - - def test_constructor(self): - target = messaging.Target(**self.kwargs) - for k in self.kwargs: - self.assertEqual(self.kwargs[k], getattr(target, k)) - for k in ['exchange', 'topic', 'namespace', - 'version', 'server', 'fanout']: - if k in self.kwargs: - continue - self.assertIsNone(getattr(target, k)) - - -class TargetCallableTestCase(test_utils.BaseTestCase): - - scenarios = [ - ('all_none', dict(attrs=dict(), kwargs=dict(), vals=dict())), - ('exchange_attr', dict(attrs=dict(exchange='testexchange'), - kwargs=dict(), - vals=dict(exchange='testexchange'))), - ('exchange_arg', dict(attrs=dict(), - kwargs=dict(exchange='testexchange'), - vals=dict(exchange='testexchange'))), - ('topic_attr', dict(attrs=dict(topic='testtopic'), - kwargs=dict(), - vals=dict(topic='testtopic'))), - ('topic_arg', dict(attrs=dict(), - kwargs=dict(topic='testtopic'), - vals=dict(topic='testtopic'))), - ('namespace_attr', dict(attrs=dict(namespace='testnamespace'), - kwargs=dict(), - vals=dict(namespace='testnamespace'))), - ('namespace_arg', dict(attrs=dict(), - kwargs=dict(namespace='testnamespace'), - vals=dict(namespace='testnamespace'))), - ('version_attr', dict(attrs=dict(version='3.4'), - kwargs=dict(), - vals=dict(version='3.4'))), - ('version_arg', dict(attrs=dict(), - kwargs=dict(version='3.4'), - vals=dict(version='3.4'))), - ('server_attr', dict(attrs=dict(server='testserver'), - kwargs=dict(), - vals=dict(server='testserver'))), - ('server_arg', dict(attrs=dict(), - kwargs=dict(server='testserver'), - vals=dict(server='testserver'))), - ('fanout_attr', dict(attrs=dict(fanout=True), - kwargs=dict(), - vals=dict(fanout=True))), - ('fanout_arg', dict(attrs=dict(), - kwargs=dict(fanout=True), - vals=dict(fanout=True))), - ] - - def test_callable(self): - target = messaging.Target(**self.attrs) - target = target(**self.kwargs) - for k in self.vals: - self.assertEqual(self.vals[k], getattr(target, k)) - for k in ['exchange', 'topic', 'namespace', - 'version', 'server', 'fanout']: - if k in self.vals: - continue - self.assertIsNone(getattr(target, k)) - - -class TargetReprTestCase(test_utils.BaseTestCase): - - scenarios = [ - ('all_none', dict(kwargs=dict(), repr='')), - ('exchange', dict(kwargs=dict(exchange='testexchange'), - repr='exchange=testexchange')), - ('topic', dict(kwargs=dict(topic='testtopic'), - repr='topic=testtopic')), - ('namespace', dict(kwargs=dict(namespace='testnamespace'), - repr='namespace=testnamespace')), - ('version', dict(kwargs=dict(version='3.4'), - repr='version=3.4')), - ('server', dict(kwargs=dict(server='testserver'), - repr='server=testserver')), - ('fanout', dict(kwargs=dict(fanout=True), - repr='fanout=True')), - ('exchange_and_fanout', dict(kwargs=dict(exchange='testexchange', - fanout=True), - repr='exchange=testexchange, ' - 'fanout=True')), - ] - - def 
test_repr(self): - target = messaging.Target(**self.kwargs) - self.assertEqual('', str(target)) - - -_notset = object() - - -class EqualityTestCase(test_utils.BaseTestCase): - - @classmethod - def generate_scenarios(cls): - attr = [ - ('exchange', dict(attr='exchange')), - ('topic', dict(attr='topic')), - ('namespace', dict(attr='namespace')), - ('version', dict(attr='version')), - ('server', dict(attr='server')), - ('fanout', dict(attr='fanout')), - ] - a = [ - ('a_notset', dict(a_value=_notset)), - ('a_none', dict(a_value=None)), - ('a_empty', dict(a_value='')), - ('a_foo', dict(a_value='foo')), - ('a_bar', dict(a_value='bar')), - ] - b = [ - ('b_notset', dict(b_value=_notset)), - ('b_none', dict(b_value=None)), - ('b_empty', dict(b_value='')), - ('b_foo', dict(b_value='foo')), - ('b_bar', dict(b_value='bar')), - ] - - cls.scenarios = testscenarios.multiply_scenarios(attr, a, b) - for s in cls.scenarios: - s[1]['equals'] = (s[1]['a_value'] == s[1]['b_value']) - - def test_equality(self): - a_kwargs = {self.attr: self.a_value} - b_kwargs = {self.attr: self.b_value} - - a = messaging.Target(**a_kwargs) - b = messaging.Target(**b_kwargs) - - if self.equals: - self.assertEqual(a, b) - self.assertFalse(a != b) - else: - self.assertNotEqual(a, b) - self.assertFalse(a == b) - - -EqualityTestCase.generate_scenarios() diff --git a/tests/test_transport.py b/tests/test_transport.py deleted file mode 100644 index a3b5b9128..000000000 --- a/tests/test_transport.py +++ /dev/null @@ -1,367 +0,0 @@ - -# Copyright 2013 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import fixtures -from mox3 import mox -import six -from stevedore import driver -import testscenarios - -from oslo import messaging -from oslo.messaging import transport -from oslo_config import cfg -from oslo_messaging.tests import utils as test_utils -from oslo_messaging import transport as private_transport - -load_tests = testscenarios.load_tests_apply_scenarios - - -class _FakeDriver(object): - - def __init__(self, conf): - self.conf = conf - - def send(self, *args, **kwargs): - pass - - def send_notification(self, *args, **kwargs): - pass - - def listen(self, target): - pass - - -class _FakeManager(object): - - def __init__(self, driver): - self.driver = driver - - -class GetTransportTestCase(test_utils.BaseTestCase): - - scenarios = [ - ('rpc_backend', - dict(url=None, transport_url=None, rpc_backend='testbackend', - control_exchange=None, allowed=None, aliases=None, - expect=dict(backend='testbackend', - exchange=None, - url='testbackend:', - allowed=[]))), - ('transport_url', - dict(url=None, transport_url='testtransport:', rpc_backend=None, - control_exchange=None, allowed=None, aliases=None, - expect=dict(backend='testtransport', - exchange=None, - url='testtransport:', - allowed=[]))), - ('url_param', - dict(url='testtransport:', transport_url=None, rpc_backend=None, - control_exchange=None, allowed=None, aliases=None, - expect=dict(backend='testtransport', - exchange=None, - url='testtransport:', - allowed=[]))), - ('control_exchange', - dict(url=None, transport_url=None, rpc_backend='testbackend', - control_exchange='testexchange', allowed=None, aliases=None, - expect=dict(backend='testbackend', - exchange='testexchange', - url='testbackend:', - allowed=[]))), - ('allowed_remote_exmods', - dict(url=None, transport_url=None, rpc_backend='testbackend', - control_exchange=None, allowed=['foo', 'bar'], aliases=None, - expect=dict(backend='testbackend', - exchange=None, - url='testbackend:', - allowed=['foo', 'bar']))), - ('rpc_backend_aliased', - dict(url=None, transport_url=None, rpc_backend='testfoo', - control_exchange=None, allowed=None, - aliases=dict(testfoo='testbackend'), - expect=dict(backend='testbackend', - exchange=None, - url='testbackend:', - allowed=[]))), - ('transport_url_aliased', - dict(url=None, transport_url='testfoo:', rpc_backend=None, - control_exchange=None, allowed=None, - aliases=dict(testfoo='testtransport'), - expect=dict(backend='testtransport', - exchange=None, - url='testtransport:', - allowed=[]))), - ('url_param_aliased', - dict(url='testfoo:', transport_url=None, rpc_backend=None, - control_exchange=None, allowed=None, - aliases=dict(testfoo='testtransport'), - expect=dict(backend='testtransport', - exchange=None, - url='testtransport:', - allowed=[]))), - ] - - def test_get_transport(self): - self.config(rpc_backend=self.rpc_backend, - control_exchange=self.control_exchange, - transport_url=self.transport_url) - - self.mox.StubOutWithMock(driver, 'DriverManager') - - invoke_args = [self.conf, - messaging.TransportURL.parse(self.conf, - self.expect['url'])] - invoke_kwds = dict(default_exchange=self.expect['exchange'], - allowed_remote_exmods=self.expect['allowed']) - - drvr = _FakeDriver(self.conf) - driver.DriverManager('oslo.messaging.drivers', - self.expect['backend'], - invoke_on_load=True, - invoke_args=invoke_args, - invoke_kwds=invoke_kwds).\ - AndReturn(_FakeManager(drvr)) - - self.mox.ReplayAll() - - kwargs = dict(url=self.url) - if self.allowed is not None: - kwargs['allowed_remote_exmods'] = self.allowed - if self.aliases is not 
None: - kwargs['aliases'] = self.aliases - transport_ = messaging.get_transport(self.conf, **kwargs) - - self.assertIsNotNone(transport_) - self.assertIs(transport_.conf, self.conf) - self.assertIs(transport_._driver, drvr) - - -class GetTransportSadPathTestCase(test_utils.BaseTestCase): - - scenarios = [ - ('invalid_transport_url', - dict(url=None, transport_url='invalid', rpc_backend=None, - ex=dict(cls=messaging.InvalidTransportURL, - msg_contains='No scheme specified', - url='invalid'))), - ('invalid_url_param', - dict(url='invalid', transport_url=None, rpc_backend=None, - ex=dict(cls=messaging.InvalidTransportURL, - msg_contains='No scheme specified', - url='invalid'))), - ('driver_load_failure', - dict(url=None, transport_url=None, rpc_backend='testbackend', - ex=dict(cls=messaging.DriverLoadFailure, - msg_contains='Failed to load', - driver='testbackend'))), - ] - - def test_get_transport_sad(self): - self.config(rpc_backend=self.rpc_backend, - transport_url=self.transport_url) - - if self.rpc_backend: - self.mox.StubOutWithMock(driver, 'DriverManager') - - invoke_args = [self.conf, - messaging.TransportURL.parse(self.conf, - self.url)] - invoke_kwds = dict(default_exchange='openstack', - allowed_remote_exmods=[]) - - driver.DriverManager('oslo.messaging.drivers', - self.rpc_backend, - invoke_on_load=True, - invoke_args=invoke_args, - invoke_kwds=invoke_kwds).\ - AndRaise(RuntimeError()) - - self.mox.ReplayAll() - - try: - messaging.get_transport(self.conf, url=self.url) - self.assertFalse(True) - except Exception as ex: - ex_cls = self.ex.pop('cls') - ex_msg_contains = self.ex.pop('msg_contains') - - self.assertIsInstance(ex, messaging.MessagingException) - self.assertIsInstance(ex, ex_cls) - self.assertIn(ex_msg_contains, six.text_type(ex)) - - for k, v in self.ex.items(): - self.assertTrue(hasattr(ex, k)) - self.assertEqual(v, str(getattr(ex, k))) - - -# FIXME(markmc): this could be used elsewhere -class _SetDefaultsFixture(fixtures.Fixture): - - def __init__(self, set_defaults, opts, *names): - super(_SetDefaultsFixture, self).__init__() - self.set_defaults = set_defaults - self.opts = opts - self.names = names - - def setUp(self): - super(_SetDefaultsFixture, self).setUp() - - # FIXME(markmc): this comes from Id5c1f3ba - def first(seq, default=None, key=None): - if key is None: - key = bool - return next(six.moves.filter(key, seq), default) - - def default(opts, name): - return first(opts, key=lambda o: o.name == name).default - - orig_defaults = {} - for n in self.names: - orig_defaults[n] = default(self.opts, n) - - def restore_defaults(): - self.set_defaults(**orig_defaults) - - self.addCleanup(restore_defaults) - - -class TestSetDefaults(test_utils.BaseTestCase): - - def setUp(self): - super(TestSetDefaults, self).setUp(conf=cfg.ConfigOpts()) - self.useFixture(_SetDefaultsFixture(messaging.set_transport_defaults, - private_transport._transport_opts, - 'control_exchange')) - - def test_set_default_control_exchange(self): - messaging.set_transport_defaults(control_exchange='foo') - - self.mox.StubOutWithMock(driver, 'DriverManager') - invoke_kwds = mox.ContainsKeyValue('default_exchange', 'foo') - driver.DriverManager(mox.IgnoreArg(), - mox.IgnoreArg(), - invoke_on_load=mox.IgnoreArg(), - invoke_args=mox.IgnoreArg(), - invoke_kwds=invoke_kwds).\ - AndReturn(_FakeManager(_FakeDriver(self.conf))) - self.mox.ReplayAll() - - messaging.get_transport(self.conf) - - -class TestTransportMethodArgs(test_utils.BaseTestCase): - - _target = messaging.Target(topic='topic', 
server='server') - - def test_send_defaults(self): - t = transport.Transport(_FakeDriver(cfg.CONF)) - - self.mox.StubOutWithMock(t._driver, 'send') - t._driver.send(self._target, 'ctxt', 'message', - wait_for_reply=None, - timeout=None, retry=None) - self.mox.ReplayAll() - - t._send(self._target, 'ctxt', 'message') - - def test_send_all_args(self): - t = transport.Transport(_FakeDriver(cfg.CONF)) - - self.mox.StubOutWithMock(t._driver, 'send') - t._driver.send(self._target, 'ctxt', 'message', - wait_for_reply='wait_for_reply', - timeout='timeout', retry='retry') - self.mox.ReplayAll() - - t._send(self._target, 'ctxt', 'message', - wait_for_reply='wait_for_reply', - timeout='timeout', retry='retry') - - def test_send_notification(self): - t = transport.Transport(_FakeDriver(cfg.CONF)) - - self.mox.StubOutWithMock(t._driver, 'send_notification') - t._driver.send_notification(self._target, 'ctxt', 'message', 1.0, - retry=None) - self.mox.ReplayAll() - - t._send_notification(self._target, 'ctxt', 'message', version=1.0) - - def test_send_notification_all_args(self): - t = transport.Transport(_FakeDriver(cfg.CONF)) - - self.mox.StubOutWithMock(t._driver, 'send_notification') - t._driver.send_notification(self._target, 'ctxt', 'message', 1.0, - retry=5) - self.mox.ReplayAll() - - t._send_notification(self._target, 'ctxt', 'message', version=1.0, - retry=5) - - def test_listen(self): - t = transport.Transport(_FakeDriver(cfg.CONF)) - - self.mox.StubOutWithMock(t._driver, 'listen') - t._driver.listen(self._target) - self.mox.ReplayAll() - - t._listen(self._target) - - -class TestTransportUrlCustomisation(test_utils.BaseTestCase): - def setUp(self): - super(TestTransportUrlCustomisation, self).setUp() - self.url1 = transport.TransportURL.parse(self.conf, "fake://vhost1") - self.url2 = transport.TransportURL.parse(self.conf, "fake://vhost2") - self.url3 = transport.TransportURL.parse(self.conf, "fake://vhost1") - - def test_hash(self): - urls = {} - urls[self.url1] = self.url1 - urls[self.url2] = self.url2 - urls[self.url3] = self.url3 - self.assertEqual(2, len(urls)) - - def test_eq(self): - self.assertEqual(self.url1, self.url3) - self.assertNotEqual(self.url1, self.url2) - - -class TestTransportHostCustomisation(test_utils.BaseTestCase): - def setUp(self): - super(TestTransportHostCustomisation, self).setUp() - self.host1 = transport.TransportHost("host1", 5662, "user", "pass") - self.host2 = transport.TransportHost("host1", 5662, "user", "pass") - self.host3 = transport.TransportHost("host1", 5663, "user", "pass") - self.host4 = transport.TransportHost("host1", 5662, "user2", "pass") - self.host5 = transport.TransportHost("host1", 5662, "user", "pass2") - self.host6 = transport.TransportHost("host2", 5662, "user", "pass") - - def test_hash(self): - hosts = {} - hosts[self.host1] = self.host1 - hosts[self.host2] = self.host2 - hosts[self.host3] = self.host3 - hosts[self.host4] = self.host4 - hosts[self.host5] = self.host5 - hosts[self.host6] = self.host6 - self.assertEqual(5, len(hosts)) - - def test_eq(self): - self.assertEqual(self.host1, self.host2) - self.assertNotEqual(self.host1, self.host3) - self.assertNotEqual(self.host1, self.host4) - self.assertNotEqual(self.host1, self.host5) - self.assertNotEqual(self.host1, self.host6) diff --git a/tests/test_urls.py b/tests/test_urls.py deleted file mode 100644 index 956274201..000000000 --- a/tests/test_urls.py +++ /dev/null @@ -1,236 +0,0 @@ - -# Copyright 2013 Red Hat, Inc. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import testscenarios - -from oslo import messaging -from oslo_messaging.tests import utils as test_utils - -load_tests = testscenarios.load_tests_apply_scenarios - - -class TestParseURL(test_utils.BaseTestCase): - - scenarios = [ - ('transport', - dict(url='foo:', aliases=None, - expect=dict(transport='foo'))), - ('transport_aliased', - dict(url='bar:', aliases=dict(bar='foo'), - expect=dict(transport='foo'))), - ('virtual_host_slash', - dict(url='foo:////', aliases=None, - expect=dict(transport='foo', virtual_host='/'))), - ('virtual_host', - dict(url='foo:///bar', aliases=None, - expect=dict(transport='foo', virtual_host='bar'))), - ('host', - dict(url='foo://host/bar', aliases=None, - expect=dict(transport='foo', - virtual_host='bar', - hosts=[ - dict(host='host'), - ]))), - ('ipv6_host', - dict(url='foo://[ffff::1]/bar', aliases=None, - expect=dict(transport='foo', - virtual_host='bar', - hosts=[ - dict(host='ffff::1'), - ]))), - ('port', - dict(url='foo://host:1234/bar', aliases=None, - expect=dict(transport='foo', - virtual_host='bar', - hosts=[ - dict(host='host', port=1234), - ]))), - ('ipv6_port', - dict(url='foo://[ffff::1]:1234/bar', aliases=None, - expect=dict(transport='foo', - virtual_host='bar', - hosts=[ - dict(host='ffff::1', port=1234), - ]))), - ('username', - dict(url='foo://u@host:1234/bar', aliases=None, - expect=dict(transport='foo', - virtual_host='bar', - hosts=[ - dict(host='host', port=1234, username='u'), - ]))), - ('password', - dict(url='foo://u:p@host:1234/bar', aliases=None, - expect=dict(transport='foo', - virtual_host='bar', - hosts=[ - dict(host='host', port=1234, - username='u', password='p'), - ]))), - ('creds_no_host', - dict(url='foo://u:p@/bar', aliases=None, - expect=dict(transport='foo', - virtual_host='bar', - hosts=[ - dict(username='u', password='p'), - ]))), - ('multi_host', - dict(url='foo://u:p@host1:1234,host2:4321/bar', aliases=None, - expect=dict(transport='foo', - virtual_host='bar', - hosts=[ - dict(host='host1', port=1234, - username='u', password='p'), - dict(host='host2', port=4321), - ]))), - ('multi_creds', - dict(url='foo://u1:p1@host1:1234,u2:p2@host2:4321/bar', aliases=None, - expect=dict(transport='foo', - virtual_host='bar', - hosts=[ - dict(host='host1', port=1234, - username='u1', password='p1'), - dict(host='host2', port=4321, - username='u2', password='p2'), - ]))), - ('multi_creds_ipv6', - dict(url='foo://u1:p1@[ffff::1]:1234,u2:p2@[ffff::2]:4321/bar', - aliases=None, - expect=dict(transport='foo', - virtual_host='bar', - hosts=[ - dict(host='ffff::1', port=1234, - username='u1', password='p1'), - dict(host='ffff::2', port=4321, - username='u2', password='p2'), - ]))), - ] - - def test_parse_url(self): - self.config(rpc_backend=None) - - url = messaging.TransportURL.parse(self.conf, self.url, self.aliases) - - hosts = [] - for host in self.expect.get('hosts', []): - hosts.append(messaging.TransportHost(host.get('host'), - host.get('port'), - host.get('username'), - 
host.get('password'))) - expected = messaging.TransportURL(self.conf, - self.expect.get('transport'), - self.expect.get('virtual_host'), - hosts) - - self.assertEqual(expected, url) - - -class TestFormatURL(test_utils.BaseTestCase): - - scenarios = [ - ('rpc_backend', - dict(rpc_backend='testbackend', - transport=None, - virtual_host=None, - hosts=[], - aliases=None, - expected='testbackend:///')), - ('rpc_backend_aliased', - dict(rpc_backend='testfoo', - transport=None, - virtual_host=None, - hosts=[], - aliases=dict(testfoo='testbackend'), - expected='testbackend:///')), - ('transport', - dict(rpc_backend=None, - transport='testtransport', - virtual_host=None, - hosts=[], - aliases=None, - expected='testtransport:///')), - ('transport_aliased', - dict(rpc_backend=None, - transport='testfoo', - virtual_host=None, - hosts=[], - aliases=dict(testfoo='testtransport'), - expected='testtransport:///')), - ('virtual_host', - dict(rpc_backend=None, - transport='testtransport', - virtual_host='/vhost', - hosts=[], - aliases=None, - expected='testtransport:////vhost')), - ('host', - dict(rpc_backend=None, - transport='testtransport', - virtual_host='/', - hosts=[ - dict(hostname='host', - port=10, - username='bob', - password='secret'), - ], - aliases=None, - expected='testtransport://bob:secret@host:10//')), - ('multi_host', - dict(rpc_backend=None, - transport='testtransport', - virtual_host='', - hosts=[ - dict(hostname='h1', - port=1000, - username='b1', - password='s1'), - dict(hostname='h2', - port=2000, - username='b2', - password='s2'), - ], - aliases=None, - expected='testtransport://b1:s1@h1:1000,b2:s2@h2:2000/')), - ('quoting', - dict(rpc_backend=None, - transport='testtransport', - virtual_host='/$', - hosts=[ - dict(hostname='host', - port=10, - username='b$', - password='s&'), - ], - aliases=None, - expected='testtransport://b%24:s%26@host:10//%24')), - ] - - def test_parse_url(self): - self.config(rpc_backend=self.rpc_backend) - - hosts = [] - for host in self.hosts: - hosts.append(messaging.TransportHost(host.get('hostname'), - host.get('port'), - host.get('username'), - host.get('password'))) - - url = messaging.TransportURL(self.conf, - self.transport, - self.virtual_host, - hosts, - self.aliases) - - self.assertEqual(self.expected, str(url)) diff --git a/tests/test_warning.py b/tests/test_warning.py deleted file mode 100644 index 589ed88d1..000000000 --- a/tests/test_warning.py +++ /dev/null @@ -1,61 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import imp -import os -import warnings - -from oslotest import base as test_base -import six -from six.moves import mock - - -class DeprecationWarningTest(test_base.BaseTestCase): - - @mock.patch('warnings.warn') - def test_warning(self, mock_warn): - import oslo.messaging - imp.reload(oslo.messaging) - self.assertTrue(mock_warn.called) - args = mock_warn.call_args - self.assertIn('oslo_messaging', args[0][0]) - self.assertIn('deprecated', args[0][0]) - self.assertTrue(issubclass(args[0][1], DeprecationWarning)) - - def test_real_warning(self): - with warnings.catch_warnings(record=True) as warning_msgs: - warnings.resetwarnings() - warnings.simplefilter('always', DeprecationWarning) - import oslo.messaging - - # Use a separate function to get the stack level correct - # so we know the message points back to this file. This - # corresponds to an import or reload, which isn't working - # inside the test under Python 3.3. That may be due to a - # difference in the import implementation not triggering - # warnings properly when the module is reloaded, or - # because the warnings module is mostly implemented in C - # and something isn't cleanly resetting the global state - # used to track whether a warning needs to be - # emitted. Whatever the cause, we definitely see the - # warnings.warn() being invoked on a reload (see the test - # above) and warnings are reported on the console when we - # run the tests. A simpler test script run outside of - # testr does correctly report the warnings. - def foo(): - oslo.messaging.deprecated() - - foo() - self.assertEqual(1, len(warning_msgs)) - msg = warning_msgs[0] - self.assertIn('oslo_messaging', six.text_type(msg.message)) - self.assertEqual('test_warning.py', os.path.basename(msg.filename)) From ed2d60ff011560078b3bfa6a5097f6e72af6b5dc Mon Sep 17 00:00:00 2001 From: Victor Sergeyev Date: Thu, 30 Jul 2015 17:08:37 +0300 Subject: [PATCH 20/28] ZMQ: `Lazify` driver code Some OpenStack services (e.g. Glance) fork worker processes, so it makes sense to initialize socket- and thread-related state `on demand` rather than in __init__().
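To make the idea behind this change concrete, here is a minimal, illustrative sketch of the `lazify` pattern under the same assumption the patch makes (a process may fork after constructing messaging objects). The class and names below are hypothetical and not part of oslo.messaging; only pyzmq calls that exist (Context, socket, connect, send_json) are used.

    import threading

    import zmq  # pyzmq; eventlet.green.zmq could be substituted under eventlet


    class LazyPublisher(object):
        """Hypothetical publisher that defers ZeroMQ setup until first send."""

        def __init__(self, address):
            # Safe to construct before the service forks: no context,
            # socket or thread is created here.
            self.address = address
            self._socket = None
            self._lock = threading.Lock()

        def _get_socket(self):
            # Created once, on demand, in the process that actually sends,
            # i.e. after any fork has already happened.
            if self._socket is None:
                with self._lock:
                    if self._socket is None:
                        socket = zmq.Context().socket(zmq.DEALER)
                        socket.connect(self.address)
                        self._socket = socket
            return self._socket

        def send(self, message):
            self._get_socket().send_json(message)

The patch below applies the same principle: GreenPoller tracks one receive thread per registered socket so repeated registration does not spawn duplicates, CastPublisherBase no longer creates a zmq.Context() in __init__(), and ZmqServer registers its socket with the poller from poll() instead of at construction time.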
Change-Id: Ie2012b31df86049cc841a0aaed16e6b879e0bcec --- .../_drivers/zmq_driver/poller/green_poller.py | 12 ++++++------ .../zmq_driver/rpc/client/zmq_cast_dealer.py | 2 +- .../zmq_driver/rpc/client/zmq_cast_publisher.py | 9 --------- .../_drivers/zmq_driver/rpc/server/zmq_server.py | 2 +- 4 files changed, 8 insertions(+), 17 deletions(-) diff --git a/oslo_messaging/_drivers/zmq_driver/poller/green_poller.py b/oslo_messaging/_drivers/zmq_driver/poller/green_poller.py index 72429f1f1..dcf9da588 100644 --- a/oslo_messaging/_drivers/zmq_driver/poller/green_poller.py +++ b/oslo_messaging/_drivers/zmq_driver/poller/green_poller.py @@ -29,12 +29,12 @@ class GreenPoller(zmq_poller.ZmqPoller): def __init__(self): self.incoming_queue = six.moves.queue.Queue() self.green_pool = eventlet.GreenPool() - self.threads = [] + self.thread_by_socket = {} def register(self, socket, recv_method=None): - self.threads.append( - self.green_pool.spawn(self._socket_receive, socket, - recv_method)) + if socket not in self.thread_by_socket: + self.thread_by_socket[socket] = self.green_pool.spawn( + self._socket_receive, socket, recv_method) def _socket_receive(self, socket, recv_method=None): while True: @@ -59,10 +59,10 @@ class GreenPoller(zmq_poller.ZmqPoller): return incoming[0], incoming[1] def close(self): - for thread in self.threads: + for thread in self.thread_by_socket.values(): thread.kill() - self.threads = [] + self.thread_by_socket = {} class HoldReplyPoller(GreenPoller): diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_dealer.py b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_dealer.py index 379d8ef3a..f1257badb 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_dealer.py +++ b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_dealer.py @@ -67,7 +67,7 @@ class DealerCastPublisher(zmq_cast_publisher.CastPublisherBase): if str(target) in self.outbound_sockets: dealer_socket, hosts = self.outbound_sockets[str(target)] else: - dealer_socket = self.zmq_context.socket(zmq.DEALER) + dealer_socket = zmq.Context().socket(zmq.DEALER) hosts = self.matchmaker.get_hosts(target) for host in hosts: self._connect_to_host(dealer_socket, host) diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_publisher.py b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_publisher.py index 098454524..38a470ba8 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_publisher.py +++ b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_publisher.py @@ -13,24 +13,15 @@ # under the License. 
import abc -import logging import six -from oslo_messaging._drivers.zmq_driver import zmq_async - - -LOG = logging.getLogger(__name__) - -zmq = zmq_async.import_zmq() - @six.add_metaclass(abc.ABCMeta) class CastPublisherBase(object): def __init__(self, conf): self.conf = conf - self.zmq_context = zmq.Context() self.outbound_sockets = {} super(CastPublisherBase, self).__init__() diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_server.py b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_server.py index 17b04e86c..981966ddc 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_server.py +++ b/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_server.py @@ -44,10 +44,10 @@ class ZmqServer(base.Listener): raise rpc_common.RPCException(errmsg) self.poller = zmq_async.get_poller() - self.poller.register(self.socket, self._receive_message) self.matchmaker = matchmaker def poll(self, timeout=None): + self.poller.register(self.socket, self._receive_message) incoming = self.poller.poll(timeout or self.conf.rpc_poll_timeout) return incoming[0] From dec09ae5ffbe2de415fede0947ad1cb887574e2e Mon Sep 17 00:00:00 2001 From: Doug Royal Date: Wed, 22 Jul 2015 14:29:37 -0500 Subject: [PATCH 21/28] Add unit tests for zmq_async Change option from boolean zmq_native to string zmq_concurrency. This eliminates ambiguity by requiring the user to explicitly name the mechanism they want to use for concurrency. Change-Id: I341a3eee73a0449716d3ee0df690bbe6af39bdf0 --- oslo_messaging/_drivers/impl_zmq.py | 6 +- .../_drivers/zmq_driver/zmq_async.py | 64 ++++--- .../tests/drivers/zmq/test_zmq_async.py | 170 ++++++++++++++++++ 3 files changed, 210 insertions(+), 30 deletions(-) create mode 100644 oslo_messaging/tests/drivers/zmq/test_zmq_async.py diff --git a/oslo_messaging/_drivers/impl_zmq.py b/oslo_messaging/_drivers/impl_zmq.py index 064fe7c60..18086eb43 100644 --- a/oslo_messaging/_drivers/impl_zmq.py +++ b/oslo_messaging/_drivers/impl_zmq.py @@ -48,10 +48,8 @@ zmq_opts = [ default=True, help='Use REQ/REP pattern for all methods CALL/CAST/FANOUT.'), - cfg.BoolOpt('rpc_zmq_native', - default=False, - help='Switches ZeroMQ eventlet/threading way of usage.' - 'Affects pollers, executors etc.'), + cfg.StrOpt('rpc_zmq_concurrency', default='eventlet', + help='Type of concurrency used. Either "native" or "eventlet"'), # The following port is unassigned by IANA as of 2012-05-21 cfg.IntOpt('rpc_zmq_port', default=9501, diff --git a/oslo_messaging/_drivers/zmq_driver/zmq_async.py b/oslo_messaging/_drivers/zmq_driver/zmq_async.py index 261746392..7f437fd82 100644 --- a/oslo_messaging/_drivers/zmq_driver/zmq_async.py +++ b/oslo_messaging/_drivers/zmq_driver/zmq_async.py @@ -14,20 +14,25 @@ import logging +from oslo_messaging._drivers.zmq_driver.poller import green_poller +from oslo_messaging._drivers.zmq_driver.poller import threading_poller +from oslo_messaging._i18n import _, _LE from oslo_utils import importutils -from oslo_messaging._i18n import _LE - LOG = logging.getLogger(__name__) -green_zmq = importutils.try_import('eventlet.green.zmq') +# Map zmq_concurrency config option names to the actual module name. 
+ZMQ_MODULES = { + 'native': 'zmq', + 'eventlet': 'eventlet.green.zmq', +} -def import_zmq(native_zmq=False): - if native_zmq: - imported_zmq = importutils.try_import('zmq') - else: - imported_zmq = green_zmq or importutils.try_import('zmq') +def import_zmq(zmq_concurrency='eventlet'): + _raise_error_if_invalid_config_value(zmq_concurrency) + + imported_zmq = importutils.try_import(ZMQ_MODULES[zmq_concurrency], + default='zmq') if imported_zmq is None: errmsg = _LE("ZeroMQ not found!") @@ -36,28 +41,35 @@ def import_zmq(native_zmq=False): return imported_zmq -def get_poller(native_zmq=False): - if native_zmq or green_zmq is None: - from oslo_messaging._drivers.zmq_driver.poller import threading_poller - return threading_poller.ThreadingPoller() - else: - from oslo_messaging._drivers.zmq_driver.poller import green_poller +def get_poller(zmq_concurrency='eventlet'): + _raise_error_if_invalid_config_value(zmq_concurrency) + + if zmq_concurrency == 'eventlet' and _is_eventlet_zmq_available(): return green_poller.GreenPoller() + return threading_poller.ThreadingPoller() -def get_reply_poller(native_zmq=False): - if native_zmq or green_zmq is None: - from oslo_messaging._drivers.zmq_driver.poller import threading_poller - return threading_poller.ThreadingPoller() - else: - from oslo_messaging._drivers.zmq_driver.poller import green_poller +def get_reply_poller(zmq_concurrency='eventlet'): + _raise_error_if_invalid_config_value(zmq_concurrency) + + if zmq_concurrency == 'eventlet' and _is_eventlet_zmq_available(): return green_poller.HoldReplyPoller() + return threading_poller.ThreadingPoller() -def get_executor(method, native_zmq=False): - if native_zmq or green_zmq is None: - from oslo_messaging._drivers.zmq_driver.poller import threading_poller - return threading_poller.ThreadingExecutor(method) - else: - from oslo_messaging._drivers.zmq_driver.poller import green_poller +def get_executor(method, zmq_concurrency='eventlet'): + _raise_error_if_invalid_config_value(zmq_concurrency) + + if zmq_concurrency == 'eventlet' and _is_eventlet_zmq_available(): return green_poller.GreenExecutor(method) + return threading_poller.ThreadingExecutor(method) + + +def _is_eventlet_zmq_available(): + return importutils.try_import('eventlet.green.zmq') + + +def _raise_error_if_invalid_config_value(zmq_concurrency): + if zmq_concurrency not in ZMQ_MODULES: + errmsg = _('Invalid zmq_concurrency value: %s') + raise ValueError(errmsg % zmq_concurrency) diff --git a/oslo_messaging/tests/drivers/zmq/test_zmq_async.py b/oslo_messaging/tests/drivers/zmq/test_zmq_async.py new file mode 100644 index 000000000..28e091a0e --- /dev/null +++ b/oslo_messaging/tests/drivers/zmq/test_zmq_async.py @@ -0,0 +1,170 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import mock + +from oslo_messaging._drivers.zmq_driver.poller import green_poller +from oslo_messaging._drivers.zmq_driver.poller import threading_poller +from oslo_messaging._drivers.zmq_driver import zmq_async +from oslo_messaging.tests import utils as test_utils + + +class TestImportZmq(test_utils.BaseTestCase): + + def setUp(self): + super(TestImportZmq, self).setUp() + + def test_config_short_names_are_converted_to_correct_module_names(self): + mock_try_import = mock.Mock() + zmq_async.importutils.try_import = mock_try_import + + zmq_async.importutils.try_import.return_value = 'mock zmq module' + self.assertEqual('mock zmq module', zmq_async.import_zmq('native')) + mock_try_import.assert_called_with('zmq', default='zmq') + + zmq_async.importutils.try_import.return_value = 'mock eventlet module' + self.assertEqual('mock eventlet module', + zmq_async.import_zmq('eventlet')) + mock_try_import.assert_called_with('eventlet.green.zmq', default='zmq') + + def test_when_no_args_then_default_zmq_module_is_loaded(self): + mock_try_import = mock.Mock() + zmq_async.importutils.try_import = mock_try_import + + zmq_async.import_zmq() + + mock_try_import.assert_called_with('eventlet.green.zmq', default='zmq') + + def test_when_import_fails_then_raise_ImportError(self): + zmq_async.importutils.try_import = mock.Mock() + zmq_async.importutils.try_import.return_value = None + + with self.assertRaisesRegexp(ImportError, "ZeroMQ not found!"): + zmq_async.import_zmq('native') + + def test_invalid_config_value_raise_ValueError(self): + invalid_opt = 'x' + + errmsg = 'Invalid zmq_concurrency value: x' + with self.assertRaisesRegexp(ValueError, errmsg): + zmq_async.import_zmq(invalid_opt) + + +class TestGetPoller(test_utils.BaseTestCase): + + def setUp(self): + super(TestGetPoller, self).setUp() + + def test_when_no_arg_to_get_poller_then_return_default_poller(self): + zmq_async._is_eventlet_zmq_available = lambda: True + + actual = zmq_async.get_poller() + + self.assertTrue(isinstance(actual, green_poller.GreenPoller)) + + def test_when_native_poller_requested_then_return_ThreadingPoller(self): + actual = zmq_async.get_poller('native') + + self.assertTrue(isinstance(actual, threading_poller.ThreadingPoller)) + + def test_when_eventlet_is_unavailable_then_return_ThreadingPoller(self): + zmq_async._is_eventlet_zmq_available = lambda: False + + actual = zmq_async.get_poller('eventlet') + + self.assertTrue(isinstance(actual, threading_poller.ThreadingPoller)) + + def test_when_eventlet_is_available_then_return_GreenPoller(self): + zmq_async._is_eventlet_zmq_available = lambda: True + + actual = zmq_async.get_poller('eventlet') + + self.assertTrue(isinstance(actual, green_poller.GreenPoller)) + + def test_invalid_config_value_raise_ValueError(self): + invalid_opt = 'x' + + errmsg = 'Invalid zmq_concurrency value: x' + with self.assertRaisesRegexp(ValueError, errmsg): + zmq_async.get_poller(invalid_opt) + + +class TestGetReplyPoller(test_utils.BaseTestCase): + + def setUp(self): + super(TestGetReplyPoller, self).setUp() + + def test_default_reply_poller_is_HoldReplyPoller(self): + zmq_async._is_eventlet_zmq_available = lambda: True + + actual = zmq_async.get_reply_poller() + + self.assertTrue(isinstance(actual, green_poller.HoldReplyPoller)) + + def test_when_eventlet_is_available_then_return_HoldReplyPoller(self): + zmq_async._is_eventlet_zmq_available = lambda: True + + actual = zmq_async.get_reply_poller('eventlet') + + self.assertTrue(isinstance(actual, green_poller.HoldReplyPoller)) + + def 
test_when_eventlet_is_unavailable_then_return_ThreadingPoller(self): + zmq_async._is_eventlet_zmq_available = lambda: False + + actual = zmq_async.get_reply_poller('eventlet') + + self.assertTrue(isinstance(actual, threading_poller.ThreadingPoller)) + + def test_invalid_config_value_raise_ValueError(self): + invalid_opt = 'x' + + errmsg = 'Invalid zmq_concurrency value: x' + with self.assertRaisesRegexp(ValueError, errmsg): + zmq_async.get_reply_poller(invalid_opt) + + +class TestGetExecutor(test_utils.BaseTestCase): + + def setUp(self): + super(TestGetExecutor, self).setUp() + + def test_default_executor_is_GreenExecutor(self): + zmq_async._is_eventlet_zmq_available = lambda: True + + executor = zmq_async.get_executor('any method') + + self.assertTrue(isinstance(executor, green_poller.GreenExecutor)) + self.assertEqual('any method', executor._method) + + def test_when_eventlet_module_is_available_then_return_GreenExecutor(self): + zmq_async._is_eventlet_zmq_available = lambda: True + + executor = zmq_async.get_executor('any method', 'eventlet') + + self.assertTrue(isinstance(executor, green_poller.GreenExecutor)) + self.assertEqual('any method', executor._method) + + def test_when_eventlet_is_unavailable_then_return_ThreadingExecutor(self): + zmq_async._is_eventlet_zmq_available = lambda: False + + executor = zmq_async.get_executor('any method', 'eventlet') + + self.assertTrue(isinstance(executor, + threading_poller.ThreadingExecutor)) + self.assertEqual('any method', executor._method) + + def test_invalid_config_value_raise_ValueError(self): + invalid_opt = 'x' + + errmsg = 'Invalid zmq_concurrency value: x' + with self.assertRaisesRegexp(ValueError, errmsg): + zmq_async.get_executor('any method', invalid_opt) From 64831f29ee96ea5622e67fa51b1e26c09247fd18 Mon Sep 17 00:00:00 2001 From: Victor Sergeyev Date: Tue, 4 Aug 2015 15:13:14 +0300 Subject: [PATCH 22/28] ZMQ: Minor matchmaker improvement Added more information to logs and prefix to key in matchmaker_redis Change-Id: I5d718c4a84dedc6654e13f9ca740a2deba5a7e43 --- .../_drivers/zmq_driver/matchmaker/base.py | 13 ++++++++----- .../zmq_driver/matchmaker/matchmaker_redis.py | 4 +++- 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/oslo_messaging/_drivers/zmq_driver/matchmaker/base.py b/oslo_messaging/_drivers/zmq_driver/matchmaker/base.py index b42221255..f4480f0be 100644 --- a/oslo_messaging/_drivers/zmq_driver/matchmaker/base.py +++ b/oslo_messaging/_drivers/zmq_driver/matchmaker/base.py @@ -67,12 +67,15 @@ class MatchMakerBase(object): raise oslo_messaging.InvalidTarget(err_msg, target) if len(hosts) == 1: - LOG.info(_LI("A single host found for target %s.") % target) - return hosts[0] + host = hosts[0] + LOG.info(_LI("A single host %(host)s found for target %(target)s.") + % {"host": host, "target": target}) else: - LOG.warning(_LW("Multiple hosts were found for target %s. Using " - "the random one.") % target) - return random.choice(hosts) + host = random.choice(hosts) + LOG.warning(_LW("Multiple hosts %(hosts)s were found for target " + " %(target)s. 
Using the random one - %(host)s.") + % {"hosts": hosts, "target": target, "host": host}) + return host class DummyMatchMaker(MatchMakerBase): diff --git a/oslo_messaging/_drivers/zmq_driver/matchmaker/matchmaker_redis.py b/oslo_messaging/_drivers/zmq_driver/matchmaker/matchmaker_redis.py index a55e5076f..a2ee9bccf 100644 --- a/oslo_messaging/_drivers/zmq_driver/matchmaker/matchmaker_redis.py +++ b/oslo_messaging/_drivers/zmq_driver/matchmaker/matchmaker_redis.py @@ -50,7 +50,9 @@ class RedisMatchMaker(base.MatchMakerBase): def _target_to_key(self, target): attributes = ['topic', 'exchange', 'server'] - return ':'.join((getattr(target, attr) or "*") for attr in attributes) + prefix = "ZMQ-target" + key = ":".join((getattr(target, attr) or "*") for attr in attributes) + return "%s-%s" % (prefix, key) def _get_keys_by_pattern(self, pattern): return self._redis.keys(pattern) From 141f59bd9b9d69a38abc533e4b10e2bf500d0f86 Mon Sep 17 00:00:00 2001 From: Oleksii Zamiatin Date: Wed, 29 Jul 2015 14:55:43 +0300 Subject: [PATCH 23/28] Notifier implementation Notifier implementation for zmq driver (ROUTER/DEALER variant). Publishers/consumers refactoring in order to make them pluggable. Change-Id: I2dd42cc805aa72b929a4dfa17498cd8b9c0ed7af --- oslo_messaging/_drivers/base.py | 2 +- oslo_messaging/_drivers/impl_zmq.py | 21 ++-- .../zmq_driver/{rpc => client}/__init__.py | 0 .../client => client/publishers}/__init__.py | 0 .../client/publishers/zmq_dealer_publisher.py | 77 ++++++++++++++ .../client/publishers/zmq_publisher_base.py | 56 ++++++++++ .../client/publishers/zmq_req_publisher.py | 85 +++++++++++++++ .../_drivers/zmq_driver/client/zmq_client.py | 73 +++++++++++++ .../_drivers/zmq_driver/client/zmq_request.py | 95 +++++++++++++++++ .../_drivers/zmq_driver/notifier/__init__.py | 1 - .../zmq_driver/rpc/client/zmq_call_request.py | 76 ------------- .../zmq_driver/rpc/client/zmq_cast_dealer.py | 100 ------------------ .../rpc/client/zmq_cast_publisher.py | 31 ------ .../zmq_driver/rpc/client/zmq_client.py | 41 ------- .../zmq_driver/rpc/client/zmq_request.py | 66 ------------ .../zmq_driver/{rpc => }/server/__init__.py | 0 .../zmq_driver/server/consumers/__init__.py | 0 .../consumers/zmq_router_consumer.py} | 63 +++++------ .../{rpc => }/server/zmq_incoming_message.py | 29 +++-- .../_drivers/zmq_driver/server/zmq_server.py | 80 ++++++++++++++ .../{zmq_target.py => zmq_address.py} | 0 .../{zmq_serializer.py => zmq_names.py} | 17 ++- .../tests/drivers/zmq/test_impl_zmq.py | 27 ++++- .../tests/functional/test_functional.py | 5 - tools/simulator.py | 8 +- tox.ini | 2 +- 26 files changed, 572 insertions(+), 383 deletions(-) rename oslo_messaging/_drivers/zmq_driver/{rpc => client}/__init__.py (100%) rename oslo_messaging/_drivers/zmq_driver/{rpc/client => client/publishers}/__init__.py (100%) create mode 100644 oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_dealer_publisher.py create mode 100644 oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_publisher_base.py create mode 100644 oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_req_publisher.py create mode 100644 oslo_messaging/_drivers/zmq_driver/client/zmq_client.py create mode 100644 oslo_messaging/_drivers/zmq_driver/client/zmq_request.py delete mode 100644 oslo_messaging/_drivers/zmq_driver/notifier/__init__.py delete mode 100644 oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_call_request.py delete mode 100644 oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_dealer.py delete mode 100644 
oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_publisher.py delete mode 100644 oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_client.py delete mode 100644 oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_request.py rename oslo_messaging/_drivers/zmq_driver/{rpc => }/server/__init__.py (100%) create mode 100644 oslo_messaging/_drivers/zmq_driver/server/consumers/__init__.py rename oslo_messaging/_drivers/zmq_driver/{rpc/server/zmq_server.py => server/consumers/zmq_router_consumer.py} (55%) rename oslo_messaging/_drivers/zmq_driver/{rpc => }/server/zmq_incoming_message.py (71%) create mode 100644 oslo_messaging/_drivers/zmq_driver/server/zmq_server.py rename oslo_messaging/_drivers/zmq_driver/{zmq_target.py => zmq_address.py} (100%) rename oslo_messaging/_drivers/zmq_driver/{zmq_serializer.py => zmq_names.py} (64%) diff --git a/oslo_messaging/_drivers/base.py b/oslo_messaging/_drivers/base.py index 1d2620825..607821faa 100644 --- a/oslo_messaging/_drivers/base.py +++ b/oslo_messaging/_drivers/base.py @@ -111,7 +111,7 @@ class BaseDriver(object): """Construct a Listener for the given target.""" @abc.abstractmethod - def listen_for_notifications(self, targets_and_priorities): + def listen_for_notifications(self, targets_and_priorities, pool): """Construct a notification Listener for the given list of tuple of (target, priority). """ diff --git a/oslo_messaging/_drivers/impl_zmq.py b/oslo_messaging/_drivers/impl_zmq.py index 18086eb43..7a4086009 100644 --- a/oslo_messaging/_drivers/impl_zmq.py +++ b/oslo_messaging/_drivers/impl_zmq.py @@ -21,8 +21,8 @@ from stevedore import driver from oslo_messaging._drivers import base from oslo_messaging._drivers import common as rpc_common -from oslo_messaging._drivers.zmq_driver.rpc.client import zmq_client -from oslo_messaging._drivers.zmq_driver.rpc.server import zmq_server +from oslo_messaging._drivers.zmq_driver.client import zmq_client +from oslo_messaging._drivers.zmq_driver.server import zmq_server from oslo_messaging._executors import base as executor_base @@ -108,21 +108,28 @@ class ZmqDriver(base.BaseDriver): def send(self, target, ctxt, message, wait_for_reply=None, timeout=None, retry=None): + timeout = timeout or self.conf.rpc_response_timeout if wait_for_reply: - return self.client.call(target, ctxt, message, timeout, retry) + return self.client.send_call(target, ctxt, message, timeout, retry) + elif target.fanout: + self.client.send_fanout(target, ctxt, message, timeout, retry) else: - self.client.cast(target, ctxt, message, timeout, retry) - return None + self.client.send_cast(target, ctxt, message, timeout, retry) def send_notification(self, target, ctxt, message, version, retry=None): - return None + if target.fanout: + self.client.send_notify_fanout(target, ctxt, message, version, + retry) + else: + self.client.send_notify(target, ctxt, message, version, retry) def listen(self, target): self.server.listen(target) return self.server def listen_for_notifications(self, targets_and_priorities, pool): - return None + self.server.listen_notification(targets_and_priorities) + return self.server def cleanup(self): self.client.cleanup() diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/__init__.py b/oslo_messaging/_drivers/zmq_driver/client/__init__.py similarity index 100% rename from oslo_messaging/_drivers/zmq_driver/rpc/__init__.py rename to oslo_messaging/_drivers/zmq_driver/client/__init__.py diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/client/__init__.py 
b/oslo_messaging/_drivers/zmq_driver/client/publishers/__init__.py similarity index 100% rename from oslo_messaging/_drivers/zmq_driver/rpc/client/__init__.py rename to oslo_messaging/_drivers/zmq_driver/client/publishers/__init__.py diff --git a/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_dealer_publisher.py b/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_dealer_publisher.py new file mode 100644 index 000000000..bf6f253f9 --- /dev/null +++ b/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_dealer_publisher.py @@ -0,0 +1,77 @@ +# Copyright 2015 Mirantis, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import logging + +from oslo_messaging._drivers import common as rpc_common +from oslo_messaging._drivers.zmq_driver.client.publishers\ + import zmq_publisher_base +from oslo_messaging._drivers.zmq_driver import zmq_address +from oslo_messaging._drivers.zmq_driver import zmq_async +from oslo_messaging._drivers.zmq_driver import zmq_names +from oslo_messaging._i18n import _LE, _LI + +LOG = logging.getLogger(__name__) + +zmq = zmq_async.import_zmq() + + +class DealerPublisher(zmq_publisher_base.PublisherBase): + + def send_request(self, request): + + if request.msg_type == zmq_names.CALL_TYPE: + raise zmq_publisher_base.UnsupportedSendPattern(request.msg_type) + + dealer_socket, hosts = self._check_hosts_connections(request.target) + + if request.msg_type in zmq_names.MULTISEND_TYPES: + for _ in range(len(hosts)): + self._send_request(dealer_socket, request) + else: + self._send_request(dealer_socket, request) + + def _send_request(self, socket, request): + + socket.send(b'', zmq.SNDMORE) + super(DealerPublisher, self)._send_request(socket, request) + + LOG.info(_LI("Sending message %(message)s to a target %(target)s") + % {"message": request.message, + "target": request.target}) + + def _check_hosts_connections(self, target): + if str(target) in self.outbound_sockets: + dealer_socket, hosts = self.outbound_sockets[str(target)] + else: + dealer_socket = zmq.Context().socket(zmq.DEALER) + hosts = self.matchmaker.get_hosts(target) + for host in hosts: + self._connect_to_host(dealer_socket, host, target) + self.outbound_sockets[str(target)] = (dealer_socket, hosts) + return dealer_socket, hosts + + @staticmethod + def _connect_to_host(socket, host, target): + address = zmq_address.get_tcp_direct_address(host) + try: + LOG.info(_LI("Connecting DEALER to %(address)s for %(target)s") + % {"address": address, + "target": target}) + socket.connect(address) + except zmq.ZMQError as e: + errmsg = _LE("Failed connecting DEALER to %(address)s: %(e)s")\ + % (address, e) + LOG.error(errmsg) + raise rpc_common.RPCException(errmsg) diff --git a/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_publisher_base.py b/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_publisher_base.py new file mode 100644 index 000000000..0f32f5884 --- /dev/null +++ b/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_publisher_base.py @@ -0,0 +1,56 @@ +# Copyright 2015 
Mirantis, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import abc + +import six + +from oslo_messaging._drivers import common as rpc_common +from oslo_messaging._drivers.zmq_driver import zmq_async +from oslo_messaging._i18n import _LE + + +zmq = zmq_async.import_zmq() + + +class UnsupportedSendPattern(rpc_common.RPCException): + + def __init__(self, pattern_name): + errmsg = _LE("Sending pattern %s is unsupported.") % pattern_name + super(UnsupportedSendPattern, self).__init__(errmsg) + + +@six.add_metaclass(abc.ABCMeta) +class PublisherBase(object): + + def __init__(self, conf, matchmaker): + self.conf = conf + self.zmq_context = zmq.Context() + self.matchmaker = matchmaker + self.outbound_sockets = {} + super(PublisherBase, self).__init__() + + @abc.abstractmethod + def send_request(self, request): + """Send request to consumer""" + + def _send_request(self, socket, request): + socket.send_string(request.msg_type, zmq.SNDMORE) + socket.send_json(request.context, zmq.SNDMORE) + socket.send_json(request.message) + + def cleanup(self): + for socket, hosts in self.outbound_sockets.values(): + socket.setsockopt(zmq.LINGER, 0) + socket.close() diff --git a/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_req_publisher.py b/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_req_publisher.py new file mode 100644 index 000000000..68beab903 --- /dev/null +++ b/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_req_publisher.py @@ -0,0 +1,85 @@ +# Copyright 2015 Mirantis, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import contextlib +import logging + +import oslo_messaging +from oslo_messaging._drivers import common as rpc_common +from oslo_messaging._drivers.zmq_driver.client.publishers\ + import zmq_publisher_base +from oslo_messaging._drivers.zmq_driver import zmq_address +from oslo_messaging._drivers.zmq_driver import zmq_async +from oslo_messaging._drivers.zmq_driver import zmq_names +from oslo_messaging._i18n import _LE, _LI + +LOG = logging.getLogger(__name__) + +zmq = zmq_async.import_zmq() + + +class ReqPublisher(zmq_publisher_base.PublisherBase): + + def send_request(self, request): + + if request.msg_type != zmq_names.CALL_TYPE: + raise zmq_publisher_base.UnsupportedSendPattern(request.msg_type) + + socket = self._connect_to_host(request.target) + self._send_request(socket, request) + return self._receive_reply(socket, request) + + def _connect_to_host(self, target): + + try: + self.zmq_context = zmq.Context() + socket = self.zmq_context.socket(zmq.REQ) + + host = self.matchmaker.get_single_host(target) + connect_address = zmq_address.get_tcp_direct_address(host) + + LOG.info(_LI("Connecting REQ to %s") % connect_address) + + socket.connect(connect_address) + self.outbound_sockets[str(target)] = (socket, [host]) + return socket + + except zmq.ZMQError as e: + errmsg = _LE("Error connecting to socket: %s") % str(e) + LOG.error(errmsg) + raise rpc_common.RPCException(errmsg) + + @staticmethod + def _receive_reply(socket, request): + + def _receive_method(socket): + return socket.recv_json() + + # NOTE(ozamiatin): Check for retry here (no retries now) + with contextlib.closing(zmq_async.get_reply_poller()) as poller: + poller.register(socket, recv_method=_receive_method) + reply, socket = poller.poll(timeout=request.timeout) + if reply is None: + raise oslo_messaging.MessagingTimeout( + "Timeout %s seconds was reached" % request.timeout) + if reply[zmq_names.FIELD_FAILURE]: + raise rpc_common.deserialize_remote_exception( + reply[zmq_names.FIELD_FAILURE], + request.allowed_remote_exmods) + else: + return reply[zmq_names.FIELD_REPLY] + + def close(self): + # For contextlib compatibility + self.cleanup() diff --git a/oslo_messaging/_drivers/zmq_driver/client/zmq_client.py b/oslo_messaging/_drivers/zmq_driver/client/zmq_client.py new file mode 100644 index 000000000..23dfd09eb --- /dev/null +++ b/oslo_messaging/_drivers/zmq_driver/client/zmq_client.py @@ -0,0 +1,73 @@ +# Copyright 2015 Mirantis, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import contextlib + + +from oslo_messaging._drivers.zmq_driver.client.publishers\ + import zmq_dealer_publisher +from oslo_messaging._drivers.zmq_driver.client.publishers\ + import zmq_req_publisher +from oslo_messaging._drivers.zmq_driver.client import zmq_request +from oslo_messaging._drivers.zmq_driver import zmq_async + +zmq = zmq_async.import_zmq() + + +class ZmqClient(object): + + def __init__(self, conf, matchmaker=None, allowed_remote_exmods=None): + self.conf = conf + self.context = zmq.Context() + self.matchmaker = matchmaker + self.allowed_remote_exmods = allowed_remote_exmods or [] + self.dealer_publisher = zmq_dealer_publisher.DealerPublisher( + conf, matchmaker) + + def send_call(self, target, context, message, timeout=None, retry=None): + with contextlib.closing(zmq_request.CallRequest( + target, context=context, message=message, + timeout=timeout, retry=retry, + allowed_remote_exmods=self.allowed_remote_exmods)) as request: + with contextlib.closing(zmq_req_publisher.ReqPublisher( + self.conf, self.matchmaker)) as req_publisher: + return req_publisher.send_request(request) + + def send_cast(self, target, context, message, timeout=None, retry=None): + with contextlib.closing(zmq_request.CastRequest( + target, context=context, message=message, + timeout=timeout, retry=retry)) as request: + self.dealer_publisher.send_request(request) + + def send_fanout(self, target, context, message, timeout=None, retry=None): + with contextlib.closing(zmq_request.FanoutRequest( + target, context=context, message=message, + timeout=timeout, retry=retry)) as request: + self.dealer_publisher.send_request(request) + + def send_notify(self, target, context, message, version, retry=None): + with contextlib.closing(zmq_request.NotificationRequest( + target, context, message, version=version, + retry=retry)) as request: + self.dealer_publisher.send_request(request) + + def send_notify_fanout(self, target, context, message, version, + retry=None): + with contextlib.closing(zmq_request.NotificationFanoutRequest( + target, context, message, version=version, + retry=retry)) as request: + self.dealer_publisher.send_request(request) + + def cleanup(self): + self.dealer_publisher.cleanup() diff --git a/oslo_messaging/_drivers/zmq_driver/client/zmq_request.py b/oslo_messaging/_drivers/zmq_driver/client/zmq_request.py new file mode 100644 index 000000000..1caedff3e --- /dev/null +++ b/oslo_messaging/_drivers/zmq_driver/client/zmq_request.py @@ -0,0 +1,95 @@ +# Copyright 2015 Mirantis, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import abc +import logging + +import six + +from oslo_messaging._drivers.zmq_driver import zmq_async +from oslo_messaging._drivers.zmq_driver import zmq_names +from oslo_messaging._i18n import _LE + +LOG = logging.getLogger(__name__) + +zmq = zmq_async.import_zmq() + + +@six.add_metaclass(abc.ABCMeta) +class Request(object): + + def __init__(self, target, context=None, message=None, retry=None): + + if self.msg_type not in zmq_names.MESSAGE_TYPES: + raise RuntimeError("Unknown message type!") + + self.target = target + self.context = context + self.message = message + self.retry = retry + + @abc.abstractproperty + def msg_type(self): + """ZMQ message type""" + + def close(self): + """Nothing to close in base request""" + + +class RpcRequest(Request): + + def __init__(self, *args, **kwargs): + message = kwargs.get("message") + if message['method'] is None: + errmsg = _LE("No method specified for RPC call") + LOG.error(errmsg) + raise KeyError(errmsg) + + self.timeout = kwargs.pop("timeout") + assert self.timeout is not None, "Timeout should be specified!" + + super(RpcRequest, self).__init__(*args, **kwargs) + + +class CallRequest(RpcRequest): + + msg_type = zmq_names.CALL_TYPE + + def __init__(self, *args, **kwargs): + self.allowed_remote_exmods = kwargs.pop("allowed_remote_exmods") + super(CallRequest, self).__init__(*args, **kwargs) + + +class CastRequest(RpcRequest): + + msg_type = zmq_names.CAST_TYPE + + +class FanoutRequest(RpcRequest): + + msg_type = zmq_names.CAST_FANOUT_TYPE + + +class NotificationRequest(Request): + + msg_type = zmq_names.NOTIFY_TYPE + + def __init__(self, *args, **kwargs): + self.version = kwargs.pop("version") + super(NotificationRequest, self).__init__(*args, **kwargs) + + +class NotificationFanoutRequest(NotificationRequest): + + msg_type = zmq_names.NOTIFY_FANOUT_TYPE diff --git a/oslo_messaging/_drivers/zmq_driver/notifier/__init__.py b/oslo_messaging/_drivers/zmq_driver/notifier/__init__.py deleted file mode 100644 index 8af3e63a7..000000000 --- a/oslo_messaging/_drivers/zmq_driver/notifier/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__author__ = 'ozamiatin' diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_call_request.py b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_call_request.py deleted file mode 100644 index 0d35c31a6..000000000 --- a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_call_request.py +++ /dev/null @@ -1,76 +0,0 @@ -# Copyright 2015 Mirantis, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import logging - -import oslo_messaging -from oslo_messaging._drivers import common as rpc_common -from oslo_messaging._drivers.zmq_driver.rpc.client.zmq_request import Request -from oslo_messaging._drivers.zmq_driver import zmq_async -from oslo_messaging._drivers.zmq_driver import zmq_serializer -from oslo_messaging._drivers.zmq_driver import zmq_target -from oslo_messaging._i18n import _LE, _LI - -LOG = logging.getLogger(__name__) - -zmq = zmq_async.import_zmq() - - -class CallRequest(Request): - - msg_type = zmq_serializer.CALL_TYPE - - def __init__(self, conf, target, context, message, timeout=None, - retry=None, allowed_remote_exmods=None, matchmaker=None): - self.allowed_remote_exmods = allowed_remote_exmods or [] - self.matchmaker = matchmaker - self.reply_poller = zmq_async.get_reply_poller() - - try: - self.zmq_context = zmq.Context() - socket = self.zmq_context.socket(zmq.REQ) - super(CallRequest, self).__init__(conf, target, context, - message, socket, - timeout, retry) - self.host = self.matchmaker.get_single_host(self.target) - self.connect_address = zmq_target.get_tcp_direct_address( - self.host) - LOG.info(_LI("Connecting REQ to %s") % self.connect_address) - self.socket.connect(self.connect_address) - self.reply_poller.register( - self.socket, recv_method=lambda socket: socket.recv_json()) - - except zmq.ZMQError as e: - errmsg = _LE("Error connecting to socket: %s") % str(e) - LOG.error(errmsg) - raise rpc_common.RPCException(errmsg) - - def close(self): - self.reply_poller.close() - self.socket.setsockopt(zmq.LINGER, 0) - self.socket.close() - - def receive_reply(self): - # NOTE(ozamiatin): Check for retry here (no retries now) - reply, socket = self.reply_poller.poll(timeout=self.timeout) - if reply is None: - raise oslo_messaging.MessagingTimeout( - "Timeout %s seconds was reached" % self.timeout) - - if reply[zmq_serializer.FIELD_FAILURE]: - raise rpc_common.deserialize_remote_exception( - reply[zmq_serializer.FIELD_FAILURE], - self.allowed_remote_exmods) - else: - return reply[zmq_serializer.FIELD_REPLY] diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_dealer.py b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_dealer.py deleted file mode 100644 index f1257badb..000000000 --- a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_dealer.py +++ /dev/null @@ -1,100 +0,0 @@ -# Copyright 2015 Mirantis, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import logging - -from oslo_messaging._drivers import common as rpc_common -from oslo_messaging._drivers.zmq_driver.rpc.client import zmq_cast_publisher -from oslo_messaging._drivers.zmq_driver.rpc.client.zmq_request import Request -from oslo_messaging._drivers.zmq_driver import zmq_async -from oslo_messaging._drivers.zmq_driver import zmq_serializer -from oslo_messaging._drivers.zmq_driver import zmq_target -from oslo_messaging._i18n import _LE, _LI - -LOG = logging.getLogger(__name__) - -zmq = zmq_async.import_zmq() - - -class CastRequest(Request): - - msg_type = zmq_serializer.CAST_TYPE - - def __call__(self, *args, **kwargs): - self.send_request() - - def send_request(self): - self.socket.send(b'', zmq.SNDMORE) - super(CastRequest, self).send_request() - - def receive_reply(self): - # Ignore reply for CAST - pass - - -class FanoutRequest(CastRequest): - - msg_type = zmq_serializer.FANOUT_TYPE - - def __init__(self, *args, **kwargs): - self.hosts_count = kwargs.pop("hosts_count") - super(FanoutRequest, self).__init__(*args, **kwargs) - - def send_request(self): - for _ in range(self.hosts_count): - super(FanoutRequest, self).send_request() - - -class DealerCastPublisher(zmq_cast_publisher.CastPublisherBase): - - def __init__(self, conf, matchmaker): - super(DealerCastPublisher, self).__init__(conf) - self.matchmaker = matchmaker - - def cast(self, target, context, - message, timeout=None, retry=None): - if str(target) in self.outbound_sockets: - dealer_socket, hosts = self.outbound_sockets[str(target)] - else: - dealer_socket = zmq.Context().socket(zmq.DEALER) - hosts = self.matchmaker.get_hosts(target) - for host in hosts: - self._connect_to_host(dealer_socket, host) - self.outbound_sockets[str(target)] = (dealer_socket, hosts) - - if target.fanout: - request = FanoutRequest(self.conf, target, context, message, - dealer_socket, timeout, retry, - hosts_count=len(hosts)) - else: - request = CastRequest(self.conf, target, context, message, - dealer_socket, timeout, retry) - - request.send_request() - - def _connect_to_host(self, socket, host): - address = zmq_target.get_tcp_direct_address(host) - try: - LOG.info(_LI("Connecting DEALER to %s") % address) - socket.connect(address) - except zmq.ZMQError as e: - errmsg = _LE("Failed connecting DEALER to %(address)s: %(e)s")\ - % (address, e) - LOG.error(errmsg) - raise rpc_common.RPCException(errmsg) - - def cleanup(self): - for socket, hosts in self.outbound_sockets.values(): - socket.setsockopt(zmq.LINGER, 0) - socket.close() diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_publisher.py b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_publisher.py deleted file mode 100644 index 38a470ba8..000000000 --- a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_cast_publisher.py +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright 2015 Mirantis, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import abc - -import six - - -@six.add_metaclass(abc.ABCMeta) -class CastPublisherBase(object): - - def __init__(self, conf): - self.conf = conf - self.outbound_sockets = {} - super(CastPublisherBase, self).__init__() - - @abc.abstractmethod - def cast(self, target, context, - message, timeout=None, retry=None): - "Send CAST to target" diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_client.py b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_client.py deleted file mode 100644 index 2bdbee18b..000000000 --- a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_client.py +++ /dev/null @@ -1,41 +0,0 @@ -# Copyright 2015 Mirantis, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import contextlib - -from oslo_messaging._drivers.zmq_driver.rpc.client import zmq_call_request -from oslo_messaging._drivers.zmq_driver.rpc.client import zmq_cast_dealer - - -class ZmqClient(object): - - def __init__(self, conf, matchmaker=None, allowed_remote_exmods=None): - self.conf = conf - self.matchmaker = matchmaker - self.allowed_remote_exmods = allowed_remote_exmods or [] - self.cast_publisher = zmq_cast_dealer.DealerCastPublisher(conf, - matchmaker) - - def call(self, target, context, message, timeout=None, retry=None): - with contextlib.closing(zmq_call_request.CallRequest( - self.conf, target, context, message, timeout, retry, - self.allowed_remote_exmods, - self.matchmaker)) as request: - return request() - - def cast(self, target, context, message, timeout=None, retry=None): - self.cast_publisher.cast(target, context, message, timeout, retry) - - def cleanup(self): - self.cast_publisher.cleanup() diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_request.py b/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_request.py deleted file mode 100644 index b06699d93..000000000 --- a/oslo_messaging/_drivers/zmq_driver/rpc/client/zmq_request.py +++ /dev/null @@ -1,66 +0,0 @@ -# Copyright 2015 Mirantis, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import abc -import logging - -import six - -from oslo_messaging._drivers.zmq_driver import zmq_async -from oslo_messaging._drivers.zmq_driver import zmq_serializer -from oslo_messaging._i18n import _LE - -LOG = logging.getLogger(__name__) - -zmq = zmq_async.import_zmq() - - -@six.add_metaclass(abc.ABCMeta) -class Request(object): - - def __init__(self, conf, target, context, message, - socket, timeout=None, retry=None): - - if self.msg_type not in zmq_serializer.MESSAGE_TYPES: - raise RuntimeError("Unknown msg type!") - - if message['method'] is None: - errmsg = _LE("No method specified for RPC call") - LOG.error(errmsg) - raise KeyError(errmsg) - - self.target = target - self.context = context - self.message = message - self.timeout = timeout or conf.rpc_response_timeout - self.retry = retry - self.reply = None - self.socket = socket - - @abc.abstractproperty - def msg_type(self): - """ZMQ message type""" - - def send_request(self): - self.socket.send_string(self.msg_type, zmq.SNDMORE) - self.socket.send_json(self.context, zmq.SNDMORE) - self.socket.send_json(self.message) - - def __call__(self): - self.send_request() - return self.receive_reply() - - @abc.abstractmethod - def receive_reply(self): - "Receive reply from server side" diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/server/__init__.py b/oslo_messaging/_drivers/zmq_driver/server/__init__.py similarity index 100% rename from oslo_messaging/_drivers/zmq_driver/rpc/server/__init__.py rename to oslo_messaging/_drivers/zmq_driver/server/__init__.py diff --git a/oslo_messaging/_drivers/zmq_driver/server/consumers/__init__.py b/oslo_messaging/_drivers/zmq_driver/server/consumers/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_server.py b/oslo_messaging/_drivers/zmq_driver/server/consumers/zmq_router_consumer.py similarity index 55% rename from oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_server.py rename to oslo_messaging/_drivers/zmq_driver/server/consumers/zmq_router_consumer.py index 981966ddc..58680da90 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_server.py +++ b/oslo_messaging/_drivers/zmq_driver/server/consumers/zmq_router_consumer.py @@ -14,60 +14,50 @@ import logging -from oslo_messaging._drivers import base from oslo_messaging._drivers import common as rpc_common -from oslo_messaging._drivers.zmq_driver.rpc.server import zmq_incoming_message +from oslo_messaging._drivers.zmq_driver.server import zmq_incoming_message +from oslo_messaging._drivers.zmq_driver import zmq_address from oslo_messaging._drivers.zmq_driver import zmq_async -from oslo_messaging._drivers.zmq_driver import zmq_serializer -from oslo_messaging._drivers.zmq_driver import zmq_target -from oslo_messaging._i18n import _LE +from oslo_messaging._drivers.zmq_driver import zmq_names +from oslo_messaging._i18n import _LE, _LI LOG = logging.getLogger(__name__) zmq = zmq_async.import_zmq() -class ZmqServer(base.Listener): +class RouterConsumer(object): + + def __init__(self, conf, poller, server): + + self.poller = poller + self.server = server - def __init__(self, conf, matchmaker=None): - self.conf = conf try: self.context = zmq.Context() self.socket = self.context.socket(zmq.ROUTER) - self.address = zmq_target.get_tcp_random_address(conf) + self.address = zmq_address.get_tcp_random_address(conf) self.port = self.socket.bind_to_random_port(self.address) - LOG.info("Run server on %s:%d" % (self.address, self.port)) + LOG.info(_LI("Run ROUTER consumer on 
%(addr)s:%(port)d"), + {"addr": self.address, + "port": self.port}) except zmq.ZMQError as e: errmsg = _LE("Failed binding to port %(port)d: %(e)s")\ % (self.port, e) LOG.error(errmsg) raise rpc_common.RPCException(errmsg) - self.poller = zmq_async.get_poller() - self.matchmaker = matchmaker - - def poll(self, timeout=None): + def listen(self, target): + LOG.info(_LI("Listen to target %s") % str(target)) self.poller.register(self.socket, self._receive_message) - incoming = self.poller.poll(timeout or self.conf.rpc_poll_timeout) - return incoming[0] - - def stop(self): - LOG.info("Stop server tcp://%s:%d" % (self.address, self.port)) def cleanup(self): - self.poller.close() if not self.socket.closed: self.socket.setsockopt(zmq.LINGER, 0) self.socket.close() - def listen(self, target): - LOG.info("Listen to Target %s on tcp://%s:%d" % - (target, self.address, self.port)) - host = zmq_target.combine_address(self.conf.rpc_zmq_host, self.port) - self.matchmaker.register(target=target, - hostname=host) - def _receive_message(self, socket): + try: reply_id = socket.recv() empty = socket.recv() @@ -76,15 +66,20 @@ class ZmqServer(base.Listener): assert msg_type is not None, 'Bad format: msg type expected' context = socket.recv_json() message = socket.recv_json() - LOG.debug("Received CALL message %s" % str(message)) + LOG.debug("Received %s message %s" % (msg_type, str(message))) - direct_type = (zmq_serializer.CALL_TYPE, zmq_serializer.CAST_TYPE) - if msg_type in direct_type: + if msg_type == zmq_names.CALL_TYPE: return zmq_incoming_message.ZmqIncomingRequest( - self, context, message, socket, reply_id, self.poller) - elif msg_type == zmq_serializer.FANOUT_TYPE: - return zmq_incoming_message.ZmqFanoutMessage( - self, context, message, socket, self.poller) + self.server, context, message, socket, reply_id, + self.poller) + elif msg_type in zmq_names.CAST_TYPES: + return zmq_incoming_message.ZmqCastMessage( + self.server, context, message, socket, self.poller) + elif msg_type in zmq_names.NOTIFY_TYPES: + return zmq_incoming_message.ZmqNotificationMessage( + self.server, context, message, socket, self.poller) + else: + LOG.error(_LE("Unknown message type: %s") % msg_type) except zmq.ZMQError as e: LOG.error(_LE("Receiving message failed: %s") % str(e)) diff --git a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_incoming_message.py b/oslo_messaging/_drivers/zmq_driver/server/zmq_incoming_message.py similarity index 71% rename from oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_incoming_message.py rename to oslo_messaging/_drivers/zmq_driver/server/zmq_incoming_message.py index 1373019e1..d953e9334 100644 --- a/oslo_messaging/_drivers/zmq_driver/rpc/server/zmq_incoming_message.py +++ b/oslo_messaging/_drivers/zmq_driver/server/zmq_incoming_message.py @@ -18,7 +18,7 @@ import logging from oslo_messaging._drivers import base from oslo_messaging._drivers import common as rpc_common from oslo_messaging._drivers.zmq_driver import zmq_async -from oslo_messaging._drivers.zmq_driver import zmq_serializer +from oslo_messaging._drivers.zmq_driver import zmq_names LOG = logging.getLogger(__name__) @@ -39,9 +39,9 @@ class ZmqIncomingRequest(base.IncomingMessage): if failure is not None: failure = rpc_common.serialize_remote_exception(failure, log_failure) - message_reply = {zmq_serializer.FIELD_REPLY: reply, - zmq_serializer.FIELD_FAILURE: failure, - zmq_serializer.FIELD_LOG_FAILURE: log_failure} + message_reply = {zmq_names.FIELD_REPLY: reply, + zmq_names.FIELD_FAILURE: failure, + 
zmq_names.FIELD_LOG_FAILURE: log_failure} LOG.debug("Replying %s REP", (str(message_reply))) self.received = True self.reply_socket.send(self.reply_id, zmq.SNDMORE) @@ -56,10 +56,10 @@ class ZmqIncomingRequest(base.IncomingMessage): pass -class ZmqFanoutMessage(base.IncomingMessage): +class ZmqCastMessage(base.IncomingMessage): def __init__(self, listener, context, message, socket, poller): - super(ZmqFanoutMessage, self).__init__(listener, context, message) + super(ZmqCastMessage, self).__init__(listener, context, message) poller.resume_polling(socket) def reply(self, reply=None, failure=None, log_failure=True): @@ -70,3 +70,20 @@ class ZmqFanoutMessage(base.IncomingMessage): def requeue(self): pass + + +class ZmqNotificationMessage(base.IncomingMessage): + + def __init__(self, listener, context, message, socket, poller): + super(ZmqNotificationMessage, self).__init__(listener, context, + message) + poller.resume_polling(socket) + + def reply(self, reply=None, failure=None, log_failure=True): + """Reply is not needed for notification messages""" + + def acknowledge(self): + pass + + def requeue(self): + pass diff --git a/oslo_messaging/_drivers/zmq_driver/server/zmq_server.py b/oslo_messaging/_drivers/zmq_driver/server/zmq_server.py new file mode 100644 index 000000000..30cacd409 --- /dev/null +++ b/oslo_messaging/_drivers/zmq_driver/server/zmq_server.py @@ -0,0 +1,80 @@ +# Copyright 2015 Mirantis, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
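For readers less familiar with the ROUTER socket conventions used in the reply path above, the reply boils down to a three-frame envelope: the routing identity received from the client, an empty delimiter, and the JSON payload. A minimal sketch (names are illustrative, not part of the patch), assuming a pyzmq ROUTER socket and the reply_id frame captured when the request arrived:

    import zmq

    def send_reply(socket, reply_id, reply=None, failure=None, log_failure=True):
        # Payload layout mirrors FIELD_REPLY / FIELD_FAILURE / FIELD_LOG_FAILURE.
        payload = {'reply': reply, 'failure': failure, 'log_failure': log_failure}
        socket.send(reply_id, zmq.SNDMORE)   # routing identity frame
        socket.send(b'', zmq.SNDMORE)        # empty delimiter frame
        socket.send_json(payload)            # JSON body frame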
+ +import copy +import logging + +from oslo_messaging._drivers import base +from oslo_messaging._drivers.zmq_driver.server.consumers\ + import zmq_router_consumer +from oslo_messaging._drivers.zmq_driver import zmq_address +from oslo_messaging._drivers.zmq_driver import zmq_async + +LOG = logging.getLogger(__name__) + +zmq = zmq_async.import_zmq() + + +class ZmqServer(base.Listener): + + def __init__(self, conf, matchmaker=None): + self.conf = conf + self.matchmaker = matchmaker + self.poller = zmq_async.get_poller() + self.rpc_consumer = zmq_router_consumer.RouterConsumer( + conf, self.poller, self) + self.notify_consumer = self.rpc_consumer + self.consumers = [self.rpc_consumer] + + def poll(self, timeout=None): + message, socket = self.poller.poll( + timeout or self.conf.rpc_poll_timeout) + return message + + def stop(self): + consumer = self.rpc_consumer + LOG.info("Stop server %s:%d" % (consumer.address, consumer.port)) + + def cleanup(self): + self.poller.close() + for consumer in self.consumers: + consumer.cleanup() + + def listen(self, target): + + consumer = self.rpc_consumer + consumer.listen(target) + + LOG.info("Listen to target %s on %s:%d" % + (target, consumer.address, consumer.port)) + + host = zmq_address.combine_address(self.conf.rpc_zmq_host, + consumer.port) + self.matchmaker.register(target=target, + hostname=host) + + def listen_notification(self, targets_and_priorities): + + consumer = self.notify_consumer + + LOG.info("Listen for notifications on %s:%d" + % (consumer.address, consumer.port)) + + for target, priority in targets_and_priorities: + host = zmq_address.combine_address(self.conf.rpc_zmq_host, + consumer.port) + t = copy.deepcopy(target) + t.topic = target.topic + '.' + priority + self.matchmaker.register(target=t, hostname=host) + consumer.listen(t) diff --git a/oslo_messaging/_drivers/zmq_driver/zmq_target.py b/oslo_messaging/_drivers/zmq_driver/zmq_address.py similarity index 100% rename from oslo_messaging/_drivers/zmq_driver/zmq_target.py rename to oslo_messaging/_drivers/zmq_driver/zmq_address.py diff --git a/oslo_messaging/_drivers/zmq_driver/zmq_serializer.py b/oslo_messaging/_drivers/zmq_driver/zmq_names.py similarity index 64% rename from oslo_messaging/_drivers/zmq_driver/zmq_serializer.py rename to oslo_messaging/_drivers/zmq_driver/zmq_names.py index 6026ca655..583600ec4 100644 --- a/oslo_messaging/_drivers/zmq_driver/zmq_serializer.py +++ b/oslo_messaging/_drivers/zmq_driver/zmq_names.py @@ -12,9 +12,6 @@ # License for the specific language governing permissions and limitations # under the License. 
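As a side note on listen_notification() above: each (target, priority) pair becomes its own matchmaker registration by suffixing the topic with the priority. A small sketch of that derivation, assuming a Target-like object with a mutable topic attribute (helper name is hypothetical):

    import copy

    def per_priority_targets(target, priorities):
        # A topic 'weather' with priorities ('info', 'error') yields
        # registrations for 'weather.info' and 'weather.error'.
        for priority in priorities:
            t = copy.deepcopy(target)
            t.topic = '%s.%s' % (target.topic, priority)
            yield t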
-MESSAGE_CALL_TYPE_POSITION = 1 -MESSAGE_CALL_TARGET_POSITION = 2 -MESSAGE_CALL_TOPIC_POSITION = 3 FIELD_FAILURE = 'failure' FIELD_REPLY = 'reply' @@ -22,7 +19,17 @@ FIELD_LOG_FAILURE = 'log_failure' CALL_TYPE = 'call' CAST_TYPE = 'cast' -FANOUT_TYPE = 'fanout' +CAST_FANOUT_TYPE = 'cast-f' NOTIFY_TYPE = 'notify' +NOTIFY_FANOUT_TYPE = 'notify-f' -MESSAGE_TYPES = (CALL_TYPE, CAST_TYPE, FANOUT_TYPE, NOTIFY_TYPE) +MESSAGE_TYPES = (CALL_TYPE, + CAST_TYPE, + CAST_FANOUT_TYPE, + NOTIFY_TYPE, + NOTIFY_FANOUT_TYPE) + +MULTISEND_TYPES = (CAST_FANOUT_TYPE, NOTIFY_FANOUT_TYPE) +DIRECT_TYPES = (CALL_TYPE, CAST_TYPE, NOTIFY_TYPE) +CAST_TYPES = (CAST_TYPE, CAST_FANOUT_TYPE) +NOTIFY_TYPES = (NOTIFY_TYPE, NOTIFY_FANOUT_TYPE) diff --git a/oslo_messaging/tests/drivers/zmq/test_impl_zmq.py b/oslo_messaging/tests/drivers/zmq/test_impl_zmq.py index d191ae64c..ca15f61ca 100644 --- a/oslo_messaging/tests/drivers/zmq/test_impl_zmq.py +++ b/oslo_messaging/tests/drivers/zmq/test_impl_zmq.py @@ -29,11 +29,10 @@ LOG = logging.getLogger(__name__) zmq = zmq_async.import_zmq() -class TestRPCServerListener(object): +class TestServerListener(object): def __init__(self, driver): self.driver = driver - self.target = None self.listener = None self.executor = zmq_async.get_executor(self._run) self._stop = threading.Event() @@ -41,8 +40,12 @@ class TestRPCServerListener(object): self.message = None def listen(self, target): - self.target = target - self.listener = self.driver.listen(self.target) + self.listener = self.driver.listen(target) + self.executor.execute() + + def listen_notifications(self, targets_and_priorities): + self.listener = self.driver.listen_for_notifications( + targets_and_priorities, {}) self.executor.execute() def _run(self): @@ -80,7 +83,7 @@ class ZmqBaseTestCase(test_utils.BaseTestCase): transport = oslo_messaging.get_transport(self.conf) self.driver = transport._driver - self.listener = TestRPCServerListener(self.driver) + self.listener = TestServerListener(self.driver) self.addCleanup(stopRpc(self.__dict__)) @@ -174,6 +177,20 @@ class TestZmqBasics(ZmqBaseTestCase): wait_for_reply=True) self.assertTrue(result) + def test_send_receive_notification(self): + """Notify() test""" + + target = oslo_messaging.Target(topic='t1', + server='notification@server') + self.listener.listen_notifications([(target, 'info')]) + + message = {'method': 'hello-world', 'tx_id': 1} + context = {} + target.topic = target.topic + '.info' + self.driver.send_notification(target, context, message, '3.0') + self.listener._received.wait() + self.assertTrue(self.listener._received.isSet()) + class TestPoller(test_utils.BaseTestCase): diff --git a/oslo_messaging/tests/functional/test_functional.py b/oslo_messaging/tests/functional/test_functional.py index 0e56e0c1c..ebca74aea 100644 --- a/oslo_messaging/tests/functional/test_functional.py +++ b/oslo_messaging/tests/functional/test_functional.py @@ -187,11 +187,6 @@ class NotifyTestCase(utils.SkipIfNoTransportURL): # NOTE(sileht): Each test must not use the same topics # to be run in parallel - def setUp(self): - super(NotifyTestCase, self).setUp() - if self.url.startswith("zmq"): - self.skipTest("Skip NotifyTestCase for ZMQ driver") - def test_simple(self): listener = self.useFixture( utils.NotificationFixture(self.url, ['test_simple'])) diff --git a/tools/simulator.py b/tools/simulator.py index 0a8309e17..8098fc654 100755 --- a/tools/simulator.py +++ b/tools/simulator.py @@ -25,10 +25,10 @@ import logging import sys import time -from oslo.config import cfg -from oslo import 
messaging -from oslo.messaging import notify -from oslo.messaging import rpc +from oslo_config import cfg +import oslo_messaging as messaging +from oslo_messaging import notify +from oslo_messaging import rpc LOG = logging.getLogger() diff --git a/tox.ini b/tox.ini index 6a92dbd23..6c86be990 100644 --- a/tox.ini +++ b/tox.ini @@ -41,7 +41,7 @@ setenv = TRANSPORT_URL=amqp://stackqpid:secretqpid@127.0.0.1:65123// commands = {toxinidir}/setup-test-env-qpid.sh python setup.py testr --slowest --testr-args='oslo_messaging.tests.functional' [testenv:py27-func-zeromq] -commands = {toxinidir}/setup-test-env-zmq.sh python setup.py testr --slowest --testr-args='oslo_messaging.tests.functional.test_functional' +commands = {toxinidir}/setup-test-env-zmq.sh python setup.py testr --slowest --testr-args='oslo_messaging.tests.functional' [flake8] show-source = True From da4ee6361baeaf34ce3bfe24b53a2fc371ae9e75 Mon Sep 17 00:00:00 2001 From: Oleksii Zamiatin Date: Wed, 5 Aug 2015 16:49:23 +0300 Subject: [PATCH 24/28] Documenting main driver classes Main classes destination and methods parameters. Change-Id: I23d906855d616830dfc12c5e9e32881a4600b6ff --- oslo_messaging/_drivers/impl_zmq.py | 73 ++++++++++++++++++- .../client/publishers/zmq_publisher_base.py | 44 ++++++++++- .../_drivers/zmq_driver/client/zmq_request.py | 23 ++++++ .../_drivers/zmq_driver/zmq_poller.py | 63 ++++++++++++++-- 4 files changed, 194 insertions(+), 9 deletions(-) diff --git a/oslo_messaging/_drivers/impl_zmq.py b/oslo_messaging/_drivers/impl_zmq.py index 7a4086009..f0ff1a647 100644 --- a/oslo_messaging/_drivers/impl_zmq.py +++ b/oslo_messaging/_drivers/impl_zmq.py @@ -83,14 +83,35 @@ zmq_opts = [ class ZmqDriver(base.BaseDriver): - """ZeroMQ Driver + + """ZeroMQ Driver implementation. + + Provides implementation of RPC and Notifier APIs by means + of ZeroMQ library. See :doc:`zmq_driver` for details. - """ def __init__(self, conf, url, default_exchange=None, allowed_remote_exmods=None): + """Construct ZeroMQ driver. + + Intialize driver options. 
+ + Construct matchmaker - pluggable interface to targets management + Name Service + + Construct client and server controllers + + :param conf: oslo messaging configuration object + :type conf: oslo_config.CONF + :param url: transport URL + :type url: TransportUrl + :param default_exchange: Not used in zmq implementation + :type default_exchange: None + :param allowed_remote_exmods: remote exception passing options + :type allowed_remote_exmods: list + """ conf.register_opts(zmq_opts) conf.register_opts(executor_base._pool_opts) self.conf = conf @@ -108,6 +129,24 @@ class ZmqDriver(base.BaseDriver): def send(self, target, ctxt, message, wait_for_reply=None, timeout=None, retry=None): + """Send RPC message to server + + :param target: Message destination target + :type target: oslo_messaging.Target + :param ctxt: Message context + :type ctxt: dict + :param message: Message payload to pass + :type message: dict + :param wait_for_reply: Waiting for reply flag + :type wait_for_reply: bool + :param timeout: Reply waiting timeout in seconds + :type timeout: int + :param retry: an optional default connection retries configuration + None or -1 means to retry forever + 0 means no retry + N means N retries + :type retry: int + """ timeout = timeout or self.conf.rpc_response_timeout if wait_for_reply: return self.client.send_call(target, ctxt, message, timeout, retry) @@ -117,6 +156,22 @@ class ZmqDriver(base.BaseDriver): self.client.send_cast(target, ctxt, message, timeout, retry) def send_notification(self, target, ctxt, message, version, retry=None): + """Send notification to server + + :param target: Message destination target + :type target: oslo_messaging.Target + :param ctxt: Message context + :type ctxt: dict + :param message: Message payload to pass + :type message: dict + :param version: Messaging API version + :type version: str + :param retry: an optional default connection retries configuration + None or -1 means to retry forever + 0 means no retry + N means N retries + :type retry: int + """ if target.fanout: self.client.send_notify_fanout(target, ctxt, message, version, retry) @@ -124,13 +179,27 @@ class ZmqDriver(base.BaseDriver): self.client.send_notify(target, ctxt, message, version, retry) def listen(self, target): + """Listen to a specified target on a server side + + :param target: Message destination target + :type target: oslo_messaging.Target + """ self.server.listen(target) return self.server def listen_for_notifications(self, targets_and_priorities, pool): + """Listen to a specified list of targets on a server side + + :param targets_and_priorities: List of pairs (target, priority) + :type targets_and_priorities: list + :param pool: Not used for zmq implementation + :type pool: object + """ self.server.listen_notification(targets_and_priorities) return self.server def cleanup(self): + """Cleanup all driver's connections finally + """ self.client.cleanup() self.server.cleanup() diff --git a/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_publisher_base.py b/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_publisher_base.py index 0f32f5884..a367e9ed3 100644 --- a/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_publisher_base.py +++ b/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_publisher_base.py @@ -26,7 +26,16 @@ zmq = zmq_async.import_zmq() class UnsupportedSendPattern(rpc_common.RPCException): + """Exception to raise from publishers in case of unsupported + sending pattern called. 
+ """ + def __init__(self, pattern_name): + """Construct exception object + + :param pattern_name: Message type name from zmq_names + :type pattern_name: str + """ errmsg = _LE("Sending pattern %s is unsupported.") % pattern_name super(UnsupportedSendPattern, self).__init__(errmsg) @@ -34,7 +43,27 @@ class UnsupportedSendPattern(rpc_common.RPCException): @six.add_metaclass(abc.ABCMeta) class PublisherBase(object): + """Abstract publisher class + + Each publisher from zmq-driver client should implement + this interface to serve as a messages publisher. + + Publisher can send request objects from zmq_request. + """ + def __init__(self, conf, matchmaker): + + """Construct publisher + + Accept configuration object and Name Service interface object. + Create zmq.Context and connected sockets dictionary. + + :param conf: configuration object + :type conf: oslo_config.CONF + :param matchmaker: Name Service interface object + :type matchmaker: matchmaker.MatchMakerBase + """ + self.conf = conf self.zmq_context = zmq.Context() self.matchmaker = matchmaker @@ -43,14 +72,27 @@ class PublisherBase(object): @abc.abstractmethod def send_request(self, request): - """Send request to consumer""" + """Send request to consumer + + :param request: Message data and destination container object + :type request: zmq_request.Request + """ def _send_request(self, socket, request): + """Send request to consumer. + Helper private method which defines basic sending behavior. + + :param socket: Socket to publish message on + :type socket: zmq.Socket + :param request: Message data and destination container object + :type request: zmq_request.Request + """ socket.send_string(request.msg_type, zmq.SNDMORE) socket.send_json(request.context, zmq.SNDMORE) socket.send_json(request.message) def cleanup(self): + """Cleanup publisher. Close allocated connections.""" for socket, hosts in self.outbound_sockets.values(): socket.setsockopt(zmq.LINGER, 0) socket.close() diff --git a/oslo_messaging/_drivers/zmq_driver/client/zmq_request.py b/oslo_messaging/_drivers/zmq_driver/client/zmq_request.py index 1caedff3e..accebae16 100644 --- a/oslo_messaging/_drivers/zmq_driver/client/zmq_request.py +++ b/oslo_messaging/_drivers/zmq_driver/client/zmq_request.py @@ -29,8 +29,31 @@ zmq = zmq_async.import_zmq() @six.add_metaclass(abc.ABCMeta) class Request(object): + """Zmq request abstract class + + Represents socket (publisher) independent data object to publish. + Request object should contain all needed information for a publisher + to publish it, for instance: message payload, target, timeout + and retries etc. 
+ """ + def __init__(self, target, context=None, message=None, retry=None): + """Construct request object + + :param target: Message destination target + :type target: oslo_messaging.Target + :param context: Message context + :type context: dict + :param message: Message payload to pass + :type message: dict + :param retry: an optional default connection retries configuration + None or -1 means to retry forever + 0 means no retry + N means N retries + :type retry: int + """ + if self.msg_type not in zmq_names.MESSAGE_TYPES: raise RuntimeError("Unknown message type!") diff --git a/oslo_messaging/_drivers/zmq_driver/zmq_poller.py b/oslo_messaging/_drivers/zmq_driver/zmq_poller.py index 437c841ab..a62ea8a6f 100644 --- a/oslo_messaging/_drivers/zmq_driver/zmq_poller.py +++ b/oslo_messaging/_drivers/zmq_driver/zmq_poller.py @@ -20,36 +20,87 @@ import six @six.add_metaclass(abc.ABCMeta) class ZmqPoller(object): + """Base poller interface + + Needed to poll on zmq sockets in green and native async manner. + Native poller implementation wraps zmq.Poller helper class. + Wrapping is needed to provide unified poller interface + in zmq-driver (for both native and zmq pollers). It makes some + difference with poller-helper from zmq library which doesn't actually + receive message. + + The poller object should be obtained over: + + poller = zmq_async.get_poller() + + Then we have to register sockets for polling. We are able + to provide specific receiving method. By default poller calls + socket.recv_multipart. + + def receive_message(socket): + id = socket.recv_string() + ctxt = socket.recv_json() + msg = socket.recv_json() + return (id, ctxt, msg) + + poller.register(socket, recv_method=receive_message) + + Further to receive a message we should call: + + message, socket = poller.poll() + + The 'message' here contains (id, ctxt, msg) tuple. + """ + @abc.abstractmethod def register(self, socket, recv_method=None): - """Register socket to poll""" + """Register socket to poll + + :param socket: Socket to subscribe for polling + :type socket: zmq.Socket + :param recv_method: Optional specific receiver procedure + Should return received message object + :type recv_method: callable + """ @abc.abstractmethod def poll(self, timeout=None): - """Poll for messages""" + """Poll for messages + + :param timeout: Optional polling timeout + None or -1 means poll forever + any positive value means timeout in seconds + :type timeout: int + :returns: (message, socket) tuple + """ @abc.abstractmethod def close(self): """Terminate polling""" def resume_polling(self, socket): - """Resume with polling""" + """Resume with polling + + Some implementations of poller may provide hold polling before reply + This method is intended to excplicitly resume polling aftewards. + """ @six.add_metaclass(abc.ABCMeta) class Executor(object): + """Base executor interface for threading/green async executors""" def __init__(self, thread): self.thread = thread @abc.abstractmethod def execute(self): - 'Run execution' + """Run execution""" @abc.abstractmethod def stop(self): - 'Stop execution' + """Stop execution""" @abc.abstractmethod def wait(self): - 'Wait until pass' + """Wait until pass""" From c5a6bfdca30a5111e641ebe4b2eac40b21b8ce74 Mon Sep 17 00:00:00 2001 From: Victor Sergeyev Date: Fri, 7 Aug 2015 15:23:41 +0300 Subject: [PATCH 25/28] FIx CPU time consuming in green_poller poll() The current implementation of GreenPoller.poll() calls eventlet.sleep() in `while True:` loop. 
It causes high CPU load, so should be refactored to use queue.get() with timeout. Change-Id: I48f1d8db39c4d7df8bd7f0bc9898ebefcd8df9e8 --- .../_drivers/zmq_driver/poller/green_poller.py | 17 ++++------------- 1 file changed, 4 insertions(+), 13 deletions(-) diff --git a/oslo_messaging/_drivers/zmq_driver/poller/green_poller.py b/oslo_messaging/_drivers/zmq_driver/poller/green_poller.py index dcf9da588..58f8d8af1 100644 --- a/oslo_messaging/_drivers/zmq_driver/poller/green_poller.py +++ b/oslo_messaging/_drivers/zmq_driver/poller/green_poller.py @@ -16,9 +16,7 @@ import logging import threading import eventlet -import six -from oslo_messaging._drivers import common as rpc_common from oslo_messaging._drivers.zmq_driver import zmq_poller LOG = logging.getLogger(__name__) @@ -27,7 +25,7 @@ LOG = logging.getLogger(__name__) class GreenPoller(zmq_poller.ZmqPoller): def __init__(self): - self.incoming_queue = six.moves.queue.Queue() + self.incoming_queue = eventlet.queue.LightQueue() self.green_pool = eventlet.GreenPool() self.thread_by_socket = {} @@ -46,17 +44,10 @@ class GreenPoller(zmq_poller.ZmqPoller): eventlet.sleep() def poll(self, timeout=None): - incoming = None try: - with eventlet.Timeout(timeout, exception=rpc_common.Timeout): - while incoming is None: - try: - incoming = self.incoming_queue.get_nowait() - except six.moves.queue.Empty: - eventlet.sleep() - except rpc_common.Timeout: - return None, None - return incoming[0], incoming[1] + return self.incoming_queue.get(timeout=timeout) + except eventlet.queue.Empty: + return (None, None) def close(self): for thread in self.thread_by_socket.values(): From 1adf880a23562906f0549d389de3962697aa65e4 Mon Sep 17 00:00:00 2001 From: Oleksii Zamiatin Date: Mon, 10 Aug 2015 18:07:38 +0300 Subject: [PATCH 26/28] Fix fork-related issues Many services make use of 'fork' system call to start new instances of 'workers'. Such approach forces messaging drivers to perform their initialization in lazy manner. Added LazyDriverItem object to init any part of the driver by first request. Fixed DEALER-publisher not to block on sending when no listener connected. Introduced ZmqSocket wrapper to track connections in outgoing sockets. Refactoring of publishers, introduced PublisherMultisend. Change-Id: I125c946ee9e36061d1b21aa29adcef0611dff201 --- oslo_messaging/_drivers/impl_zmq.py | 74 +++++++++++++++---- .../client/publishers/zmq_dealer_publisher.py | 45 ++++------- .../client/publishers/zmq_publisher_base.py | 45 ++++++++++- .../client/publishers/zmq_req_publisher.py | 2 +- .../server/consumers/zmq_router_consumer.py | 8 +- .../_drivers/zmq_driver/server/zmq_server.py | 4 +- .../_drivers/zmq_driver/zmq_names.py | 15 ++++ .../_drivers/zmq_driver/zmq_socket.py | 57 ++++++++++++++ 8 files changed, 200 insertions(+), 50 deletions(-) create mode 100644 oslo_messaging/_drivers/zmq_driver/zmq_socket.py diff --git a/oslo_messaging/_drivers/impl_zmq.py b/oslo_messaging/_drivers/impl_zmq.py index f0ff1a647..09dde4b1a 100644 --- a/oslo_messaging/_drivers/impl_zmq.py +++ b/oslo_messaging/_drivers/impl_zmq.py @@ -15,6 +15,7 @@ import logging import pprint import socket +import threading from oslo_config import cfg from stevedore import driver @@ -82,6 +83,36 @@ zmq_opts = [ ] +class LazyDriverItem(object): + + def __init__(self, item_cls, *args, **kwargs): + self._lock = threading.Lock() + self.item = None + self.item_class = item_cls + self.args = args + self.kwargs = kwargs + + def get(self): + # NOTE(ozamiatin): Lazy initialization. 
+ # All init stuff moved closer to usage point - lazy init. + # Better design approach is to initialize in the driver's + # __init__, but 'fork' extensively used by services + # breaks all things. + + if self.item is not None: + return self.item + + self._lock.acquire() + if self.item is None: + self.item = self.item_class(*self.args, **self.kwargs) + self._lock.release() + return self.item + + def cleanup(self): + if self.item: + self.item.cleanup() + + class ZmqDriver(base.BaseDriver): """ZeroMQ Driver implementation. @@ -115,15 +146,27 @@ class ZmqDriver(base.BaseDriver): conf.register_opts(zmq_opts) conf.register_opts(executor_base._pool_opts) self.conf = conf + self.allowed_remote_exmods = allowed_remote_exmods self.matchmaker = driver.DriverManager( 'oslo.messaging.zmq.matchmaker', self.conf.rpc_zmq_matchmaker, ).driver(self.conf) - self.server = zmq_server.ZmqServer(self.conf, self.matchmaker) - self.client = zmq_client.ZmqClient(self.conf, self.matchmaker, - allowed_remote_exmods) + self.server = LazyDriverItem( + zmq_server.ZmqServer, self, self.conf, self.matchmaker) + + self.notify_server = LazyDriverItem( + zmq_server.ZmqServer, self, self.conf, self.matchmaker) + + self.client = LazyDriverItem( + zmq_client.ZmqClient, self.conf, self.matchmaker, + self.allowed_remote_exmods) + + self.notifier = LazyDriverItem( + zmq_client.ZmqClient, self.conf, self.matchmaker, + self.allowed_remote_exmods) + super(ZmqDriver, self).__init__(conf, url, default_exchange, allowed_remote_exmods) @@ -147,13 +190,14 @@ class ZmqDriver(base.BaseDriver): N means N retries :type retry: int """ + client = self.client.get() timeout = timeout or self.conf.rpc_response_timeout if wait_for_reply: - return self.client.send_call(target, ctxt, message, timeout, retry) + return client.send_call(target, ctxt, message, timeout, retry) elif target.fanout: - self.client.send_fanout(target, ctxt, message, timeout, retry) + client.send_fanout(target, ctxt, message, timeout, retry) else: - self.client.send_cast(target, ctxt, message, timeout, retry) + client.send_cast(target, ctxt, message, timeout, retry) def send_notification(self, target, ctxt, message, version, retry=None): """Send notification to server @@ -172,11 +216,11 @@ class ZmqDriver(base.BaseDriver): N means N retries :type retry: int """ + client = self.notifier.get() if target.fanout: - self.client.send_notify_fanout(target, ctxt, message, version, - retry) + client.send_notify_fanout(target, ctxt, message, version, retry) else: - self.client.send_notify(target, ctxt, message, version, retry) + client.send_notify(target, ctxt, message, version, retry) def listen(self, target): """Listen to a specified target on a server side @@ -184,8 +228,9 @@ class ZmqDriver(base.BaseDriver): :param target: Message destination target :type target: oslo_messaging.Target """ - self.server.listen(target) - return self.server + server = self.server.get() + server.listen(target) + return server def listen_for_notifications(self, targets_and_priorities, pool): """Listen to a specified list of targets on a server side @@ -195,11 +240,14 @@ class ZmqDriver(base.BaseDriver): :param pool: Not used for zmq implementation :type pool: object """ - self.server.listen_notification(targets_and_priorities) - return self.server + server = self.notify_server.get() + server.listen_notification(targets_and_priorities) + return server def cleanup(self): """Cleanup all driver's connections finally """ self.client.cleanup() self.server.cleanup() + self.notify_server.cleanup() + 
self.notifier.cleanup() diff --git a/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_dealer_publisher.py b/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_dealer_publisher.py index bf6f253f9..9fdd6d7a8 100644 --- a/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_dealer_publisher.py +++ b/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_dealer_publisher.py @@ -14,20 +14,21 @@ import logging -from oslo_messaging._drivers import common as rpc_common from oslo_messaging._drivers.zmq_driver.client.publishers\ import zmq_publisher_base -from oslo_messaging._drivers.zmq_driver import zmq_address from oslo_messaging._drivers.zmq_driver import zmq_async from oslo_messaging._drivers.zmq_driver import zmq_names -from oslo_messaging._i18n import _LE, _LI +from oslo_messaging._i18n import _LI, _LW LOG = logging.getLogger(__name__) zmq = zmq_async.import_zmq() -class DealerPublisher(zmq_publisher_base.PublisherBase): +class DealerPublisher(zmq_publisher_base.PublisherMultisend): + + def __init__(self, conf, matchmaker): + super(DealerPublisher, self).__init__(conf, matchmaker, zmq.DEALER) def send_request(self, request): @@ -37,41 +38,25 @@ class DealerPublisher(zmq_publisher_base.PublisherBase): dealer_socket, hosts = self._check_hosts_connections(request.target) if request.msg_type in zmq_names.MULTISEND_TYPES: - for _ in range(len(hosts)): + for _ in range(dealer_socket.connections_count()): self._send_request(dealer_socket, request) else: self._send_request(dealer_socket, request) def _send_request(self, socket, request): + if not socket.connections: + # NOTE(ozamiatin): Here we can provide + # a queue for keeping messages to send them later + # when some listener appears. However such approach + # being more reliable will consume additional memory. + LOG.warning(_LW("Request %s was dropped because no connection") + % request.msg_type) + return + socket.send(b'', zmq.SNDMORE) super(DealerPublisher, self)._send_request(socket, request) LOG.info(_LI("Sending message %(message)s to a target %(target)s") % {"message": request.message, "target": request.target}) - - def _check_hosts_connections(self, target): - if str(target) in self.outbound_sockets: - dealer_socket, hosts = self.outbound_sockets[str(target)] - else: - dealer_socket = zmq.Context().socket(zmq.DEALER) - hosts = self.matchmaker.get_hosts(target) - for host in hosts: - self._connect_to_host(dealer_socket, host, target) - self.outbound_sockets[str(target)] = (dealer_socket, hosts) - return dealer_socket, hosts - - @staticmethod - def _connect_to_host(socket, host, target): - address = zmq_address.get_tcp_direct_address(host) - try: - LOG.info(_LI("Connecting DEALER to %(address)s for %(target)s") - % {"address": address, - "target": target}) - socket.connect(address) - except zmq.ZMQError as e: - errmsg = _LE("Failed connecting DEALER to %(address)s: %(e)s")\ - % (address, e) - LOG.error(errmsg) - raise rpc_common.RPCException(errmsg) diff --git a/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_publisher_base.py b/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_publisher_base.py index a367e9ed3..51de8a5e6 100644 --- a/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_publisher_base.py +++ b/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_publisher_base.py @@ -13,13 +13,18 @@ # under the License. 
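The fork-safety change above hinges on deferring socket creation until first use. A simplified, self-contained sketch of the same double-checked-locking idea (a stand-in, not the patch code); using a with-block around the lock also protects against a factory that raises, which a bare acquire()/release() pair would not:

    import threading

    class LazyItem(object):
        # Simplified stand-in for LazyDriverItem: construction is cheap and
        # fork-safe; the wrapped object (and any sockets it owns) is only
        # created on the first get() call in the process that needs it.
        def __init__(self, item_cls, *args, **kwargs):
            self._lock = threading.Lock()
            self._item = None
            self._item_cls = item_cls
            self._args = args
            self._kwargs = kwargs

        def get(self):
            if self._item is None:
                with self._lock:
                    if self._item is None:
                        self._item = self._item_cls(*self._args, **self._kwargs)
            return self._item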
import abc +import logging import six from oslo_messaging._drivers import common as rpc_common +from oslo_messaging._drivers.zmq_driver import zmq_address from oslo_messaging._drivers.zmq_driver import zmq_async -from oslo_messaging._i18n import _LE +from oslo_messaging._drivers.zmq_driver import zmq_names +from oslo_messaging._drivers.zmq_driver import zmq_socket +from oslo_messaging._i18n import _LE, _LI +LOG = logging.getLogger(__name__) zmq = zmq_async.import_zmq() @@ -93,6 +98,42 @@ class PublisherBase(object): def cleanup(self): """Cleanup publisher. Close allocated connections.""" - for socket, hosts in self.outbound_sockets.values(): + for socket in self.outbound_sockets.values(): socket.setsockopt(zmq.LINGER, 0) socket.close() + + +class PublisherMultisend(PublisherBase): + + def __init__(self, conf, matchmaker, socket_type): + self.socket_type = socket_type + super(PublisherMultisend, self).__init__(conf, matchmaker) + + def _check_hosts_connections(self, target): + hosts = self.matchmaker.get_hosts(target) + + if str(target) in self.outbound_sockets: + socket = self.outbound_sockets[str(target)] + else: + socket = zmq_socket.ZmqSocket(self.zmq_context, self.socket_type) + self.outbound_sockets[str(target)] = socket + + for host in hosts: + self._connect_to_host(socket, host, target) + + return socket, hosts + + def _connect_to_host(self, socket, host, target): + address = zmq_address.get_tcp_direct_address(host) + stype = zmq_names.socket_type_str(self.socket_type) + try: + LOG.info(_LI("Connecting %(stype)s to %(address)s for %(target)s") + % {"stype": stype, + "address": address, + "target": target}) + socket.connect(address) + except zmq.ZMQError as e: + errmsg = _LE("Failed connecting %(stype) to %(address)s: %(e)s")\ + % (stype, address, e) + LOG.error(errmsg) + raise rpc_common.RPCException(errmsg) diff --git a/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_req_publisher.py b/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_req_publisher.py index 68beab903..a3096959c 100644 --- a/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_req_publisher.py +++ b/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_req_publisher.py @@ -52,7 +52,7 @@ class ReqPublisher(zmq_publisher_base.PublisherBase): LOG.info(_LI("Connecting REQ to %s") % connect_address) socket.connect(connect_address) - self.outbound_sockets[str(target)] = (socket, [host]) + self.outbound_sockets[str(target)] = socket return socket except zmq.ZMQError as e: diff --git a/oslo_messaging/_drivers/zmq_driver/server/consumers/zmq_router_consumer.py b/oslo_messaging/_drivers/zmq_driver/server/consumers/zmq_router_consumer.py index 58680da90..92b9364ba 100644 --- a/oslo_messaging/_drivers/zmq_driver/server/consumers/zmq_router_consumer.py +++ b/oslo_messaging/_drivers/zmq_driver/server/consumers/zmq_router_consumer.py @@ -30,6 +30,7 @@ class RouterConsumer(object): def __init__(self, conf, poller, server): + self.conf = conf self.poller = poller self.server = server @@ -38,6 +39,7 @@ class RouterConsumer(object): self.socket = self.context.socket(zmq.ROUTER) self.address = zmq_address.get_tcp_random_address(conf) self.port = self.socket.bind_to_random_port(self.address) + self.poller.register(self.socket, self._receive_message) LOG.info(_LI("Run ROUTER consumer on %(addr)s:%(port)d"), {"addr": self.address, "port": self.port}) @@ -49,7 +51,7 @@ class RouterConsumer(object): def listen(self, target): LOG.info(_LI("Listen to target %s") % str(target)) - self.poller.register(self.socket, 
self._receive_message) + # Do nothing here because we have single socket def cleanup(self): if not self.socket.closed: @@ -66,7 +68,9 @@ class RouterConsumer(object): assert msg_type is not None, 'Bad format: msg type expected' context = socket.recv_json() message = socket.recv_json() - LOG.debug("Received %s message %s" % (msg_type, str(message))) + LOG.info(_LI("Received %(msg_type)s message %(msg)s") + % {"msg_type": msg_type, + "msg": str(message)}) if msg_type == zmq_names.CALL_TYPE: return zmq_incoming_message.ZmqIncomingRequest( diff --git a/oslo_messaging/_drivers/zmq_driver/server/zmq_server.py b/oslo_messaging/_drivers/zmq_driver/server/zmq_server.py index 30cacd409..8f7f12657 100644 --- a/oslo_messaging/_drivers/zmq_driver/server/zmq_server.py +++ b/oslo_messaging/_drivers/zmq_driver/server/zmq_server.py @@ -28,8 +28,8 @@ zmq = zmq_async.import_zmq() class ZmqServer(base.Listener): - def __init__(self, conf, matchmaker=None): - self.conf = conf + def __init__(self, driver, conf, matchmaker=None): + super(ZmqServer, self).__init__(driver) self.matchmaker = matchmaker self.poller = zmq_async.get_poller() self.rpc_consumer = zmq_router_consumer.RouterConsumer( diff --git a/oslo_messaging/_drivers/zmq_driver/zmq_names.py b/oslo_messaging/_drivers/zmq_driver/zmq_names.py index 583600ec4..33fe9247c 100644 --- a/oslo_messaging/_drivers/zmq_driver/zmq_names.py +++ b/oslo_messaging/_drivers/zmq_driver/zmq_names.py @@ -12,6 +12,17 @@ # License for the specific language governing permissions and limitations # under the License. +from oslo_messaging._drivers.zmq_driver import zmq_async + +zmq = zmq_async.import_zmq() + + +ZMQ_SOCKET_STR = {zmq.DEALER: "DEALER", + zmq.ROUTER: "ROUTER", + zmq.REQ: "REQ", + zmq.REP: "REP", + zmq.PUB: "PUB", + zmq.SUB: "SUB"} FIELD_FAILURE = 'failure' FIELD_REPLY = 'reply' @@ -33,3 +44,7 @@ MULTISEND_TYPES = (CAST_FANOUT_TYPE, NOTIFY_FANOUT_TYPE) DIRECT_TYPES = (CALL_TYPE, CAST_TYPE, NOTIFY_TYPE) CAST_TYPES = (CAST_TYPE, CAST_FANOUT_TYPE) NOTIFY_TYPES = (NOTIFY_TYPE, NOTIFY_FANOUT_TYPE) + + +def socket_type_str(socket_type): + return ZMQ_SOCKET_STR[socket_type] diff --git a/oslo_messaging/_drivers/zmq_driver/zmq_socket.py b/oslo_messaging/_drivers/zmq_driver/zmq_socket.py new file mode 100644 index 000000000..a4f77b7e8 --- /dev/null +++ b/oslo_messaging/_drivers/zmq_driver/zmq_socket.py @@ -0,0 +1,57 @@ +# Copyright 2015 Mirantis, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import logging + +from oslo_messaging._drivers.zmq_driver import zmq_async +from oslo_messaging._drivers.zmq_driver import zmq_names + +LOG = logging.getLogger(__name__) + +zmq = zmq_async.import_zmq() + + +class ZmqSocket(object): + + def __init__(self, context, socket_type): + self.context = context + self.socket_type = socket_type + self.handle = context.socket(socket_type) + self.connections = set() + + def type_name(self): + return zmq_names(self.socket_type) + + def connections_count(self): + return len(self.connections) + + def connect(self, address): + if address not in self.connections: + self.handle.connect(address) + self.connections.add(address) + + def setsockopt(self, *args, **kwargs): + self.handle.setsockopt(*args, **kwargs) + + def send(self, *args, **kwargs): + self.handle.send(*args, **kwargs) + + def send_string(self, *args, **kwargs): + self.handle.send_string(*args, **kwargs) + + def send_json(self, *args, **kwargs): + self.handle.send_json(*args, **kwargs) + + def close(self, *args, **kwargs): + self.handle.close(*args, **kwargs) From eb7552bb0088d1448ec569e248ccc0d2362faa96 Mon Sep 17 00:00:00 2001 From: Oleksii Zamiatin Date: Thu, 13 Aug 2015 18:07:35 +0300 Subject: [PATCH 27/28] Acknowledgements implementation In order to make zmq driver implementation reliable support acknowledgements receiving from server side. Acknowledgements feature is supported only by DEALER/ROUTER publisher/consumer pair because other socket types don't support back-chatter. More pluggable publishers/consumers added (PUSH/PULL). Change-Id: I0d02394561c895575045668b43b4b7946f3a8239 --- .../client/publishers/zmq_dealer_publisher.py | 57 +++++++++++-- .../client/publishers/zmq_pub_publisher.py | 47 ++++++++++ .../client/publishers/zmq_publisher_base.py | 3 +- .../client/publishers/zmq_push_publisher.py | 57 +++++++++++++ .../_drivers/zmq_driver/client/zmq_client.py | 1 - .../server/consumers/zmq_consumer_base.py | 85 +++++++++++++++++++ .../server/consumers/zmq_pull_consumer.py | 69 +++++++++++++++ .../server/consumers/zmq_router_consumer.py | 77 +++++++++-------- .../zmq_driver/server/zmq_incoming_message.py | 42 +-------- .../_drivers/zmq_driver/zmq_names.py | 3 + .../_drivers/zmq_driver/zmq_socket.py | 21 ++++- .../tests/drivers/zmq/test_impl_zmq.py | 3 +- 12 files changed, 378 insertions(+), 87 deletions(-) create mode 100644 oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_pub_publisher.py create mode 100644 oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_push_publisher.py create mode 100644 oslo_messaging/_drivers/zmq_driver/server/consumers/zmq_consumer_base.py create mode 100644 oslo_messaging/_drivers/zmq_driver/server/consumers/zmq_pull_consumer.py diff --git a/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_dealer_publisher.py b/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_dealer_publisher.py index 9fdd6d7a8..805f1e3de 100644 --- a/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_dealer_publisher.py +++ b/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_dealer_publisher.py @@ -13,6 +13,7 @@ # under the License. 
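A quick usage note on the ZmqSocket wrapper introduced above: connect() is idempotent per address, and connections_count() is what the fanout path uses to decide how many copies of a message to push into the DEALER pipeline. A short example assuming the ZmqSocket class from this patch and a local pyzmq install (endpoints are placeholders):

    import zmq

    ctx = zmq.Context()
    sock = ZmqSocket(ctx, zmq.DEALER)
    for address in ('tcp://127.0.0.1:5555',
                    'tcp://127.0.0.1:5555',   # duplicate, ignored
                    'tcp://127.0.0.1:5556'):
        sock.connect(address)
    assert sock.connections_count() == 2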
import logging +import uuid from oslo_messaging._drivers.zmq_driver.client.publishers\ import zmq_publisher_base @@ -29,6 +30,7 @@ class DealerPublisher(zmq_publisher_base.PublisherMultisend): def __init__(self, conf, matchmaker): super(DealerPublisher, self).__init__(conf, matchmaker, zmq.DEALER) + self.ack_receiver = AcknowledgementReceiver() def send_request(self, request): @@ -37,6 +39,17 @@ class DealerPublisher(zmq_publisher_base.PublisherMultisend): dealer_socket, hosts = self._check_hosts_connections(request.target) + if not dealer_socket.connections: + # NOTE(ozamiatin): Here we can provide + # a queue for keeping messages to send them later + # when some listener appears. However such approach + # being more reliable will consume additional memory. + LOG.warning(_LW("Request %s was dropped because no connection") + % request.msg_type) + return + + self.ack_receiver.track_socket(dealer_socket.handle) + if request.msg_type in zmq_names.MULTISEND_TYPES: for _ in range(dealer_socket.connections_count()): self._send_request(dealer_socket, request) @@ -45,18 +58,44 @@ class DealerPublisher(zmq_publisher_base.PublisherMultisend): def _send_request(self, socket, request): - if not socket.connections: - # NOTE(ozamiatin): Here we can provide - # a queue for keeping messages to send them later - # when some listener appears. However such approach - # being more reliable will consume additional memory. - LOG.warning(_LW("Request %s was dropped because no connection") - % request.msg_type) - return + message_id = str(uuid.uuid1()) socket.send(b'', zmq.SNDMORE) - super(DealerPublisher, self)._send_request(socket, request) + socket.send_string(request.msg_type, zmq.SNDMORE) + socket.send_string(message_id, zmq.SNDMORE) + socket.send_json(request.context, zmq.SNDMORE) + socket.send_json(request.message) LOG.info(_LI("Sending message %(message)s to a target %(target)s") % {"message": request.message, "target": request.target}) + + def cleanup(self): + self.ack_receiver.cleanup() + super(DealerPublisher, self).cleanup() + + +class AcknowledgementReceiver(object): + + def __init__(self): + self.poller = zmq_async.get_poller() + self.thread = zmq_async.get_executor(self.poll_for_acknowledgements) + self.thread.execute() + + def _receive_acknowledgement(self, socket): + empty = socket.recv() + assert empty == b"", "Empty delimiter expected" + ack_message = socket.recv_json() + return ack_message + + def track_socket(self, socket): + self.poller.register(socket, self._receive_acknowledgement) + + def poll_for_acknowledgements(self): + ack_message, socket = self.poller.poll() + LOG.info(_LI("Message %s acknowledged") + % ack_message[zmq_names.FIELD_ID]) + + def cleanup(self): + self.thread.stop() + self.poller.close() diff --git a/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_pub_publisher.py b/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_pub_publisher.py new file mode 100644 index 000000000..228724b6c --- /dev/null +++ b/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_pub_publisher.py @@ -0,0 +1,47 @@ +# Copyright 2015 Mirantis, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import logging + +from oslo_messaging._drivers.zmq_driver.client.publishers\ + import zmq_publisher_base +from oslo_messaging._drivers.zmq_driver import zmq_async +from oslo_messaging._drivers.zmq_driver import zmq_names +from oslo_messaging._i18n import _LI + +LOG = logging.getLogger(__name__) + +zmq = zmq_async.import_zmq() + + +class PubPublisher(zmq_publisher_base.PublisherMultisend): + + def __init__(self, conf, matchmaker): + super(PubPublisher, self).__init__(conf, matchmaker, zmq.PUB) + + def send_request(self, request): + + if request.msg_type not in zmq_names.NOTIFY_TYPES: + raise zmq_publisher_base.UnsupportedSendPattern(request.msg_type) + + pub_socket, hosts = self._check_hosts_connections(request.target) + self._send_request(pub_socket, request) + + def _send_request(self, socket, request): + + super(PubPublisher, self)._send_request(socket, request) + + LOG.info(_LI("Publishing message %(message)s to a target %(target)s") + % {"message": request.message, + "target": request.target}) diff --git a/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_publisher_base.py b/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_publisher_base.py index 51de8a5e6..eff59dab9 100644 --- a/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_publisher_base.py +++ b/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_publisher_base.py @@ -110,8 +110,9 @@ class PublisherMultisend(PublisherBase): super(PublisherMultisend, self).__init__(conf, matchmaker) def _check_hosts_connections(self, target): + # TODO(ozamiatin): Place for significant optimization + # Matchmaker cache should be implemented hosts = self.matchmaker.get_hosts(target) - if str(target) in self.outbound_sockets: socket = self.outbound_sockets[str(target)] else: diff --git a/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_push_publisher.py b/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_push_publisher.py new file mode 100644 index 000000000..b8fc4fe51 --- /dev/null +++ b/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_push_publisher.py @@ -0,0 +1,57 @@ +# Copyright 2015 Mirantis, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
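To make the acknowledgement handshake above easier to follow, here is the client-side frame layout in isolation; a sketch only, assuming an already connected pyzmq DEALER socket, with the server answering non-CALL messages with a JSON ack carrying the same id:

    import uuid
    import zmq

    def send_request_with_id(socket, msg_type, context, message):
        # Frame layout once acknowledgements are in play:
        # <empty> <msg_type> <message_id> <context-json> <message-json>
        message_id = str(uuid.uuid1())
        socket.send(b'', zmq.SNDMORE)
        socket.send_string(msg_type, zmq.SNDMORE)
        socket.send_string(message_id, zmq.SNDMORE)
        socket.send_json(context, zmq.SNDMORE)
        socket.send_json(message)
        return message_id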
+ +import logging + +from oslo_messaging._drivers.zmq_driver.client.publishers\ + import zmq_publisher_base +from oslo_messaging._drivers.zmq_driver import zmq_async +from oslo_messaging._drivers.zmq_driver import zmq_names +from oslo_messaging._i18n import _LI, _LW + +LOG = logging.getLogger(__name__) + +zmq = zmq_async.import_zmq() + + +class PushPublisher(zmq_publisher_base.PublisherMultisend): + + def __init__(self, conf, matchmaker): + super(PushPublisher, self).__init__(conf, matchmaker, zmq.PUSH) + + def send_request(self, request): + + if request.msg_type == zmq_names.CALL_TYPE: + raise zmq_publisher_base.UnsupportedSendPattern(request.msg_type) + + push_socket, hosts = self._check_hosts_connections(request.target) + + if not push_socket.connections: + LOG.warning(_LW("Request %s was dropped because no connection") + % request.msg_type) + return + + if request.msg_type in zmq_names.MULTISEND_TYPES: + for _ in range(push_socket.connections_count()): + self._send_request(push_socket, request) + else: + self._send_request(push_socket, request) + + def _send_request(self, socket, request): + + super(PushPublisher, self)._send_request(socket, request) + + LOG.info(_LI("Publishing message %(message)s to a target %(target)s") + % {"message": request.message, + "target": request.target}) diff --git a/oslo_messaging/_drivers/zmq_driver/client/zmq_client.py b/oslo_messaging/_drivers/zmq_driver/client/zmq_client.py index 23dfd09eb..26a358f67 100644 --- a/oslo_messaging/_drivers/zmq_driver/client/zmq_client.py +++ b/oslo_messaging/_drivers/zmq_driver/client/zmq_client.py @@ -14,7 +14,6 @@ import contextlib - from oslo_messaging._drivers.zmq_driver.client.publishers\ import zmq_dealer_publisher from oslo_messaging._drivers.zmq_driver.client.publishers\ diff --git a/oslo_messaging/_drivers/zmq_driver/server/consumers/zmq_consumer_base.py b/oslo_messaging/_drivers/zmq_driver/server/consumers/zmq_consumer_base.py new file mode 100644 index 000000000..153f03d22 --- /dev/null +++ b/oslo_messaging/_drivers/zmq_driver/server/consumers/zmq_consumer_base.py @@ -0,0 +1,85 @@ +# Copyright 2015 Mirantis, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +import abc +import logging + +import six + +from oslo_messaging._drivers import common as rpc_common +from oslo_messaging._drivers.zmq_driver import zmq_async +from oslo_messaging._drivers.zmq_driver import zmq_socket +from oslo_messaging._i18n import _LE, _LI + +LOG = logging.getLogger(__name__) + +zmq = zmq_async.import_zmq() + + +@six.add_metaclass(abc.ABCMeta) +class ConsumerBase(object): + + def __init__(self, conf, poller, server): + self.conf = conf + self.poller = poller + self.server = server + self.sockets = [] + self.context = zmq.Context() + + def subscribe_socket(self, socket_type): + try: + socket = zmq_socket.ZmqRandomPortSocket( + self.conf, self.context, socket_type) + self.sockets.append(socket) + self.poller.register(socket, self.receive_message) + LOG.info(_LI("Run %(stype)s consumer on %(addr)s:%(port)d"), + {"stype": socket_type, + "addr": socket.bind_address, + "port": socket.port}) + return socket + except zmq.ZMQError as e: + errmsg = _LE("Failed binding to port %(port)d: %(e)s")\ + % (self.port, e) + LOG.error(errmsg) + raise rpc_common.RPCException(errmsg) + + @abc.abstractmethod + def listen(self, target): + """Associate new sockets with targets here""" + + @abc.abstractmethod + def receive_message(self, target): + """Method for poller - receiving message routine""" + + def cleanup(self): + for socket in self.sockets: + if not socket.handle.closed: + socket.setsockopt(zmq.LINGER, 0) + socket.close() + self.sockets = [] + + +class SingleSocketConsumer(ConsumerBase): + + def __init__(self, conf, poller, server, socket_type): + super(SingleSocketConsumer, self).__init__(conf, poller, server) + self.socket = self.subscribe_socket(socket_type) + + @property + def address(self): + return self.socket.bind_address + + @property + def port(self): + return self.socket.port diff --git a/oslo_messaging/_drivers/zmq_driver/server/consumers/zmq_pull_consumer.py b/oslo_messaging/_drivers/zmq_driver/server/consumers/zmq_pull_consumer.py new file mode 100644 index 000000000..a90f71b5a --- /dev/null +++ b/oslo_messaging/_drivers/zmq_driver/server/consumers/zmq_pull_consumer.py @@ -0,0 +1,69 @@ +# Copyright 2015 Mirantis, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
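The consumer base class above relies on a "bind to a random port, then advertise it" pattern. A self-contained sketch of that pattern with plain pyzmq (the tcp address stands in for what zmq_address.get_tcp_random_address() would return):

    import zmq

    context = zmq.Context()
    socket = context.socket(zmq.ROUTER)
    port = socket.bind_to_random_port('tcp://127.0.0.1')
    # The (host, port) pair would then be registered with the matchmaker
    # so that clients can look this consumer up per target.
    print('listening on tcp://127.0.0.1:%d' % port)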
+ +import logging + +from oslo_messaging._drivers import base +from oslo_messaging._drivers.zmq_driver.server.consumers\ + import zmq_consumer_base +from oslo_messaging._drivers.zmq_driver import zmq_async +from oslo_messaging._drivers.zmq_driver import zmq_names +from oslo_messaging._i18n import _LE, _LI + +LOG = logging.getLogger(__name__) + +zmq = zmq_async.import_zmq() + + +class PullIncomingMessage(base.IncomingMessage): + + def __init__(self, listener, context, message): + super(PullIncomingMessage, self).__init__(listener, context, message) + + def reply(self, reply=None, failure=None, log_failure=True): + """Reply is not needed for non-call messages.""" + + def acknowledge(self): + """Acknowledgments are not supported by this type of consumer.""" + + def requeue(self): + """Requeueing is not supported.""" + + +class PullConsumer(zmq_consumer_base.SingleSocketConsumer): + + def __init__(self, conf, poller, server): + super(PullConsumer, self).__init__(conf, poller, server, zmq.PULL) + + def listen(self, target): + LOG.info(_LI("Listen to target %s") % str(target)) + # Do nothing here because we have a single socket + + def receive_message(self, socket): + try: + msg_type = socket.recv_string() + assert msg_type is not None, 'Bad format: msg type expected' + context = socket.recv_json() + message = socket.recv_json() + LOG.info(_LI("Received %(msg_type)s message %(msg)s") + % {"msg_type": msg_type, + "msg": str(message)}) + + if msg_type in (zmq_names.CAST_TYPES + zmq_names.NOTIFY_TYPES): + return PullIncomingMessage(self.server, context, message) + else: + LOG.error(_LE("Unknown message type: %s") % msg_type) + + except zmq.ZMQError as e: + LOG.error(_LE("Receiving message failed: %s") % str(e)) diff --git a/oslo_messaging/_drivers/zmq_driver/server/consumers/zmq_router_consumer.py b/oslo_messaging/_drivers/zmq_driver/server/consumers/zmq_router_consumer.py index 92b9364ba..2219b0c27 100644 --- a/oslo_messaging/_drivers/zmq_driver/server/consumers/zmq_router_consumer.py +++ b/oslo_messaging/_drivers/zmq_driver/server/consumers/zmq_router_consumer.py @@ -14,9 +14,10 @@ import logging -from oslo_messaging._drivers import common as rpc_common +from oslo_messaging._drivers import base +from oslo_messaging._drivers.zmq_driver.server.consumers\ + import zmq_consumer_base from oslo_messaging._drivers.zmq_driver.server import zmq_incoming_message -from oslo_messaging._drivers.zmq_driver import zmq_address from oslo_messaging._drivers.zmq_driver import zmq_async from oslo_messaging._drivers.zmq_driver import zmq_names from oslo_messaging._i18n import _LE, _LI @@ -26,46 +27,52 @@ LOG = logging.getLogger(__name__) zmq = zmq_async.import_zmq() -class RouterConsumer(object): +class RouterIncomingMessage(base.IncomingMessage): + + def __init__(self, listener, context, message, socket, reply_id, msg_id, + poller): + super(RouterIncomingMessage, self).__init__(listener, context, message) + self.socket = socket + self.reply_id = reply_id + self.msg_id = msg_id + self.message = message + poller.resume_polling(socket) + + def reply(self, reply=None, failure=None, log_failure=True): + """Reply is not needed for non-call messages""" + + def acknowledge(self): + LOG.info("Sending acknowledge for %s", self.msg_id) + ack_message = {zmq_names.FIELD_ID: self.msg_id} + self.socket.send(self.reply_id, zmq.SNDMORE) + self.socket.send(b'', zmq.SNDMORE) + self.socket.send_json(ack_message) + + def requeue(self): + """Requeue is not supported""" + + +class RouterConsumer(zmq_consumer_base.SingleSocketConsumer): 
def __init__(self, conf, poller, server): - - self.conf = conf - self.poller = poller - self.server = server - - try: - self.context = zmq.Context() - self.socket = self.context.socket(zmq.ROUTER) - self.address = zmq_address.get_tcp_random_address(conf) - self.port = self.socket.bind_to_random_port(self.address) - self.poller.register(self.socket, self._receive_message) - LOG.info(_LI("Run ROUTER consumer on %(addr)s:%(port)d"), - {"addr": self.address, - "port": self.port}) - except zmq.ZMQError as e: - errmsg = _LE("Failed binding to port %(port)d: %(e)s")\ - % (self.port, e) - LOG.error(errmsg) - raise rpc_common.RPCException(errmsg) + super(RouterConsumer, self).__init__(conf, poller, server, zmq.ROUTER) def listen(self, target): LOG.info(_LI("Listen to target %s") % str(target)) - # Do nothing here because we have single socket - - def cleanup(self): - if not self.socket.closed: - self.socket.setsockopt(zmq.LINGER, 0) - self.socket.close() - - def _receive_message(self, socket): + # Do nothing here because we have a single socket + def receive_message(self, socket): try: reply_id = socket.recv() empty = socket.recv() assert empty == b'', 'Bad format: empty delimiter expected' msg_type = socket.recv_string() assert msg_type is not None, 'Bad format: msg type expected' + + msg_id = None + if msg_type != zmq_names.CALL_TYPE: + msg_id = socket.recv_string() + context = socket.recv_json() message = socket.recv_json() LOG.info(_LI("Received %(msg_type)s message %(msg)s") @@ -76,12 +83,10 @@ class RouterConsumer(object): return zmq_incoming_message.ZmqIncomingRequest( self.server, context, message, socket, reply_id, self.poller) - elif msg_type in zmq_names.CAST_TYPES: - return zmq_incoming_message.ZmqCastMessage( - self.server, context, message, socket, self.poller) - elif msg_type in zmq_names.NOTIFY_TYPES: - return zmq_incoming_message.ZmqNotificationMessage( - self.server, context, message, socket, self.poller) + elif msg_type in (zmq_names.CAST_TYPES + zmq_names.NOTIFY_TYPES): + return RouterIncomingMessage( + self.server, context, message, socket, reply_id, + msg_id, self.poller) else: LOG.error(_LE("Unknown message type: %s") % msg_type) diff --git a/oslo_messaging/_drivers/zmq_driver/server/zmq_incoming_message.py b/oslo_messaging/_drivers/zmq_driver/server/zmq_incoming_message.py index d953e9334..9d1351225 100644 --- a/oslo_messaging/_drivers/zmq_driver/server/zmq_incoming_message.py +++ b/oslo_messaging/_drivers/zmq_driver/server/zmq_incoming_message.py @@ -42,48 +42,14 @@ class ZmqIncomingRequest(base.IncomingMessage): message_reply = {zmq_names.FIELD_REPLY: reply, zmq_names.FIELD_FAILURE: failure, zmq_names.FIELD_LOG_FAILURE: log_failure} - LOG.debug("Replying %s REP", (str(message_reply))) + + LOG.info("Replying %s REP", (str(message_reply))) + self.received = True self.reply_socket.send(self.reply_id, zmq.SNDMORE) self.reply_socket.send(b'', zmq.SNDMORE) self.reply_socket.send_json(message_reply) self.poller.resume_polling(self.reply_socket) - def acknowledge(self): - pass - def requeue(self): - pass - - -class ZmqCastMessage(base.IncomingMessage): - - def __init__(self, listener, context, message, socket, poller): - super(ZmqCastMessage, self).__init__(listener, context, message) - poller.resume_polling(socket) - - def reply(self, reply=None, failure=None, log_failure=True): - """Reply is not needed for fanout(cast) messages""" - - def acknowledge(self): - pass - - def requeue(self): - pass - - -class ZmqNotificationMessage(base.IncomingMessage): - - def __init__(self, 
listener, context, message, socket, poller): - super(ZmqNotificationMessage, self).__init__(listener, context, - message) - poller.resume_polling(socket) - - def reply(self, reply=None, failure=None, log_failure=True): - """Reply is not needed for notification messages""" - - def acknowledge(self): - pass - - def requeue(self): - pass + """Requeue is not supported""" diff --git a/oslo_messaging/_drivers/zmq_driver/zmq_names.py b/oslo_messaging/_drivers/zmq_driver/zmq_names.py index 33fe9247c..1c3c33440 100644 --- a/oslo_messaging/_drivers/zmq_driver/zmq_names.py +++ b/oslo_messaging/_drivers/zmq_driver/zmq_names.py @@ -19,6 +19,8 @@ zmq = zmq_async.import_zmq() ZMQ_SOCKET_STR = {zmq.DEALER: "DEALER", zmq.ROUTER: "ROUTER", + zmq.PUSH: "PUSH", + zmq.PULL: "PULL", zmq.REQ: "REQ", zmq.REP: "REP", zmq.PUB: "PUB", @@ -27,6 +29,7 @@ ZMQ_SOCKET_STR = {zmq.DEALER: "DEALER", FIELD_FAILURE = 'failure' FIELD_REPLY = 'reply' FIELD_LOG_FAILURE = 'log_failure' +FIELD_ID = 'id' CALL_TYPE = 'call' CAST_TYPE = 'cast' diff --git a/oslo_messaging/_drivers/zmq_driver/zmq_socket.py b/oslo_messaging/_drivers/zmq_driver/zmq_socket.py index a4f77b7e8..59dee614e 100644 --- a/oslo_messaging/_drivers/zmq_driver/zmq_socket.py +++ b/oslo_messaging/_drivers/zmq_driver/zmq_socket.py @@ -14,6 +14,7 @@ import logging +from oslo_messaging._drivers.zmq_driver import zmq_address from oslo_messaging._drivers.zmq_driver import zmq_async from oslo_messaging._drivers.zmq_driver import zmq_names @@ -31,7 +32,7 @@ class ZmqSocket(object): self.connections = set() def type_name(self): - return zmq_names(self.socket_type) + return zmq_names.socket_type_str(self.socket_type) def connections_count(self): return len(self.connections) @@ -53,5 +54,23 @@ class ZmqSocket(object): def send_json(self, *args, **kwargs): self.handle.send_json(*args, **kwargs) + def recv(self, *args, **kwargs): + return self.handle.recv(*args, **kwargs) + + def recv_string(self, *args, **kwargs): + return self.handle.recv_string(*args, **kwargs) + + def recv_json(self, *args, **kwargs): + return self.handle.recv_json(*args, **kwargs) + def close(self, *args, **kwargs): self.handle.close(*args, **kwargs) + + +class ZmqRandomPortSocket(ZmqSocket): + + def __init__(self, conf, context, socket_type): + super(ZmqRandomPortSocket, self).__init__(context, socket_type) + self.conf = conf + self.bind_address = zmq_address.get_tcp_random_address(self.conf) + self.port = self.handle.bind_to_random_port(self.bind_address) diff --git a/oslo_messaging/tests/drivers/zmq/test_impl_zmq.py b/oslo_messaging/tests/drivers/zmq/test_impl_zmq.py index ca15f61ca..21641dd51 100644 --- a/oslo_messaging/tests/drivers/zmq/test_impl_zmq.py +++ b/oslo_messaging/tests/drivers/zmq/test_impl_zmq.py @@ -52,6 +52,7 @@ class TestServerListener(object): try: message = self.listener.poll() if message is not None: + message.acknowledge() self._received.set() self.message = message message.reply(reply=True) @@ -188,7 +189,7 @@ class TestZmqBasics(ZmqBaseTestCase): context = {} target.topic = target.topic + '.info' self.driver.send_notification(target, context, message, '3.0') - self.listener._received.wait() + self.listener._received.wait(5) self.assertTrue(self.listener._received.isSet()) From fed1f734df42a9a6969628ac7d6a8b4c47f571d8 Mon Sep 17 00:00:00 2001 From: Oleksii Zamiatin Date: Sun, 16 Aug 2015 16:22:13 +0300 Subject: [PATCH 28/28] Use pickle instead of jsonutils for serialization Pickle way of serialization is more preferrable because sometimes we have non json-serializable object in reply 
(e.g. cinder VolumeAttachment). Change-Id: I56634d4b4b9817833044a0d8f15cc6362d599a4f --- .../zmq_driver/client/publishers/zmq_dealer_publisher.py | 6 +++--- .../zmq_driver/client/publishers/zmq_publisher_base.py | 4 ++-- .../zmq_driver/client/publishers/zmq_req_publisher.py | 2 +- .../zmq_driver/server/consumers/zmq_pull_consumer.py | 4 ++-- .../zmq_driver/server/consumers/zmq_router_consumer.py | 6 +++--- .../_drivers/zmq_driver/server/zmq_incoming_message.py | 2 +- oslo_messaging/_drivers/zmq_driver/zmq_socket.py | 6 ++++++ 7 files changed, 18 insertions(+), 12 deletions(-) diff --git a/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_dealer_publisher.py b/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_dealer_publisher.py index 805f1e3de..a5c3f0fdf 100644 --- a/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_dealer_publisher.py +++ b/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_dealer_publisher.py @@ -63,8 +63,8 @@ class DealerPublisher(zmq_publisher_base.PublisherMultisend): socket.send(b'', zmq.SNDMORE) socket.send_string(request.msg_type, zmq.SNDMORE) socket.send_string(message_id, zmq.SNDMORE) - socket.send_json(request.context, zmq.SNDMORE) - socket.send_json(request.message) + socket.send_pyobj(request.context, zmq.SNDMORE) + socket.send_pyobj(request.message) LOG.info(_LI("Sending message %(message)s to a target %(target)s") % {"message": request.message, @@ -85,7 +85,7 @@ class AcknowledgementReceiver(object): def _receive_acknowledgement(self, socket): empty = socket.recv() assert empty == b"", "Empty delimiter expected" - ack_message = socket.recv_json() + ack_message = socket.recv_pyobj() return ack_message def track_socket(self, socket): diff --git a/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_publisher_base.py b/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_publisher_base.py index eff59dab9..fccd74bd1 100644 --- a/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_publisher_base.py +++ b/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_publisher_base.py @@ -93,8 +93,8 @@ class PublisherBase(object): :type request: zmq_request.Request """ socket.send_string(request.msg_type, zmq.SNDMORE) - socket.send_json(request.context, zmq.SNDMORE) - socket.send_json(request.message) + socket.send_pyobj(request.context, zmq.SNDMORE) + socket.send_pyobj(request.message) def cleanup(self): """Cleanup publisher. 
Close allocated connections.""" diff --git a/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_req_publisher.py b/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_req_publisher.py index a3096959c..066be8bd5 100644 --- a/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_req_publisher.py +++ b/oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_req_publisher.py @@ -64,7 +64,7 @@ class ReqPublisher(zmq_publisher_base.PublisherBase): def _receive_reply(socket, request): def _receive_method(socket): - return socket.recv_json() + return socket.recv_pyobj() # NOTE(ozamiatin): Check for retry here (no retries now) with contextlib.closing(zmq_async.get_reply_poller()) as poller: diff --git a/oslo_messaging/_drivers/zmq_driver/server/consumers/zmq_pull_consumer.py b/oslo_messaging/_drivers/zmq_driver/server/consumers/zmq_pull_consumer.py index a90f71b5a..98ef3a73c 100644 --- a/oslo_messaging/_drivers/zmq_driver/server/consumers/zmq_pull_consumer.py +++ b/oslo_messaging/_drivers/zmq_driver/server/consumers/zmq_pull_consumer.py @@ -54,8 +54,8 @@ class PullConsumer(zmq_consumer_base.SingleSocketConsumer): try: msg_type = socket.recv_string() assert msg_type is not None, 'Bad format: msg type expected' - context = socket.recv_json() - message = socket.recv_json() + context = socket.recv_pyobj() + message = socket.recv_pyobj() LOG.info(_LI("Received %(msg_type)s message %(msg)s") % {"msg_type": msg_type, "msg": str(message)}) diff --git a/oslo_messaging/_drivers/zmq_driver/server/consumers/zmq_router_consumer.py b/oslo_messaging/_drivers/zmq_driver/server/consumers/zmq_router_consumer.py index 2219b0c27..bfbfe9fdc 100644 --- a/oslo_messaging/_drivers/zmq_driver/server/consumers/zmq_router_consumer.py +++ b/oslo_messaging/_drivers/zmq_driver/server/consumers/zmq_router_consumer.py @@ -46,7 +46,7 @@ class RouterIncomingMessage(base.IncomingMessage): ack_message = {zmq_names.FIELD_ID: self.msg_id} self.socket.send(self.reply_id, zmq.SNDMORE) self.socket.send(b'', zmq.SNDMORE) - self.socket.send_json(ack_message) + self.socket.send_pyobj(ack_message) def requeue(self): """Requeue is not supported""" @@ -73,8 +73,8 @@ class RouterConsumer(zmq_consumer_base.SingleSocketConsumer): if msg_type != zmq_names.CALL_TYPE: msg_id = socket.recv_string() - context = socket.recv_json() - message = socket.recv_json() + context = socket.recv_pyobj() + message = socket.recv_pyobj() LOG.info(_LI("Received %(msg_type)s message %(msg)s") % {"msg_type": msg_type, "msg": str(message)}) diff --git a/oslo_messaging/_drivers/zmq_driver/server/zmq_incoming_message.py b/oslo_messaging/_drivers/zmq_driver/server/zmq_incoming_message.py index 9d1351225..f43ec2325 100644 --- a/oslo_messaging/_drivers/zmq_driver/server/zmq_incoming_message.py +++ b/oslo_messaging/_drivers/zmq_driver/server/zmq_incoming_message.py @@ -48,7 +48,7 @@ class ZmqIncomingRequest(base.IncomingMessage): self.received = True self.reply_socket.send(self.reply_id, zmq.SNDMORE) self.reply_socket.send(b'', zmq.SNDMORE) - self.reply_socket.send_json(message_reply) + self.reply_socket.send_pyobj(message_reply) self.poller.resume_polling(self.reply_socket) def requeue(self): diff --git a/oslo_messaging/_drivers/zmq_driver/zmq_socket.py b/oslo_messaging/_drivers/zmq_driver/zmq_socket.py index 59dee614e..2a4144c5a 100644 --- a/oslo_messaging/_drivers/zmq_driver/zmq_socket.py +++ b/oslo_messaging/_drivers/zmq_driver/zmq_socket.py @@ -54,6 +54,9 @@ class ZmqSocket(object): def send_json(self, *args, **kwargs): self.handle.send_json(*args, **kwargs) 
+ def send_pyobj(self, *args, **kwargs): + self.handle.send_pyobj(*args, **kwargs) + def recv(self, *args, **kwargs): return self.handle.recv(*args, **kwargs) @@ -63,6 +66,9 @@ class ZmqSocket(object): def recv_json(self, *args, **kwargs): return self.handle.recv_json(*args, **kwargs) + def recv_pyobj(self, *args, **kwargs): + return self.handle.recv_pyobj(*args, **kwargs) + def close(self, *args, **kwargs): self.handle.close(*args, **kwargs)
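
Editor's note: for readers tracing the RouterConsumer/DealerPublisher changes above, the following client-side sketch illustrates the multipart wire format the ROUTER consumer expects after the final patch. It is not part of the patch series; the endpoint address and the 'demo'/'ping' payloads are placeholders, and the real driver binds its ROUTER socket to a random TCP port via ZmqRandomPortSocket.

    # Minimal sketch, assuming pyzmq is installed and a ROUTER consumer is
    # reachable on the placeholder endpoint below.
    import uuid

    import zmq

    ctx = zmq.Context()
    socket = ctx.socket(zmq.DEALER)
    socket.connect("tcp://127.0.0.1:5555")  # assumed endpoint, see note above

    msg_id = uuid.uuid4().hex
    socket.send(b'', zmq.SNDMORE)                      # empty delimiter frame
    socket.send_string('cast', zmq.SNDMORE)            # msg_type frame
    socket.send_string(msg_id, zmq.SNDMORE)            # msg_id frame (omitted for 'call')
    socket.send_pyobj({'user': 'demo'}, zmq.SNDMORE)   # request context, pickled
    socket.send_pyobj({'method': 'ping', 'args': {}})  # request message, pickled

    # For cast/notify messages the consumer acknowledges with {'id': msg_id}
    # (FIELD_ID) behind an empty delimiter; this recv blocks until it arrives.
    assert socket.recv() == b''
    ack = socket.recv_pyobj()
    print(ack)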
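
Editor's note: the rationale for the send_json/recv_json to send_pyobj/recv_pyobj switch in the last commit can be shown without ZeroMQ at all. This is a small illustration, not code from the patches: pyzmq's *_json helpers only carry JSON-encodable data, while the *_pyobj helpers pickle arbitrary Python objects (such as the cinder VolumeAttachment mentioned in the commit message); a datetime stands in for such an object here.

    import datetime
    import json
    import pickle

    reply = {'result': datetime.datetime.utcnow()}  # not JSON-serializable

    try:
        json.dumps(reply)
    except TypeError as exc:
        print("json.dumps failed: %s" % exc)

    wire_data = pickle.dumps(reply)   # roughly what send_pyobj puts on the wire
    print(pickle.loads(wire_data))    # roughly what recv_pyobj hands back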