# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

"""Base classes for our unit tests.

Allows overriding of flags for use of fakes, and some black magic for
inline callbacks.

"""
import contextlib
import datetime

import eventlet
eventlet.monkey_patch(os=False)

import copy
import inspect
import mock
import os

import fixtures
from oslo_cache import core as cache
from oslo_concurrency import lockutils
from oslo_config import cfg
from oslo_config import fixture as config_fixture
from oslo_log.fixture import logging_error as log_fixture
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_utils import timeutils
from oslotest import moxstubout
import six
import testtools

from nova import context
from nova import db
from nova.network import manager as network_manager
from nova.network.security_group import openstack_driver
from nova.objects import base as objects_base
from nova.tests import fixtures as nova_fixtures
from nova.tests.unit import conf_fixture
from nova.tests.unit import policy_fixture
from nova import utils

CONF = cfg.CONF

logging.register_options(CONF)
CONF.set_override('use_stderr', False)
logging.setup(CONF, 'nova')
cache.configure(CONF)

_TRUE_VALUES = ('True', 'true', '1', 'yes')

if six.PY2:
    nested = contextlib.nested
else:
    @contextlib.contextmanager
    def nested(*contexts):
        with contextlib.ExitStack() as stack:
            yield [stack.enter_context(c) for c in contexts]

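# A usage sketch (illustrative only; the patched paths are hypothetical):
# ``nested`` opens several context managers in a single ``with`` statement
# on both Python 2 and Python 3, e.g.
#
#     with nested(mock.patch('nova.some_module.foo'),
#                 mock.patch('nova.some_module.bar')) as (foo_mock, bar_mock):
#         foo_mock.return_value = 'fake'
#
# Each element of the yielded list is whatever the corresponding context
# manager returned from its __enter__().
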
class SampleNetworks(fixtures.Fixture):

    """Create sample networks in the database."""

    def __init__(self, host=None):
        self.host = host

    def setUp(self):
        super(SampleNetworks, self).setUp()
        ctxt = context.get_admin_context()
        network = network_manager.VlanManager(host=self.host)
        bridge_interface = CONF.flat_interface or CONF.vlan_interface
        network.create_networks(ctxt,
                                label='test',
                                cidr='10.0.0.0/8',
                                multi_host=CONF.multi_host,
                                num_networks=CONF.num_networks,
                                network_size=CONF.network_size,
                                cidr_v6=CONF.fixed_range_v6,
                                gateway=CONF.gateway,
                                gateway_v6=CONF.gateway_v6,
                                bridge=CONF.flat_network_bridge,
                                bridge_interface=bridge_interface,
                                vpn_start=CONF.vpn_start,
                                vlan_start=CONF.vlan_start,
                                dns1=CONF.flat_network_dns)
        for net in db.network_get_all(ctxt):
            network.set_network_host(ctxt, net)

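# A usage sketch (illustrative only; the host name is hypothetical): tests
# that need networks present in the database enable the fixture from their
# setUp(), e.g.
#
#     self.useFixture(SampleNetworks(host='testhost'))
#
# which creates the 'test' networks through the VlanManager and assigns
# each of them to the given host.
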
class TestingException(Exception):
    pass

class skipIf(object):
    def __init__(self, condition, reason):
        self.condition = condition
        self.reason = reason

    def __call__(self, func_or_cls):
        condition = self.condition
        reason = self.reason
        if inspect.isfunction(func_or_cls):
            @six.wraps(func_or_cls)
            def wrapped(*args, **kwargs):
                if condition:
                    raise testtools.TestCase.skipException(reason)
                return func_or_cls(*args, **kwargs)

            return wrapped
        elif inspect.isclass(func_or_cls):
            orig_func = getattr(func_or_cls, 'setUp')

            @six.wraps(orig_func)
            def new_func(self, *args, **kwargs):
                if condition:
                    raise testtools.TestCase.skipException(reason)
                orig_func(self, *args, **kwargs)

            func_or_cls.setUp = new_func
            return func_or_cls
        else:
            raise TypeError('skipIf can be used only with functions or '
                            'classes')

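# A usage sketch (illustrative only; the condition and test name are
# hypothetical). The decorator can wrap a single test function, or a whole
# test class, in which case its setUp() raises the skip:
#
#     @skipIf(not os.path.exists('/dev/kvm'), 'KVM is not available')
#     def test_something_needing_kvm(self):
#         ...
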
def _patch_mock_to_raise_for_invalid_assert_calls():
    def raise_for_invalid_assert_calls(wrapped):
        def wrapper(_self, name):
            valid_asserts = [
                'assert_called_with',
                'assert_called_once_with',
                'assert_has_calls',
                'assert_any_calls']

            if name.startswith('assert') and name not in valid_asserts:
                raise AttributeError('%s is not a valid mock assert method'
                                     % name)

            return wrapped(_self, name)
        return wrapper
    mock.Mock.__getattr__ = raise_for_invalid_assert_calls(
        mock.Mock.__getattr__)

# NOTE(gibi): needs to be called only once at import time
# to patch the mock lib
_patch_mock_to_raise_for_invalid_assert_calls()

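# Illustrative sketch of what the patch above guards against (the mock
# object and the misspelled call are hypothetical):
#
#     m = mock.Mock()
#     m.assert_caled_once_with('x')   # note the typo
#
# Without the patch, the misspelled assertion would silently create and
# call a child mock, so the test could never fail; with the patched
# __getattr__ it raises AttributeError instead.
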
class TestCase(testtools.TestCase):
    """Test case base class for all unit tests.

    Due to the slowness of DB access, please consider deriving from
    `NoDBTestCase` first.
    """
    USES_DB = True
    USES_DB_SELF = False
    REQUIRES_LOCKING = False

    TIMEOUT_SCALING_FACTOR = 1

    def setUp(self):
        """Run before each test method to initialize test environment."""
        super(TestCase, self).setUp()
        self.useFixture(nova_fixtures.Timeout(
            os.environ.get('OS_TEST_TIMEOUT', 0),
            self.TIMEOUT_SCALING_FACTOR))

        self.useFixture(fixtures.NestedTempfile())
        self.useFixture(fixtures.TempHomeDir())
        self.useFixture(log_fixture.get_logging_handle_error_fixture())

        self.useFixture(nova_fixtures.OutputStreamCapture())

        self.useFixture(nova_fixtures.StandardLogging())

        # NOTE(sdague): because of the way we were using the lock
        # wrapper we ended up with a lot of tests that started
        # relying on global external locking being set up for them. We
        # consider all of these to be *bugs*. Tests should not require
        # global external locking, or if they do, they should
        # explicitly set it up themselves.
        #
        # The following REQUIRES_LOCKING class parameter is provided
        # as a bridge to get us there. No new tests should be added
        # that require it, and existing classes and tests should be
        # fixed to not need it.
        if self.REQUIRES_LOCKING:
            lock_path = self.useFixture(fixtures.TempDir()).path
            self.fixture = self.useFixture(
                config_fixture.Config(lockutils.CONF))
            self.fixture.config(lock_path=lock_path,
                                group='oslo_concurrency')

        self.useFixture(conf_fixture.ConfFixture(CONF))
        self.useFixture(nova_fixtures.RPCFixture('nova.test'))

        if self.USES_DB:
            self.useFixture(nova_fixtures.Database())
            self.useFixture(nova_fixtures.Database(database='api'))
            self.useFixture(nova_fixtures.DefaultFlavorsFixture())
        elif not self.USES_DB_SELF:
            self.useFixture(nova_fixtures.DatabasePoisonFixture())

        # NOTE(blk-u): WarningsFixture must be after the Database fixture
        # because sqlalchemy-migrate messes with the warnings filters.
        self.useFixture(nova_fixtures.WarningsFixture())

        # NOTE(danms): Make sure to reset us back to non-remote objects
        # for each test to avoid interactions. Also, backup the object
        # registry.
        objects_base.NovaObject.indirection_api = None
        self._base_test_obj_backup = copy.copy(
            objects_base.NovaObjectRegistry._registry._obj_classes)
        self.addCleanup(self._restore_obj_registry)

        self.useFixture(nova_fixtures.StableObjectJsonFixture())

        # NOTE(mnaser): All calls to utils.is_neutron() are cached in
        # nova.utils._IS_NEUTRON. We set it to None to avoid any
        # caching of that value.
        utils._IS_NEUTRON = None

        mox_fixture = self.useFixture(moxstubout.MoxStubout())
        self.mox = mox_fixture.mox
        self.stubs = mox_fixture.stubs
        self.addCleanup(self._clear_attrs)
        self.useFixture(fixtures.EnvironmentVariable('http_proxy'))
        self.policy = self.useFixture(policy_fixture.PolicyFixture())

        self.useFixture(nova_fixtures.PoisonFunctions())

        openstack_driver.DRIVER_CACHE = {}

        self.useFixture(nova_fixtures.ForbidNewLegacyNotificationFixture())

    def _restore_obj_registry(self):
        objects_base.NovaObjectRegistry._registry._obj_classes = \
            self._base_test_obj_backup

    def _clear_attrs(self):
        # Delete attributes that don't start with _ so they don't pin
        # memory around unnecessarily for the duration of the test
        # suite
        for key in [k for k in self.__dict__.keys() if k[0] != '_']:
            # NOTE(gmann): Skip the 'id' attribute. When tests are
            # generated with testscenarios, the 'id' attribute is added
            # while cloning the tests, and the test suite later uses it
            # to report the results of each generated test.
            if key != 'id':
                del self.__dict__[key]

    def stub_out(self, old, new):
        """Replace a function for the duration of the test.

        Use the monkey patch fixture to replace a function for the
        duration of a test. Useful when you want to provide fake
        methods instead of mocks during testing.

        This should be used instead of self.stubs.Set (which is based
        on mox) going forward.
        """
        self.useFixture(fixtures.MonkeyPatch(old, new))

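    # A usage sketch (illustrative only; the dotted path and the fake are
    # hypothetical):
    #
    #     def fake_do_thing(*args, **kwargs):
    #         return 'faked'
    #
    #     self.stub_out('nova.some_module.do_thing', fake_do_thing)
    #
    # The MonkeyPatch fixture restores the original attribute automatically
    # when the test finishes.
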
    def flags(self, **kw):
        """Override flag variables for a test."""
        group = kw.pop('group', None)
        for k, v in six.iteritems(kw):
            CONF.set_override(k, v, group)

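    # A usage sketch (illustrative only; ``my_option`` is a hypothetical
    # config option, while lock_path/oslo_concurrency is the real pair used
    # by REQUIRES_LOCKING above):
    #
    #     self.flags(my_option=True)
    #     self.flags(lock_path='/tmp/locks', group='oslo_concurrency')
    #
    # Each keyword maps directly onto CONF.set_override(key, value, group),
    # and the ConfFixture set up in setUp() resets the overrides afterwards.
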
    def start_service(self, name, host=None, **kwargs):
        svc = self.useFixture(
            nova_fixtures.ServiceFixture(name, host, **kwargs))
        return svc.service

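    # A usage sketch (illustrative only; the binary and host names are
    # hypothetical):
    #
    #     self.compute = self.start_service('compute', host='host1')
    #
    # The ServiceFixture starts the service, stops it again on cleanup, and
    # the return value is the running service instance itself.
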
    def assertJsonEqual(self, expected, observed):
        """Assert that two complex data structures are JSON equivalent.

        We use data structures which serialize down to JSON throughout
        the code, and often we just need to know that these are JSON
        equivalent. This means that list order is not significant, so
        lists are sorted before comparison.

        Because this is a recursive set of assertions, when a failure
        happens we want to expose both the local failure and the
        global view of the two data structures being compared. So we
        raise a MismatchError which includes the inner failure as the
        mismatch, and the passed in expected / observed as matchee /
        matcher.

        """
        if isinstance(expected, six.string_types):
            expected = jsonutils.loads(expected)
        if isinstance(observed, six.string_types):
            observed = jsonutils.loads(observed)

        def sort_key(x):
            if isinstance(x, (set, list)) or isinstance(x, datetime.datetime):
                return str(x)
            if isinstance(x, dict):
                items = ((sort_key(key), sort_key(value))
                         for key, value in x.items())
                return sorted(items)
            return x

        def inner(expected, observed):
            if isinstance(expected, dict) and isinstance(observed, dict):
                self.assertEqual(len(expected), len(observed))
                expected_keys = sorted(expected)
                observed_keys = sorted(observed)
                self.assertEqual(expected_keys, observed_keys)

                for key in list(six.iterkeys(expected)):
                    inner(expected[key], observed[key])
            elif (isinstance(expected, (list, tuple, set)) and
                  isinstance(observed, (list, tuple, set))):
                self.assertEqual(len(expected), len(observed))

                expected_values_iter = iter(sorted(expected, key=sort_key))
                observed_values_iter = iter(sorted(observed, key=sort_key))

                for i in range(len(expected)):
                    inner(next(expected_values_iter),
                          next(observed_values_iter))
            else:
                self.assertEqual(expected, observed)

        try:
            inner(expected, observed)
        except testtools.matchers.MismatchError as e:
            inner_mismatch = e.mismatch
            # inverting the observed / expected because testtools
            # error messages assume expected is second. Possibly makes
            # reading the error messages less confusing.
            raise testtools.matchers.MismatchError(observed, expected,
                                                   inner_mismatch,
                                                   verbose=True)

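    # A usage sketch (illustrative only): the arguments may be JSON strings
    # or already-deserialized structures, and list ordering is ignored, so
    # the following assertion passes:
    #
    #     self.assertJsonEqual('{"a": [1, 2]}', {"a": [2, 1]})
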
    def assertPublicAPISignatures(self, baseinst, inst):
        def get_public_apis(inst):
            methods = {}

            def findmethods(object):
                return inspect.ismethod(object) or inspect.isfunction(object)

            for (name, value) in inspect.getmembers(inst, findmethods):
                if name.startswith("_"):
                    continue
                methods[name] = value
            return methods

        baseclass = baseinst.__class__.__name__
        basemethods = get_public_apis(baseinst)
        implmethods = get_public_apis(inst)

        extranames = []
        for name in sorted(implmethods.keys()):
            if name not in basemethods:
                extranames.append(name)

        self.assertEqual([], extranames,
                         "public APIs not listed in base class %s" %
                         baseclass)

        for name in sorted(implmethods.keys()):
            baseargs = inspect.getargspec(basemethods[name])
            implargs = inspect.getargspec(implmethods[name])

            self.assertEqual(baseargs, implargs,
                             "%s args don't match base class %s" %
                             (name, baseclass))

class APICoverage(object):

    cover_api = None

    def test_api_methods(self):
        self.assertTrue(self.cover_api is not None)
        api_methods = [x for x in dir(self.cover_api)
                       if not x.startswith('_')]
        test_methods = [x[5:] for x in dir(self)
                        if x.startswith('test_')]
        self.assertThat(
            test_methods,
            testtools.matchers.ContainsAll(api_methods))

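# A usage sketch (illustrative only; ``FakeAPI`` and the test names are
# hypothetical). A test case mixes APICoverage in and points ``cover_api``
# at the API class; test_api_methods then fails unless every public method
# of that API has a matching ``test_<method>``:
#
#     class FakeAPITestCase(NoDBTestCase, APICoverage):
#         cover_api = FakeAPI
#
#         def test_create(self):
#             ...
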
class TimeOverride(fixtures.Fixture):
    """Fixture to start and remove time override."""

    def setUp(self):
        super(TimeOverride, self).setUp()
        timeutils.set_time_override()
        self.addCleanup(timeutils.clear_time_override)

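# A usage sketch (illustrative only): a test that needs a frozen clock
# enables the fixture and then moves time forward explicitly, e.g.
#
#     self.useFixture(TimeOverride())
#     timeutils.advance_time_seconds(60)
#
# The override is removed automatically when the fixture is cleaned up.
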
class NoDBTestCase(TestCase):
    """`NoDBTestCase` differs from TestCase in that DB access is not supported.

    This makes tests run significantly faster. If possible, all new tests
    should derive from this class.
    """
    USES_DB = False

class BaseHookTestCase(NoDBTestCase):
    def assert_has_hook(self, expected_name, func):
        self.assertTrue(hasattr(func, '__hook_name__'))
        self.assertEqual(expected_name, func.__hook_name__)

class MatchType(object):
    """Matches any instance of a specified type.

    The MatchType class is a helper for use with the
    mock.assert_called_with() method that lets you
    assert that a particular parameter has a specific
    data type. It enables a stricter check than the
    built-in mock.ANY helper, and is the equivalent of
    the mox.IsA() function from the legacy mox library.

    Example usage could be:

        mock_some_method.assert_called_once_with(
            "hello",
            MatchType(objects.Instance),
            mock.ANY,
            "world",
            MatchType(objects.KeyPair))
    """
    def __init__(self, wanttype):
        self.wanttype = wanttype

    def __eq__(self, other):
        return type(other) == self.wanttype

    def __ne__(self, other):
        return type(other) != self.wanttype

    def __repr__(self):
        return "<MatchType:" + str(self.wanttype) + ">"

class ContainKeyValue(object):
    """Checks whether a key/value pair is in a dict parameter.

    The ContainKeyValue class is a helper for use with the
    mock.assert_*() methods that lets you assert that a particular
    dict contains a key/value pair. It enables a stricter check than
    the built-in mock.ANY helper, and is the equivalent of the
    mox.ContainsKeyValue() function from the legacy mox library.

    Example usage could be:

        mock_some_method.assert_called_once_with(
            "hello",
            ContainKeyValue('foo', bar),
            mock.ANY,
            "world",
            ContainKeyValue('hello', world))
    """
    def __init__(self, wantkey, wantvalue):
        self.wantkey = wantkey
        self.wantvalue = wantvalue

    def __eq__(self, other):
        try:
            return other[self.wantkey] == self.wantvalue
        except (KeyError, TypeError):
            return False

    def __ne__(self, other):
        try:
            return other[self.wantkey] != self.wantvalue
        except (KeyError, TypeError):
            return True

    def __repr__(self):
        return "<ContainKeyValue: key " + str(self.wantkey) + \
               " and value " + str(self.wantvalue) + ">"