Stop using six library
Since we've dropped support for Python 2.7, it's time to look at the bright future that Python 3.x will bring and stop forcing compatibility with older versions. This patch removes the six library from the requirements and converts all of its usages to the native Python 3 equivalents, not looking back.

Change-Id: Ic443c7e4d5a5a849c4dc220207f8957e4c90bf53
This commit is contained in:
parent b2c0c52a95
commit 9b1450398c
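The conversion below is a handful of mechanical substitutions repeated across the tree. A minimal sketch of the patterns, for orientation only (the names log_call and Base are illustrative and do not appear in the patch; only standard-library modules are assumed):

import abc
import functools


# @six.wraps(fn) becomes @functools.wraps(fn)
def log_call(fn):
    @functools.wraps(fn)
    def inner(*args, **kwargs):
        return fn(*args, **kwargs)
    return inner


# @six.add_metaclass(abc.ABCMeta) becomes the metaclass keyword argument
class Base(object, metaclass=abc.ABCMeta):
    @abc.abstractmethod
    def do(self):
        """Subclasses implement this."""


# six.text_type -> str, six.string_types -> str (in isinstance checks),
# six.iteritems(d) -> d.items(), six.moves.reduce -> functools.reduce,
# six.moves.builtins -> builtins,
# six.assertRaisesRegex(self, ...) -> self.assertRaisesRegex(...)
assert str(42) == '42' and isinstance('aa:bb:cc', str)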
@@ -12,10 +12,11 @@
 # limitations under the License.
 
 """Generic Rest Api tools."""
 
+import functools
+
 import flask
 from oslo_config import cfg
 from oslo_utils import uuidutils
-import six
 
 from ironic_inspector.common.i18n import _
 from ironic_inspector import utils
@@ -29,7 +30,7 @@ def raises_coercion_exceptions(fn):
     :raises: utils.Error when the coercion function raises an
              AssertionError or a ValueError
     """
-    @six.wraps(fn)
+    @functools.wraps(fn)
     def inner(*args, **kwargs):
         try:
             ret = fn(*args, **kwargs)
@@ -46,7 +47,7 @@ def request_field(field_name):
     :returns: a decorator
     """
    def outer(fn):
-        @six.wraps(fn)
+        @functools.wraps(fn)
         def inner(*args, **kwargs):
             default = kwargs.pop('default', None)
             field = flask.request.args.get(field_name, default=default)
@@ -21,7 +21,6 @@ from alembic import config as alembic_config
 from alembic import util as alembic_util
 from oslo_config import cfg
 from oslo_log import log
-import six
 
 from ironic_inspector import conf  # noqa
 
@@ -78,7 +77,7 @@ def do_alembic_command(config, cmd, *args, **kwargs):
     try:
         getattr(alembic_command, cmd)(config, *args, **kwargs)
     except alembic_util.CommandError as e:
-        alembic_util.err(six.text_type(e))
+        alembic_util.err(str(e))
 
 
 def main(args=sys.argv[1:]):
@@ -19,7 +19,6 @@ import sys
 from oslo_config import cfg
 from oslo_log import log
 from oslo_utils import encodeutils
-import six
 
 from ironic_inspector.common.i18n import _
 from ironic_inspector.conf import opts
@@ -120,7 +119,7 @@ def main():
         print(_("... terminating migration tool"), file=sys.stderr)
         return 130
     except Exception as e:
-        print(encodeutils.safe_encode(six.text_type(e)), file=sys.stderr)
+        print(encodeutils.safe_encode(str(e)), file=sys.stderr)
         return 1
 
 
@@ -15,7 +15,6 @@ import abc
 
 from oslo_concurrency import lockutils
 from oslo_config import cfg
-import six
 
 from ironic_inspector.common import coordination
 
@@ -24,8 +23,7 @@ _LOCK_TEMPLATE = 'node-%s'
 _SEMAPHORES = lockutils.Semaphores()
 
 
-@six.add_metaclass(abc.ABCMeta)
-class BaseLock(object):
+class BaseLock(object, metaclass=abc.ABCMeta):
 
     @abc.abstractmethod
     def acquire(self, blocking=True):
@@ -11,6 +11,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import functools
 import os
 import random
 import re
@@ -18,7 +19,6 @@ import re
 import flask
 from oslo_utils import strutils
 from oslo_utils import uuidutils
-import six
 
 from ironic_inspector import api_tools
 from ironic_inspector.common import context
@@ -124,7 +124,7 @@ def error_response(exc, code=500):
 
 
 def convert_exceptions(func):
-    @six.wraps(func)
+    @functools.wraps(func)
     def wrapper(*args, **kwargs):
         try:
             return func(*args, **kwargs)
@@ -229,7 +229,7 @@ def api(path, is_public_api=False, rule=None, verb_to_rule_map=None,
     def outer(func):
         @_app.route(path, **flask_kwargs)
         @convert_exceptions
-        @six.wraps(func)
+        @functools.wraps(func)
         def wrapper(*args, **kwargs):
             flask.request.context = context.RequestContext.from_environ(
                 flask.request.environ,
@@ -17,6 +17,7 @@ import collections
 import contextlib
 import copy
 import datetime
+import functools
 import json
 import operator
 
@@ -28,7 +29,6 @@ from oslo_utils import excutils
 from oslo_utils import reflection
 from oslo_utils import timeutils
 from oslo_utils import uuidutils
-import six
 from sqlalchemy.orm import exc as orm_errors
 
 from ironic_inspector.common.i18n import _
@@ -418,7 +418,7 @@ class NodeInfo(object):
         """
         ironic = ironic or self.ironic
         ports = self.ports()
-        if isinstance(port, six.string_types):
+        if isinstance(port, str):
             port = ports[port]
 
         LOG.debug('Updating port %(mac)s with patches %(patches)s',
@@ -481,7 +481,7 @@ class NodeInfo(object):
         """
         ironic = ironic or self.ironic
         ports = self.ports()
-        if isinstance(port, six.string_types):
+        if isinstance(port, str):
             port = ports[port]
 
         ironic.port.delete(port.uuid)
@@ -545,7 +545,7 @@ def triggers_fsm_error_transition(errors=(Exception,),
        error event.
    """
    def outer(func):
-        @six.wraps(func)
+        @functools.wraps(func)
        def inner(node_info, *args, **kwargs):
            ret = None
            try:
@@ -581,7 +581,7 @@ def fsm_event_before(event, strict=False):
    :param strict: make an invalid fsm event trigger an error event
    """
    def outer(func):
-        @six.wraps(func)
+        @functools.wraps(func)
        def inner(node_info, *args, **kwargs):
            LOG.debug('Processing event %(event)s before calling '
                      '%(func)s', {'event': event, 'func': func},
@@ -602,7 +602,7 @@ def fsm_event_after(event, strict=False):
    :param strict: make an invalid fsm event trigger an error event
    """
    def outer(func):
-        @six.wraps(func)
+        @functools.wraps(func)
        def inner(node_info, *args, **kwargs):
            ret = func(node_info, *args, **kwargs)
            LOG.debug('Processing event %(event)s after calling '
@@ -641,7 +641,7 @@ def release_lock(func):
    instance.
 
    """
-    @six.wraps(func)
+    @functools.wraps(func)
    def inner(node_info, *args, **kwargs):
        try:
            return func(node_info, *args, **kwargs)
@@ -811,7 +811,7 @@ def find_node(**attributes):
        query = db.model_query(db.Attribute.node_uuid)
        pairs = [(db.Attribute.name == name) &
                 (db.Attribute.value == v) for v in value]
-        query = query.filter(six.moves.reduce(operator.or_, pairs))
+        query = query.filter(functools.reduce(operator.or_, pairs))
        found.update(row.node_uuid for row in query.distinct().all())
 
    if not found:
@@ -17,7 +17,6 @@ import abc
 
 from oslo_config import cfg
 from oslo_log import log
-import six
 import stevedore
 
 from ironic_inspector.common.i18n import _
@@ -27,8 +26,7 @@ CONF = cfg.CONF
 LOG = log.getLogger(__name__)
 
 
-@six.add_metaclass(abc.ABCMeta)
-class ProcessingHook(object):  # pragma: no cover
+class ProcessingHook(object, metaclass=abc.ABCMeta):  # pragma: no cover
     """Abstract base class for introspection data processing hooks."""
 
     dependencies = []
@@ -97,8 +95,7 @@ class WithValidation(object):
             raise ValueError('; '.join(msg))
 
 
-@six.add_metaclass(abc.ABCMeta)
-class RuleConditionPlugin(WithValidation):  # pragma: no cover
+class RuleConditionPlugin(WithValidation, metaclass=abc.ABCMeta):  # pragma: no cover  # noqa
     """Abstract base class for rule condition plugins."""
 
     REQUIRED_PARAMS = {'value'}
@@ -120,8 +117,7 @@ class RuleConditionPlugin(WithValidation):  # pragma: no cover
     """
 
 
-@six.add_metaclass(abc.ABCMeta)
-class RuleActionPlugin(WithValidation):  # pragma: no cover
+class RuleActionPlugin(WithValidation, metaclass=abc.ABCMeta):  # pragma: no cover  # noqa
     """Abstract base class for rule action plugins."""
 
     FORMATTED_PARAMS = []
@@ -18,7 +18,6 @@ import json
 
 from oslo_config import cfg
 from oslo_utils import excutils
-import six
 
 from ironic_inspector.common import swift
 from ironic_inspector import node_cache
@@ -38,8 +37,7 @@ def _filter_data_excluded_keys(data):
             if k not in _STORAGE_EXCLUDED_KEYS}
 
 
-@six.add_metaclass(abc.ABCMeta)
-class BaseStorageBackend(object):
+class BaseStorageBackend(object, metaclass=abc.ABCMeta):
 
     @abc.abstractmethod
     def get(self, node_uuid, processed=True, get_json=False):
@@ -18,7 +18,6 @@ import netaddr
 from oslo_config import cfg
 from oslo_utils import netutils
 from oslo_utils import units
-import six
 
 
 from ironic_inspector.common.i18n import _
@@ -104,7 +103,7 @@ class SchedulerHook(base.ProcessingHook):
                        node_info=node_info)
         try:
             introspection_data['cpus'] = int(inventory['cpu']['count'])
-            introspection_data['cpu_arch'] = six.text_type(
+            introspection_data['cpu_arch'] = str(
                 inventory['cpu']['architecture'])
         except (KeyError, ValueError, TypeError):
             LOG.warning('malformed or missing CPU information: %s',
@@ -14,6 +14,7 @@
 """Base code for PXE boot filtering."""
 
 import contextlib
+import functools
 
 from automaton import exceptions as automaton_errors
 from automaton import machines
@@ -22,7 +23,6 @@ from futurist import periodics
 from oslo_concurrency import lockutils
 from oslo_config import cfg
 from oslo_log import log
-import six
 import stevedore
 
 from ironic_inspector.common.i18n import _
@@ -74,7 +74,7 @@ State_space = [
 def locked_driver_event(event):
     """Call driver method having processed the fsm event."""
     def outer(method):
-        @six.wraps(method)
+        @functools.wraps(method)
         def inner(self, *args, **kwargs):
             with self.lock, self.fsm_reset_on_error() as fsm:
                 fsm.process_event(event)
@@ -15,11 +15,8 @@
 
 import abc
 
-import six
-
 
-@six.add_metaclass(abc.ABCMeta)
-class FilterDriver(object):
+class FilterDriver(object, metaclass=abc.ABCMeta):
     """The PXE boot filtering interface."""
 
     @abc.abstractmethod
@@ -19,7 +19,6 @@ import jsonschema
 from oslo_db import exception as db_exc
 from oslo_utils import timeutils
 from oslo_utils import uuidutils
-import six
 from sqlalchemy import orm
 
 from ironic_inspector.common.i18n import _
@@ -225,7 +224,7 @@ def _format_value(value, data):
     :param data: Introspection data.
     :returns: `value`, formatted with the parameters from `data`.
     """
-    if isinstance(value, six.string_types):
+    if isinstance(value, str):
         # NOTE(aarefiev): verify provided value with introspection
         # data format specifications.
         # TODO(aarefiev): simple verify on import rule time.
@@ -237,7 +236,7 @@ def _format_value(value, data):
                 {'val': value, 'e': e}, data=data)
     elif isinstance(value, dict):
         return {_format_value(k, data): _format_value(v, data)
-                for k, v in six.iteritems(value)}
+                for k, v in value.items()}
     elif isinstance(value, list):
         return [_format_value(v, data) for v in value]
     else:
@@ -11,18 +11,19 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import eventlet
+import contextlib
 
-import contextlib  # noqa
 import copy
 import datetime
+import functools
 import json
 import os
 import sys
 import tempfile
 import time
 import unittest
+import urllib
 
+import eventlet
 import fixtures
 import mock
 from oslo_config import cfg
@@ -31,8 +32,6 @@ from oslo_utils import timeutils
 from oslo_utils import uuidutils
 import pytz
 import requests
-import six
-from six.moves import urllib
 
 from ironic_inspector.cmd import all as inspector_cmd
 from ironic_inspector.cmd import dbsync
@@ -88,7 +87,7 @@ def get_error(response):
 
 def _query_string(*field_names):
     def outer(func):
-        @six.wraps(func)
+        @functools.wraps(func)
         def inner(*args, **kwargs):
             queries = []
             for field_name in field_names:
@@ -11,11 +11,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import functools
+
 import flask
 import mock
 from oslo_config import cfg
 from oslo_utils import uuidutils
-import six
 
 from ironic_inspector import api_tools
 import ironic_inspector.test.base as test_base
@@ -28,8 +29,9 @@ app.testing = True
 
 def mock_test_field(return_value=None, side_effect=None):
     """Mock flask.request.args.get"""
+
     def outer(func):
-        @six.wraps(func)
+        @functools.wraps(func)
         def inner(self, *args, **kwargs):
             with app.test_request_context('/'):
                 get_mock = flask.request.args.get = mock.Mock()
@@ -37,7 +39,9 @@ def mock_test_field(return_value=None, side_effect=None):
                 get_mock.side_effect = side_effect
                 ret = func(self, get_mock, *args, **kwargs)
                 return ret
+
         return inner
+
     return outer
 
 
@@ -46,6 +50,7 @@ class RaisesCoercionExceptionTestCase(test_base.BaseTest):
         @api_tools.raises_coercion_exceptions
         def fn():
             return True
+
         self.assertIs(True, fn())
 
     def test_assertion_error(self):
@@ -53,14 +58,14 @@ class RaisesCoercionExceptionTestCase(test_base.BaseTest):
         def fn():
             assert False, 'Oops!'
 
-        six.assertRaisesRegex(self, utils.Error, 'Bad request: Oops!', fn)
+        self.assertRaisesRegex(utils.Error, 'Bad request: Oops!', fn)
 
     def test_value_error(self):
         @api_tools.raises_coercion_exceptions
         def fn():
             raise ValueError('Oops!')
 
-        six.assertRaisesRegex(self, utils.Error, 'Bad request: Oops!', fn)
+        self.assertRaisesRegex(utils.Error, 'Bad request: Oops!', fn)
 
 
 class RequestFieldTestCase(test_base.BaseTest):
@@ -102,8 +107,9 @@ class MarkerFieldTestCase(test_base.BaseTest):
     @mock_test_field(return_value='foo')
     def test_marker_check_fails(self, get_mock, like_mock):
         like_mock.return_value = False
-        six.assertRaisesRegex(self, utils.Error, '.*(Marker not UUID-like)',
-                              api_tools.marker_field)
+        self.assertRaisesRegex(utils.Error,
+                               '.*(Marker not UUID-like)',
+                               api_tools.marker_field)
         like_mock.assert_called_once_with(get_mock.return_value)
 
 
@@ -115,9 +121,9 @@ class LimitFieldTestCase(test_base.BaseTest):
 
     @mock_test_field(return_value=str(CONF.api_max_limit + 1))
     def test_limit_over(self, get_mock):
-        six.assertRaisesRegex(self, utils.Error,
+        self.assertRaisesRegex(utils.Error,
                                '.*(Limit over %s)' % CONF.api_max_limit,
                                api_tools.limit_field)
 
     @mock_test_field(return_value='0')
     def test_limit_zero(self, get_mock):
@@ -126,11 +132,11 @@ class LimitFieldTestCase(test_base.BaseTest):
 
     @mock_test_field(return_value='-1')
    def test_limit_negative(self, get_mock):
-        six.assertRaisesRegex(self, utils.Error,
+        self.assertRaisesRegex(utils.Error,
                               '.*(Limit cannot be negative)',
                               api_tools.limit_field)
 
    @mock_test_field(return_value='foo')
    def test_limit_invalid_value(self, get_mock):
-        six.assertRaisesRegex(self, utils.Error, 'Bad request',
+        self.assertRaisesRegex(utils.Error, 'Bad request',
                               api_tools.limit_field)
@@ -11,6 +11,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import builtins
 try:
     import errno
 except ImportError:
@@ -22,7 +23,6 @@ import fixtures
 from ironicclient import exc as ironic_exc
 import mock
 from oslo_config import cfg
-import six
 
 from ironic_inspector.common import ironic as ir_utils
 from ironic_inspector import node_cache
@@ -110,7 +110,7 @@ class TestExclusiveWriteOrPass(test_base.BaseTest):
     def setUp(self):
         super(TestExclusiveWriteOrPass, self).setUp()
         self.mock_open = self.useFixture(fixtures.MockPatchObject(
-            six.moves.builtins, 'open', new=mock.mock_open())).mock
+            builtins, 'open', new=mock.mock_open())).mock
         self.mock_fd = self.mock_open.return_value
         self.mock_fcntl = self.useFixture(fixtures.MockPatchObject(
             dnsmasq.fcntl, 'flock', autospec=True)).mock
@@ -335,7 +335,7 @@ class TestIBMapping(test_base.BaseTest):
         self.fileobj = mock.mock_open(read_data=self.ib_data)
 
     def test_matching_ib(self):
-        with mock.patch('six.moves.builtins.open', self.fileobj,
+        with mock.patch('builtins.open', self.fileobj,
                         create=True) as mock_open:
             iptables._ib_mac_to_rmac_mapping(self.ports)
 
@@ -346,7 +346,7 @@ class TestIBMapping(test_base.BaseTest):
 
     def test_ib_not_match(self):
         self.ports[0].extra['client-id'] = 'foo'
-        with mock.patch('six.moves.builtins.open', self.fileobj,
+        with mock.patch('builtins.open', self.fileobj,
                         create=True) as mock_open:
             iptables._ib_mac_to_rmac_mapping(self.ports)
 
@@ -356,7 +356,7 @@ class TestIBMapping(test_base.BaseTest):
                          'r')
 
     def test_open_no_such_file(self):
-        with mock.patch('six.moves.builtins.open',
+        with mock.patch('builtins.open',
                         side_effect=IOError()) as mock_open:
             iptables._ib_mac_to_rmac_mapping(self.ports)
 
@@ -367,7 +367,7 @@ class TestIBMapping(test_base.BaseTest):
 
     def test_no_interfaces(self):
         CONF.set_override('ethoib_interfaces', [], 'iptables')
-        with mock.patch('six.moves.builtins.open', self.fileobj,
+        with mock.patch('builtins.open', self.fileobj,
                         create=True) as mock_open:
             iptables._ib_mac_to_rmac_mapping(self.ports)
 
|
@ -13,6 +13,7 @@
|
|||||||
|
|
||||||
import copy
|
import copy
|
||||||
import datetime
|
import datetime
|
||||||
|
import functools
|
||||||
import json
|
import json
|
||||||
import unittest
|
import unittest
|
||||||
|
|
||||||
@ -22,7 +23,6 @@ from oslo_config import cfg
|
|||||||
import oslo_db
|
import oslo_db
|
||||||
from oslo_utils import timeutils
|
from oslo_utils import timeutils
|
||||||
from oslo_utils import uuidutils
|
from oslo_utils import uuidutils
|
||||||
import six
|
|
||||||
|
|
||||||
from ironic_inspector.common import ironic as ir_utils
|
from ironic_inspector.common import ironic as ir_utils
|
||||||
from ironic_inspector.common import locking
|
from ironic_inspector.common import locking
|
||||||
@ -56,7 +56,7 @@ class TestNodeCache(test_base.NodeTest):
|
|||||||
self.assertEqual(self.uuid, node.uuid)
|
self.assertEqual(self.uuid, node.uuid)
|
||||||
self.assertTrue(
|
self.assertTrue(
|
||||||
(datetime.datetime.utcnow() - datetime.timedelta(seconds=60)
|
(datetime.datetime.utcnow() - datetime.timedelta(seconds=60)
|
||||||
< node.started_at <
|
< node.started_at <
|
||||||
datetime.datetime.utcnow() + datetime.timedelta(seconds=60)))
|
datetime.datetime.utcnow() + datetime.timedelta(seconds=60)))
|
||||||
self.assertFalse(node._lock.is_locked())
|
self.assertFalse(node._lock.is_locked())
|
||||||
|
|
||||||
@ -85,7 +85,7 @@ class TestNodeCache(test_base.NodeTest):
|
|||||||
state=istate.States.finished).save(session)
|
state=istate.States.finished).save(session)
|
||||||
db.Attribute(uuid=uuidutils.generate_uuid(), name='mac',
|
db.Attribute(uuid=uuidutils.generate_uuid(), name='mac',
|
||||||
value='11:22:11:22:11:22', node_uuid=self.uuid).save(
|
value='11:22:11:22:11:22', node_uuid=self.uuid).save(
|
||||||
session)
|
session)
|
||||||
data = {'s': 'value', 'b': True, 'i': 42}
|
data = {'s': 'value', 'b': True, 'i': 42}
|
||||||
encoded = json.dumps(data)
|
encoded = json.dumps(data)
|
||||||
db.Option(uuid=self.uuid, name='name', value=encoded).save(
|
db.Option(uuid=self.uuid, name='name', value=encoded).save(
|
||||||
@ -264,8 +264,8 @@ class TestNodeCacheFind(test_base.NodeTest):
|
|||||||
node_cache.add_node(uuid2,
|
node_cache.add_node(uuid2,
|
||||||
istate.States.starting,
|
istate.States.starting,
|
||||||
bmc_address='1.2.3.4')
|
bmc_address='1.2.3.4')
|
||||||
six.assertRaisesRegex(self, utils.Error, 'Multiple nodes',
|
self.assertRaisesRegex(utils.Error, 'Multiple nodes',
|
||||||
node_cache.find_node, bmc_address='1.2.3.4')
|
node_cache.find_node, bmc_address='1.2.3.4')
|
||||||
|
|
||||||
def test_macs(self):
|
def test_macs(self):
|
||||||
res = node_cache.find_node(mac=['11:22:33:33:33:33', self.macs[1]])
|
res = node_cache.find_node(mac=['11:22:33:33:33:33', self.macs[1]])
|
||||||
@ -304,7 +304,7 @@ class TestNodeCacheFind(test_base.NodeTest):
|
|||||||
session = db.get_writer_session()
|
session = db.get_writer_session()
|
||||||
with session.begin():
|
with session.begin():
|
||||||
(db.model_query(db.Node).filter_by(uuid=self.uuid).
|
(db.model_query(db.Node).filter_by(uuid=self.uuid).
|
||||||
delete())
|
delete())
|
||||||
self.assertRaises(utils.Error, node_cache.find_node,
|
self.assertRaises(utils.Error, node_cache.find_node,
|
||||||
bmc_address='1.2.3.4')
|
bmc_address='1.2.3.4')
|
||||||
|
|
||||||
@ -312,7 +312,7 @@ class TestNodeCacheFind(test_base.NodeTest):
|
|||||||
session = db.get_writer_session()
|
session = db.get_writer_session()
|
||||||
with session.begin():
|
with session.begin():
|
||||||
(db.model_query(db.Node).filter_by(uuid=self.uuid).
|
(db.model_query(db.Node).filter_by(uuid=self.uuid).
|
||||||
update({'finished_at': datetime.datetime.utcnow()}))
|
update({'finished_at': datetime.datetime.utcnow()}))
|
||||||
self.assertRaises(utils.Error, node_cache.find_node,
|
self.assertRaises(utils.Error, node_cache.find_node,
|
||||||
bmc_address='1.2.3.4')
|
bmc_address='1.2.3.4')
|
||||||
|
|
||||||
@ -502,19 +502,19 @@ class TestNodeInfoFinished(test_base.NodeTest):
|
|||||||
with session.begin():
|
with session.begin():
|
||||||
self.assertEqual((datetime.datetime(1, 1, 1), None),
|
self.assertEqual((datetime.datetime(1, 1, 1), None),
|
||||||
tuple(db.model_query(
|
tuple(db.model_query(
|
||||||
db.Node.finished_at,
|
db.Node.finished_at,
|
||||||
db.Node.error).first()))
|
db.Node.error).first()))
|
||||||
self.assertEqual([], db.model_query(db.Attribute,
|
self.assertEqual([], db.model_query(db.Attribute,
|
||||||
session=session).all())
|
session=session).all())
|
||||||
self.assertEqual([], db.model_query(db.Option,
|
self.assertEqual([], db.model_query(db.Option,
|
||||||
session=session).all())
|
session=session).all())
|
||||||
|
|
||||||
def test_error(self):
|
def test_error(self):
|
||||||
self.node_info.finished(istate.Events.error, error='boom')
|
self.node_info.finished(istate.Events.error, error='boom')
|
||||||
|
|
||||||
self.assertEqual((datetime.datetime(1, 1, 1), 'boom'),
|
self.assertEqual((datetime.datetime(1, 1, 1), 'boom'),
|
||||||
tuple(db.model_query(db.Node.finished_at,
|
tuple(db.model_query(db.Node.finished_at,
|
||||||
db.Node.error).first()))
|
db.Node.error).first()))
|
||||||
self.assertEqual([], db.model_query(db.Attribute).all())
|
self.assertEqual([], db.model_query(db.Attribute).all())
|
||||||
self.assertEqual([], db.model_query(db.Option).all())
|
self.assertEqual([], db.model_query(db.Option).all())
|
||||||
|
|
||||||
@ -981,7 +981,8 @@ class TestNodeInfoVersionId(test_base.NodeStateTest):
|
|||||||
def func():
|
def func():
|
||||||
return self.node_info.version_id
|
return self.node_info.version_id
|
||||||
|
|
||||||
six.assertRaisesRegex(self, utils.NotFoundInCacheError, '.*', func)
|
self.assertRaisesRegex(utils.NotFoundInCacheError, '.*',
|
||||||
|
func)
|
||||||
|
|
||||||
def test_set(self):
|
def test_set(self):
|
||||||
with db.ensure_transaction() as session:
|
with db.ensure_transaction() as session:
|
||||||
@ -997,9 +998,10 @@ class TestNodeInfoVersionId(test_base.NodeStateTest):
|
|||||||
row.update({'version_id': uuidutils.generate_uuid()})
|
row.update({'version_id': uuidutils.generate_uuid()})
|
||||||
row.save(session)
|
row.save(session)
|
||||||
|
|
||||||
six.assertRaisesRegex(self, utils.NodeStateRaceCondition,
|
self.assertRaisesRegex(utils.NodeStateRaceCondition,
|
||||||
'Node state mismatch', self.node_info._set_state,
|
'Node state mismatch',
|
||||||
istate.States.finished)
|
self.node_info._set_state,
|
||||||
|
istate.States.finished)
|
||||||
|
|
||||||
|
|
||||||
class TestNodeInfoState(test_base.NodeStateTest):
|
class TestNodeInfoState(test_base.NodeStateTest):
|
||||||
@ -1013,9 +1015,9 @@ class TestNodeInfoState(test_base.NodeStateTest):
|
|||||||
self.assertEqual(self.node_info.state, row.state)
|
self.assertEqual(self.node_info.state, row.state)
|
||||||
|
|
||||||
def test_set_invalid_state(self):
|
def test_set_invalid_state(self):
|
||||||
six.assertRaisesRegex(self, oslo_db.exception.DBError,
|
self.assertRaisesRegex(oslo_db.exception.DBError,
|
||||||
'constraint failed',
|
'constraint failed',
|
||||||
self.node_info._set_state, 'foo')
|
self.node_info._set_state, 'foo')
|
||||||
|
|
||||||
def test_commit(self):
|
def test_commit(self):
|
||||||
current_time = timeutils.utcnow()
|
current_time = timeutils.utcnow()
|
||||||
@ -1038,9 +1040,9 @@ class TestNodeInfoStateFsm(test_base.NodeStateTest):
|
|||||||
def test__get_fsm_invalid_state(self):
|
def test__get_fsm_invalid_state(self):
|
||||||
self.node_info._fsm = None
|
self.node_info._fsm = None
|
||||||
self.node_info._state = 'foo'
|
self.node_info._state = 'foo'
|
||||||
six.assertRaisesRegex(self, automaton.exceptions.NotFound,
|
self.assertRaisesRegex(automaton.exceptions.NotFound,
|
||||||
'.*undefined state.*',
|
'.*undefined state.*',
|
||||||
self.node_info._get_fsm)
|
self.node_info._get_fsm)
|
||||||
|
|
||||||
def test__fsm_ctx_set_state(self):
|
def test__fsm_ctx_set_state(self):
|
||||||
with self.node_info._fsm_ctx() as fsm:
|
with self.node_info._fsm_ctx() as fsm:
|
||||||
@ -1056,9 +1058,10 @@ class TestNodeInfoStateFsm(test_base.NodeStateTest):
|
|||||||
|
|
||||||
def test__fsm_ctx_illegal_event(self):
|
def test__fsm_ctx_illegal_event(self):
|
||||||
with self.node_info._fsm_ctx() as fsm:
|
with self.node_info._fsm_ctx() as fsm:
|
||||||
six.assertRaisesRegex(self, automaton.exceptions.NotFound,
|
self.assertRaisesRegex(automaton.exceptions.NotFound,
|
||||||
'no defined transition', fsm.process_event,
|
'no defined transition',
|
||||||
istate.Events.finish)
|
fsm.process_event,
|
||||||
|
istate.Events.finish)
|
||||||
self.assertEqual(self.node_info.state, istate.States.starting)
|
self.assertEqual(self.node_info.state, istate.States.starting)
|
||||||
|
|
||||||
def test__fsm_ctx_generic_exception(self):
|
def test__fsm_ctx_generic_exception(self):
|
||||||
@ -1078,16 +1081,17 @@ class TestNodeInfoStateFsm(test_base.NodeStateTest):
|
|||||||
self.assertEqual(self.node_info.state, istate.States.waiting)
|
self.assertEqual(self.node_info.state, istate.States.waiting)
|
||||||
|
|
||||||
def test_fsm_illegal_event(self):
|
def test_fsm_illegal_event(self):
|
||||||
six.assertRaisesRegex(self, utils.NodeStateInvalidEvent,
|
self.assertRaisesRegex(utils.NodeStateInvalidEvent,
|
||||||
'no defined transition',
|
'no defined transition',
|
||||||
self.node_info.fsm_event, istate.Events.finish)
|
self.node_info.fsm_event,
|
||||||
|
istate.Events.finish)
|
||||||
self.assertEqual(self.node_info.state, istate.States.starting)
|
self.assertEqual(self.node_info.state, istate.States.starting)
|
||||||
|
|
||||||
def test_fsm_illegal_strict_event(self):
|
def test_fsm_illegal_strict_event(self):
|
||||||
six.assertRaisesRegex(self, utils.NodeStateInvalidEvent,
|
self.assertRaisesRegex(utils.NodeStateInvalidEvent,
|
||||||
'no defined transition',
|
'no defined transition',
|
||||||
self.node_info.fsm_event,
|
self.node_info.fsm_event,
|
||||||
istate.Events.finish, strict=True)
|
istate.Events.finish, strict=True)
|
||||||
self.assertIn('no defined transition', self.node_info.error)
|
self.assertIn('no defined transition', self.node_info.error)
|
||||||
self.assertEqual(self.node_info.state, istate.States.error)
|
self.assertEqual(self.node_info.state, istate.States.error)
|
||||||
|
|
||||||
@ -1196,6 +1200,7 @@ class TestFsmEvent(test_base.NodeStateTest):
|
|||||||
@node_cache.fsm_transition(istate.Events.finish)
|
@node_cache.fsm_transition(istate.Events.finish)
|
||||||
def func():
|
def func():
|
||||||
pass
|
pass
|
||||||
|
|
||||||
fsm_event_after_mock.assert_called_once_with(istate.Events.finish)
|
fsm_event_after_mock.assert_called_once_with(istate.Events.finish)
|
||||||
trigger_mock.assert_called_once_with()
|
trigger_mock.assert_called_once_with()
|
||||||
|
|
||||||
@ -1207,6 +1212,7 @@ class TestFsmEvent(test_base.NodeStateTest):
|
|||||||
@node_cache.fsm_transition(istate.Events.abort, reentrant=False)
|
@node_cache.fsm_transition(istate.Events.abort, reentrant=False)
|
||||||
def func():
|
def func():
|
||||||
pass
|
pass
|
||||||
|
|
||||||
fsm_event_before_mock.assert_called_once_with(istate.Events.abort,
|
fsm_event_before_mock.assert_called_once_with(istate.Events.abort,
|
||||||
strict=True)
|
strict=True)
|
||||||
trigger_mock.assert_called_once_with()
|
trigger_mock.assert_called_once_with()
|
||||||
@ -1216,13 +1222,14 @@ class TestFsmEvent(test_base.NodeStateTest):
|
|||||||
@mock.patch.object(node_cache, 'NodeInfo', autospec=True)
|
@mock.patch.object(node_cache, 'NodeInfo', autospec=True)
|
||||||
class TestStartIntrospection(test_base.NodeTest):
|
class TestStartIntrospection(test_base.NodeTest):
|
||||||
def prepare_mocks(fn):
|
def prepare_mocks(fn):
|
||||||
@six.wraps(fn)
|
@functools.wraps(fn)
|
||||||
def inner(self, NodeMock, *args):
|
def inner(self, NodeMock, *args):
|
||||||
method_mock = mock.Mock()
|
method_mock = mock.Mock()
|
||||||
NodeMock.return_value = self.node_info
|
NodeMock.return_value = self.node_info
|
||||||
self.node_info.fsm_event = method_mock
|
self.node_info.fsm_event = method_mock
|
||||||
fn(self, method_mock, *args)
|
fn(self, method_mock, *args)
|
||||||
method_mock.assert_called_once_with(istate.Events.start)
|
method_mock.assert_called_once_with(istate.Events.start)
|
||||||
|
|
||||||
return inner
|
return inner
|
||||||
|
|
||||||
@prepare_mocks
|
@prepare_mocks
|
||||||
@ -1237,25 +1244,25 @@ class TestStartIntrospection(test_base.NodeTest):
|
|||||||
@prepare_mocks
|
@prepare_mocks
|
||||||
def test_node_in_db_invalid_state(self, fsm_event_mock, add_node_mock):
|
def test_node_in_db_invalid_state(self, fsm_event_mock, add_node_mock):
|
||||||
fsm_event_mock.side_effect = utils.NodeStateInvalidEvent('Oops!')
|
fsm_event_mock.side_effect = utils.NodeStateInvalidEvent('Oops!')
|
||||||
six.assertRaisesRegex(self, utils.NodeStateInvalidEvent, 'Oops!',
|
self.assertRaisesRegex(utils.NodeStateInvalidEvent, 'Oops!',
|
||||||
node_cache.start_introspection,
|
node_cache.start_introspection,
|
||||||
self.node_info.uuid)
|
self.node_info.uuid)
|
||||||
self.assertFalse(add_node_mock.called)
|
self.assertFalse(add_node_mock.called)
|
||||||
|
|
||||||
@prepare_mocks
|
@prepare_mocks
|
||||||
def test_node_in_db_race_condition(self, fsm_event_mock, add_node_mock):
|
def test_node_in_db_race_condition(self, fsm_event_mock, add_node_mock):
|
||||||
fsm_event_mock.side_effect = utils.NodeStateRaceCondition()
|
fsm_event_mock.side_effect = utils.NodeStateRaceCondition()
|
||||||
six.assertRaisesRegex(self, utils.NodeStateRaceCondition, '.*',
|
self.assertRaisesRegex(utils.NodeStateRaceCondition, '.*',
|
||||||
node_cache.start_introspection,
|
node_cache.start_introspection,
|
||||||
self.node_info.uuid)
|
self.node_info.uuid)
|
||||||
self.assertFalse(add_node_mock.called)
|
self.assertFalse(add_node_mock.called)
|
||||||
|
|
||||||
@prepare_mocks
|
@prepare_mocks
|
||||||
def test_error_fsm_event(self, fsm_event_mock, add_node_mock):
|
def test_error_fsm_event(self, fsm_event_mock, add_node_mock):
|
||||||
fsm_event_mock.side_effect = utils.Error('Oops!')
|
fsm_event_mock.side_effect = utils.Error('Oops!')
|
||||||
six.assertRaisesRegex(self, utils.Error, 'Oops!',
|
self.assertRaisesRegex(utils.Error, 'Oops!',
|
||||||
node_cache.start_introspection,
|
node_cache.start_introspection,
|
||||||
self.node_info.uuid)
|
self.node_info.uuid)
|
||||||
self.assertFalse(add_node_mock.called)
|
self.assertFalse(add_node_mock.called)
|
||||||
|
|
||||||
@prepare_mocks
|
@prepare_mocks
|
||||||
@ -1271,9 +1278,9 @@ class TestStartIntrospection(test_base.NodeTest):
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
fsm_event_mock.side_effect = CustomError('Oops!')
|
fsm_event_mock.side_effect = CustomError('Oops!')
|
||||||
six.assertRaisesRegex(self, CustomError, 'Oops!',
|
self.assertRaisesRegex(CustomError, 'Oops!',
|
||||||
node_cache.start_introspection,
|
node_cache.start_introspection,
|
||||||
self.node_info.uuid)
|
self.node_info.uuid)
|
||||||
self.assertFalse(add_node_mock.called)
|
self.assertFalse(add_node_mock.called)
|
||||||
|
|
||||||
|
|
||||||
|
@@ -14,7 +14,6 @@
 import mock
 from oslo_config import cfg
 from oslo_utils import units
-import six
 
 from ironic_inspector import node_cache
 from ironic_inspector.plugins import base
@@ -426,10 +425,10 @@ class TestRootDiskSelection(test_base.NodeTest):
         self.node.properties['root_device'] = {'size': 10}
         self.inventory['disks'] = []
 
-        six.assertRaisesRegex(self, utils.Error,
+        self.assertRaisesRegex(utils.Error,
                                'No disks satisfied root device hints',
                                self.hook.before_update,
                                self.data, self.node_info)
 
         self.assertNotIn('local_gb', self.data)
         self.assertFalse(self.node_info.update_properties.called)
@@ -12,6 +12,7 @@
 # limitations under the License.
 
 import copy
+import functools
 import json
 import os
 import shutil
@@ -25,7 +26,6 @@ from oslo_config import cfg
 from oslo_serialization import base64
 from oslo_utils import timeutils
 from oslo_utils import uuidutils
-import six
 
 from ironic_inspector.common import ironic as ir_utils
 from ironic_inspector.common import swift
@@ -613,7 +613,7 @@ class TestProcessNode(BaseTest):
 @mock.patch.object(node_cache, 'get_node', autospec=True)
 class TestReapply(BaseTest):
     def prepare_mocks(func):
-        @six.wraps(func)
+        @functools.wraps(func)
         def wrapper(self, pop_mock, *args, **kw):
             pop_mock.return_value = node_cache.NodeInfo(
                 uuid=self.node.uuid,
@@ -700,7 +700,7 @@ class TestReapplyNode(BaseTest):
         self.node_info.release_lock.assert_called_once_with(self.node_info)
 
     def prepare_mocks(fn):
-        @six.wraps(fn)
+        @functools.wraps(fn)
         def wrapper(self, release_mock, finished_mock, swift_mock,
                     *args, **kw):
             finished_mock.side_effect = lambda *a, **kw: \
@@ -17,7 +17,6 @@ import fixtures
 from futurist import periodics
 import mock
 from oslo_config import cfg
-import six
 import stevedore
 
 from ironic_inspector.common import ironic as ir_utils
@@ -78,8 +77,8 @@ class TestDriverManagerLoading(test_base.BaseTest):
 
     def test_invalid_filter_driver(self):
         CONF.set_override('driver', 'foo', 'pxe_filter')
-        six.assertRaisesRegex(self, stevedore.exception.NoMatches, 'foo',
+        self.assertRaisesRegex(stevedore.exception.NoMatches, 'foo',
                               pxe_filter._driver_manager)
         self.assertIsNone(pxe_filter._DRIVER_MANAGER)
 
 
@@ -118,7 +117,7 @@ class TestLockedDriverEvent(BaseFilterBaseTest):
         self.mock_fsm = self.useFixture(
             fixtures.MockPatchObject(self.driver, 'fsm')).mock
         (self.driver.fsm_reset_on_error.return_value.
          __enter__.return_value) = self.mock_fsm
 
     def test_locked_driver_event(self):
         event = 'foo'
@@ -163,7 +162,6 @@ class TestBaseFilterFsmPrecautions(BaseFilterBaseTest):
         self.mock_reset.assert_not_called()
 
     def test_fsm_automaton_error(self):
-
         def fun():
             with self.driver.fsm_reset_on_error():
                 raise automaton_errors.NotFound('Oops!')
@@ -173,7 +171,6 @@ class TestBaseFilterFsmPrecautions(BaseFilterBaseTest):
         self.mock_reset.assert_not_called()
 
     def test_fsm_reset_on_error_ctx_custom_error(self):
-
         class MyError(Exception):
             pass
 
@@ -41,7 +41,6 @@ python-ironicclient==2.3.0
 pytz==2013.6
 reno==2.5.0
 retrying==1.2.3
-six==1.10.0
 sphinx==1.6.2
 sphinxcontrib-svg2pdfconverter==0.1.0
 SQLAlchemy==1.0.10
@@ -32,7 +32,6 @@ oslo.serialization!=2.19.1,>=2.18.0 # Apache-2.0
 oslo.service!=1.28.1,>=1.24.0 # Apache-2.0
 oslo.utils>=3.33.0 # Apache-2.0
 retrying!=1.3.0,>=1.2.3 # Apache-2.0
-six>=1.10.0 # MIT
 stevedore>=1.20.0 # Apache-2.0
 SQLAlchemy!=1.1.5,!=1.1.6,!=1.1.7,!=1.1.8,>=1.0.10 # MIT
 tooz>=1.64.0 # Apache-2.0