Introducing node introspection state management

Currently, the state of a node introspection isn't kept in the database.
This change introduces:

* a new database column to keep the node introspection state
* an automaton to manage the node introspection state
* a decorator to declare a function that performs an introspection state
  transition (see the usage sketch below)
* a version_id column, consulted whenever node_info is committed, to
  enhance database consistency

This change is part of the HA_Inspector effort [1].

[1] https://specs.openstack.org/openstack/ironic-inspector-specs/specs/HA_inspector.html

Closes-Bug: #1618835
Partial-Bug: #1525218
Change-Id: I18cb45f0d1194414715ccbe826d8a95610ec718d
dparalen 2016-08-19 16:10:17 +02:00
parent fbe216b782
commit 3ddc0615e5
18 changed files with 1269 additions and 194 deletions
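To illustrate the decorator mentioned in the summary above, here is a minimal
sketch of how a state transition is declared. The decorator, event names and
handler signatures are taken from the hunks that follow; the handler bodies
are elided:

    from ironic_inspector import introspection_state as istate
    from ironic_inspector import node_cache


    @node_cache.fsm_transition(istate.Events.wait)
    def _background_introspect_locked(node_info, ironic):
        # reentrant (default) form: the 'wait' event is processed after the
        # handler returns, moving the node from 'starting' to 'waiting'
        pass


    @node_cache.release_lock
    @node_cache.fsm_transition(istate.Events.abort, reentrant=False)
    def _abort(node_info, ironic):
        # non-reentrant form: the 'abort' event is processed (strictly) before
        # the handler runs, so an invalid transition fails the request up front
        pass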


@@ -22,12 +22,13 @@ from oslo_db import options as db_opts
 from oslo_db.sqlalchemy import models
 from oslo_db.sqlalchemy import session as db_session
 from oslo_db.sqlalchemy import types as db_types
-from sqlalchemy import (Boolean, Column, DateTime, Float, ForeignKey, Integer,
-                        String, Text)
+from sqlalchemy import (Boolean, Column, DateTime, Enum, Float, ForeignKey,
+                        Integer, String, Text)
 from sqlalchemy.ext.declarative import declarative_base
 from sqlalchemy import orm
 
 from ironic_inspector import conf  # noqa
+from ironic_inspector import introspection_state as istate
 
 
 class ModelBase(models.ModelBase):
@@ -51,10 +52,22 @@ if CONF.discoverd.database:
 class Node(Base):
     __tablename__ = 'nodes'
     uuid = Column(String(36), primary_key=True)
+    version_id = Column(String(36), server_default='')
+    state = Column(Enum(*istate.States.all()), nullable=False,
+                   default=istate.States.finished,
+                   server_default=istate.States.finished)
     started_at = Column(Float, nullable=True)
     finished_at = Column(Float, nullable=True)
     error = Column(Text, nullable=True)
 
+    # version_id is being tracked in the NodeInfo object
+    # for the sake of consistency. See also SQLAlchemy docs:
+    # http://docs.sqlalchemy.org/en/latest/orm/versioning.html
+    __mapper_args__ = {
+        'version_id_col': version_id,
+        'version_id_generator': False,
+    }
+
 
 class Attribute(Base):
     __tablename__ = 'attributes'
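The version_id column and __mapper_args__ above enable SQLAlchemy's
application-side version counter (see the linked versioning docs). A minimal,
self-contained sketch of the mechanism, using a throwaway SQLite model rather
than the inspector's real one:

    import sqlalchemy as sa
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy import orm

    Base = declarative_base()


    class Node(Base):
        __tablename__ = 'nodes'
        uuid = sa.Column(sa.String(36), primary_key=True)
        version_id = sa.Column(sa.String(36), server_default='')
        __mapper_args__ = {
            'version_id_col': version_id,
            'version_id_generator': False,
        }


    engine = sa.create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = orm.sessionmaker(bind=engine)()

    session.add(Node(uuid='abc', version_id='v1'))
    session.commit()

    node = session.query(Node).get('abc')
    # With version_id_generator=False the application supplies each new
    # version itself; the flush then emits roughly
    #   UPDATE nodes SET ... WHERE uuid = 'abc' AND version_id = 'v1'
    node.version_id = 'v2'
    session.commit()

    # If a concurrent writer had already bumped version_id, that UPDATE would
    # match no rows and SQLAlchemy would raise StaleDataError, which the
    # node_cache code further down maps to NodeStateRaceCondition.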


@@ -23,6 +23,7 @@ from oslo_config import cfg
 from ironic_inspector.common.i18n import _, _LI, _LW
 from ironic_inspector.common import ironic as ir_utils
 from ironic_inspector import firewall
+from ironic_inspector import introspection_state as istate
 from ironic_inspector import node_cache
 from ironic_inspector import utils
 
@@ -87,9 +88,9 @@ def introspect(node_id, new_ipmi_credentials=None, token=None):
                           node_info=node)
 
     bmc_address = ir_utils.get_ipmi_address(node)
-    node_info = node_cache.add_node(node.uuid,
-                                    bmc_address=bmc_address,
-                                    ironic=ironic)
+    node_info = node_cache.start_introspection(node.uuid,
+                                               bmc_address=bmc_address,
+                                               ironic=ironic)
     node_info.set_option('new_ipmi_credentials', new_ipmi_credentials)
 
     def _handle_exceptions(fut):
@@ -124,12 +125,13 @@ def _background_introspect(ironic, node_info):
         node_info.acquire_lock()
         try:
-            _background_introspect_locked(ironic, node_info)
+            _background_introspect_locked(node_info, ironic)
         finally:
             node_info.release_lock()
 
 
-def _background_introspect_locked(ironic, node_info):
+@node_cache.fsm_transition(istate.Events.wait)
+def _background_introspect_locked(node_info, ironic):
     # TODO(dtantsur): pagination
     macs = list(node_info.ports())
     if macs:
@@ -192,6 +194,8 @@ def abort(node_id, token=None):
     utils.executor().submit(_abort, node_info, ironic)
 
 
+@node_cache.release_lock
+@node_cache.fsm_transition(istate.Events.abort, reentrant=False)
 def _abort(node_info, ironic):
     # runs in background
     if node_info.finished_at is not None:


@ -0,0 +1,144 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Introspection state."""
from automaton import machines
class States(object):
"""States of an introspection."""
# received introspection data from a nonexistent node
# active - the inspector performs an operation on the node
enrolling = 'enrolling'
# an error appeared in a previous introspection state
# passive - the inspector doesn't perform any operation on the node
error = 'error'
# introspection finished successfully
# passive
finished = 'finished'
# processing introspection data from the node
# active
processing = 'processing'
# processing stored introspection data from the node
# active
reapplying = 'reapplying'
# received a request to start node introspection
# active
starting = 'starting'
# waiting for node introspection data
# passive
waiting = 'waiting'
@classmethod
def all(cls):
"""Return a list of all states."""
return [cls.starting, cls.waiting, cls.processing, cls.finished,
cls.error, cls.reapplying, cls.enrolling]
class Events(object):
"""Events that change introspection state."""
# cancel a waiting node introspection
# API, user
abort = 'abort'
# mark an introspection failed
# internal
error = 'error'
# mark an introspection finished
# internal
finish = 'finish'
# process node introspection data
# API, introspection image
process = 'process'
# process stored node introspection data
# API, user
reapply = 'reapply'
# initialize node introspection
# API, user
start = 'start'
# mark an introspection timed-out waiting for data
# internal
timeout = 'timeout'
# mark an introspection waiting for image data
# internal
wait = 'wait'
@classmethod
def all(cls):
"""Return a list of all events."""
return [cls.process, cls.reapply, cls.timeout, cls.wait, cls.abort,
cls.error, cls.finish]
# Error transition is allowed in any state.
State_space = [
{
'name': States.enrolling,
'next_states': {
Events.error: States.error,
Events.process: States.processing,
},
},
{
'name': States.error,
'next_states': {
Events.abort: States.error,
Events.error: States.error,
Events.reapply: States.reapplying,
Events.start: States.starting,
},
},
{
'name': States.finished,
'next_states': {
Events.finish: States.finished,
Events.reapply: States.reapplying,
Events.start: States.starting
},
},
{
'name': States.processing,
'next_states': {
Events.error: States.error,
Events.finish: States.finished,
},
},
{
'name': States.reapplying,
'next_states': {
Events.error: States.error,
Events.finish: States.finished,
Events.reapply: States.reapplying,
},
},
{
'name': States.starting,
'next_states': {
Events.error: States.error,
Events.start: States.starting,
Events.wait: States.waiting,
},
},
{
'name': States.waiting,
'next_states': {
Events.abort: States.error,
Events.process: States.processing,
Events.start: States.starting,
Events.timeout: States.error,
},
},
]
FSM = machines.FiniteMachine.build(State_space)
FSM.default_start_state = States.finished
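The machine built above is copied and driven per node by node_cache.NodeInfo
further down in this change. A minimal sketch exercising it directly, using
only calls that appear in this patch (FSM.copy, initialize, process_event,
current_state):

    from automaton import exceptions as automaton_errors

    from ironic_inspector import introspection_state as istate

    fsm = istate.FSM.copy(shallow=True)
    fsm.initialize(start_state=istate.States.finished)

    fsm.process_event(istate.Events.start)    # finished   -> starting
    fsm.process_event(istate.Events.wait)     # starting   -> waiting
    fsm.process_event(istate.Events.process)  # waiting    -> processing
    fsm.process_event(istate.Events.finish)   # processing -> finished
    print(fsm.current_state)                  # 'finished'

    try:
        # 'abort' is not a defined transition for the finished state
        fsm.process_event(istate.Events.abort)
    except automaton_errors.NotFound:
        print('invalid transition')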


@ -0,0 +1,49 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Introducing Node.state attribute
Revision ID: d2e48801c8ef
Revises: e169a4a81d88
Create Date: 2016-07-29 10:10:32.351661
"""
# revision identifiers, used by Alembic.
revision = 'd2e48801c8ef'
down_revision = 'e169a4a81d88'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
from sqlalchemy import sql
from ironic_inspector import introspection_state as istate
Node = sql.table('nodes',
sql.column('error', sa.String),
sql.column('state', sa.Enum(*istate.States.all())))
def upgrade():
op.add_column('nodes', sa.Column('version_id', sa.String(36),
server_default=''))
op.add_column('nodes', sa.Column('state', sa.Enum(*istate.States.all(),
name='node_state'),
nullable=False,
default=istate.States.finished,
server_default=istate.States.finished))
# correct the state: finished -> error if Node.error is not null
stmt = Node.update().where(Node.c.error != sql.null()).values(
{'state': op.inline_literal(istate.States.error)})
op.execute(stmt)
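For readability, the data-correction statement in upgrade() can be rendered as
plain SQL. A small stand-alone sketch (plain SQLAlchemy outside the Alembic
context, with the enum reduced to a string column) that prints the roughly
equivalent statement:

    import sqlalchemy as sa
    from sqlalchemy import sql

    nodes = sql.table('nodes',
                      sql.column('error', sa.String),
                      sql.column('state', sa.String))

    # Any node that had recorded an error is moved from the new server default
    # ('finished') to the 'error' state during the upgrade.
    stmt = nodes.update().where(nodes.c.error != sql.null()).values(
        state='error')

    # Prints roughly: UPDATE nodes SET state=:state WHERE nodes.error IS NOT NULL
    print(stmt)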


@@ -13,11 +13,13 @@
 """Cache for nodes currently under introspection."""
 
+import contextlib
 import copy
 import json
 import six
 import time
 
+from automaton import exceptions as automaton_errors
 from ironicclient import exceptions
 from oslo_concurrency import lockutils
 from oslo_config import cfg
@@ -25,13 +27,16 @@ from oslo_db import exception as db_exc
 from oslo_db.sqlalchemy import utils as db_utils
 from oslo_utils import excutils
 from oslo_utils import uuidutils
+from sqlalchemy.orm import exc as orm_errors
 from sqlalchemy import text
 
 from ironic_inspector import db
 from ironic_inspector.common.i18n import _, _LE, _LW, _LI
 from ironic_inspector.common import ironic as ir_utils
+from ironic_inspector import introspection_state as istate
 from ironic_inspector import utils
 
 
 CONF = cfg.CONF
@@ -62,9 +67,12 @@ class NodeInfo(object):
     for every thread.
     """
 
-    def __init__(self, uuid, started_at=None, finished_at=None, error=None,
-                 node=None, ports=None, ironic=None, lock=None):
+    def __init__(self, uuid, version_id=None, state=None, started_at=None,
+                 finished_at=None, error=None, node=None, ports=None,
+                 ironic=None, lock=None):
         self.uuid = uuid
+        self._version_id = version_id
+        self._state = state
         self.started_at = started_at
         self.finished_at = finished_at
         self.error = error
@@ -79,6 +87,7 @@ class NodeInfo(object):
         self._lock = lock if lock is not None else _get_lock(uuid)
         # Whether lock was acquired using this NodeInfo object
         self._locked = lock is not None
+        self._fsm = None
 
     def __del__(self):
         if self._locked:
@@ -86,6 +95,11 @@ class NodeInfo(object):
                             'node info object is deleted'))
             self._lock.release()
 
+    def __str__(self):
+        """Self represented as an UUID and a state."""
+        return _("%(uuid)s state %(state)s") % {'uuid': self.uuid,
+                                                'state': self._state}
+
     def acquire_lock(self, blocking=True):
         """Acquire a lock on the associated node.
@ -118,6 +132,106 @@ class NodeInfo(object):
self._lock.release() self._lock.release()
self._locked = False self._locked = False
@property
def version_id(self):
"""Get the version id"""
if self._version_id is None:
row = db.model_query(db.Node).get(self.uuid)
if row is None:
raise utils.NotFoundInCacheError(_('Node not found in the '
'cache'), node_info=self)
self._version_id = row.version_id
return self._version_id
def _set_version_id(self, value, session):
row = self._row(session)
row.version_id = value
row.save(session)
self._version_id = value
def _row(self, session=None):
"""Get a row from the database with self.uuid and self.version_id"""
try:
# race condition if version_id changed outside of this node_info
return db.model_query(db.Node, session=session).filter_by(
uuid=self.uuid, version_id=self.version_id).one()
except (orm_errors.NoResultFound, orm_errors.StaleDataError):
raise utils.NodeStateRaceCondition(node_info=self)
def _commit(self, **fields):
"""Commit the fields into the DB."""
LOG.debug('Committing fields: %s', fields, node_info=self)
with db.ensure_transaction() as session:
self._set_version_id(uuidutils.generate_uuid(), session)
row = self._row(session)
row.update(fields)
row.save(session)
def commit(self):
"""Commit current node status into the database."""
# state and version_id are updated separately
self._commit(started_at=self.started_at, finished_at=self.finished_at,
error=self.error)
@property
def state(self):
"""State of the node_info object."""
if self._state is None:
row = self._row()
self._state = row.state
return self._state
def _set_state(self, value):
self._commit(state=value)
self._state = value
def _get_fsm(self):
"""Get an fsm instance initialized with self.state."""
if self._fsm is None:
self._fsm = istate.FSM.copy(shallow=True)
self._fsm.initialize(start_state=self.state)
return self._fsm
@contextlib.contextmanager
def _fsm_ctx(self):
fsm = self._get_fsm()
try:
yield fsm
finally:
if fsm.current_state != self.state:
LOG.info(_LI('Updating node state: %(current)s --> %(new)s'),
{'current': self.state, 'new': fsm.current_state},
node_info=self)
self._set_state(fsm.current_state)
def fsm_event(self, event, strict=False):
"""Update node_info.state based on a fsm.process_event(event) call.
An AutomatonException triggers an error event.
If strict, node_info.finished(error=str(exc)) is called with the
AutomatonException instance and a EventError raised.
:param event: an event to process by the fsm
:strict: whether to fail the introspection upon an invalid event
:raises: NodeStateInvalidEvent
"""
with self._fsm_ctx() as fsm:
LOG.debug('Executing fsm(%(state)s).process_event(%(event)s)',
{'state': fsm.current_state, 'event': event},
node_info=self)
try:
fsm.process_event(event)
except automaton_errors.NotFound as exc:
msg = _('Invalid event: %s') % exc
if strict:
LOG.error(msg, node_info=self)
# assuming an error event is always possible
fsm.process_event(istate.Events.error)
self.finished(error=str(exc))
else:
LOG.warning(msg, node_info=self)
raise utils.NodeStateInvalidEvent(str(exc), node_info=self)
@property @property
def options(self): def options(self):
"""Node introspection options as a dict.""" """Node introspection options as a dict."""
@ -169,9 +283,7 @@ class NodeInfo(object):
self.error = error self.error = error
with db.ensure_transaction() as session: with db.ensure_transaction() as session:
db.model_query(db.Node, session=session).filter_by( self._commit(finished_at=self.finished_at, error=self.error)
uuid=self.uuid).update(
{'finished_at': self.finished_at, 'error': error})
db.model_query(db.Attribute, session=session).filter_by( db.model_query(db.Attribute, session=session).filter_by(
uuid=self.uuid).delete() uuid=self.uuid).delete()
db.model_query(db.Option, session=session).filter_by( db.model_query(db.Option, session=session).filter_by(
@ -207,7 +319,8 @@ class NodeInfo(object):
def from_row(cls, row, ironic=None, lock=None, node=None): def from_row(cls, row, ironic=None, lock=None, node=None):
"""Construct NodeInfo from a database row.""" """Construct NodeInfo from a database row."""
fields = {key: row[key] fields = {key: row[key]
for key in ('uuid', 'started_at', 'finished_at', 'error')} for key in ('uuid', 'version_id', 'state', 'started_at',
'finished_at', 'error')}
return cls(ironic=ironic, lock=lock, node=node, **fields) return cls(ironic=ironic, lock=lock, node=node, **fields)
def invalidate_cache(self): def invalidate_cache(self):
@ -217,6 +330,9 @@ class NodeInfo(object):
self._ports = None self._ports = None
self._attributes = None self._attributes = None
self._ironic = None self._ironic = None
self._fsm = None
self._state = None
self._version_id = None
def node(self, ironic=None): def node(self, ironic=None):
"""Get Ironic node object associated with the cached node record.""" """Get Ironic node object associated with the cached node record."""
@ -385,13 +501,172 @@ class NodeInfo(object):
self.patch([{'op': op, 'path': path, 'value': value}], ironic) self.patch([{'op': op, 'path': path, 'value': value}], ironic)
def add_node(uuid, **attributes): def triggers_fsm_error_transition(errors=(Exception,),
no_errors=(utils.NodeStateInvalidEvent,
utils.NodeStateRaceCondition)):
"""Trigger an fsm error transition upon certain errors.
It is assumed the first function arg of the decorated function is always a
NodeInfo instance.
:param errors: a tuple of exceptions upon which an error
event is triggered. Re-raised.
:param no_errors: a tuple of exceptions that won't trigger the
error event.
"""
def outer(func):
@six.wraps(func)
def inner(node_info, *args, **kwargs):
ret = None
try:
ret = func(node_info, *args, **kwargs)
except no_errors as exc:
LOG.debug('Not processing error event for the '
'exception: %(exc)s raised by %(func)s',
{'exc': exc, 'func': func}, node_info=node_info)
except errors as exc:
with excutils.save_and_reraise_exception():
LOG.error(_LE('Processing the error event because of an '
'exception %(exc_type)s: %(exc)s raised by '
'%(func)s'),
{'exc_type': type(exc), 'exc': exc,
'func': func},
node_info=node_info)
# an error event should be possible from all states
node_info.fsm_event(istate.Events.error)
return ret
return inner
return outer
def fsm_event_before(event, strict=False):
"""Trigger an fsm event before the function execution.
It is assumed the first function arg of the decorated function is always a
NodeInfo instance.
:param event: the event to process before the function call
:param strict: make an invalid fsm event trigger an error event
"""
def outer(func):
@six.wraps(func)
def inner(node_info, *args, **kwargs):
LOG.debug('Processing event %(event)s before calling '
'%(func)s', {'event': event, 'func': func},
node_info=node_info)
node_info.fsm_event(event, strict=strict)
LOG.debug('Calling: %(func)s(<node>, *%(args_)s, '
'**%(kwargs_)s)', {'func': func, 'args_': args,
'kwargs_': kwargs},
node_info=node_info)
return func(node_info, *args, **kwargs)
return inner
return outer
def fsm_event_after(event, strict=False):
"""Trigger an fsm event after the function execution.
It is assumed the first function arg of the decorated function is always a
NodeInfo instance.
:param event: the event to process after the function call
:param strict: make an invalid fsm event trigger an error event
"""
def outer(func):
@six.wraps(func)
def inner(node_info, *args, **kwargs):
LOG.debug('Calling: %(func)s(<node>, *%(args_)s, '
'**%(kwargs_)s)', {'func': func, 'args_': args,
'kwargs_': kwargs},
node_info=node_info)
ret = func(node_info, *args, **kwargs)
LOG.debug('Processing event %(event)s after calling '
'%(func)s', {'event': event, 'func': func},
node_info=node_info)
node_info.fsm_event(event, strict=strict)
return ret
return inner
return outer
def fsm_transition(event, reentrant=True, **exc_kwargs):
"""Decorate a function to perform a (non-)reentrant transition.
If True, reentrant transition will be performed at the end of a function
call. If False, the transition will be performed before the function call.
The function is decorated with the triggers_fsm_error_transition decorator
as well.
:param event: the event to bind the transition to.
:param reentrant: whether the transition is reentrant.
:param exc_kwargs: passed on to the triggers_fsm_error_transition decorator
"""
def outer(func):
inner = triggers_fsm_error_transition(**exc_kwargs)(func)
if not reentrant:
return fsm_event_before(event, strict=True)(inner)
return fsm_event_after(event)(inner)
return outer
def release_lock(func):
"""Decorate a node_info-function to release the node_info lock.
Assumes the first parameter of the function func is always a NodeInfo
instance.
"""
@six.wraps(func)
def inner(node_info, *args, **kwargs):
try:
return func(node_info, *args, **kwargs)
finally:
# FIXME(milan) hacking the test cases to work
# with release_lock.assert_called_once...
if node_info._locked:
node_info.release_lock()
return inner
def start_introspection(uuid, **kwargs):
"""Start the introspection of a node.
If a node_info record exists in the DB, a start transition is used rather
than dropping the record in order to check for the start transition
validity in particular node state.
:param uuid: Ironic node UUID
:param kwargs: passed on to add_node()
:raises: NodeStateInvalidEvent in case the start transition is invalid in
the current node state
:raises: NodeStateRaceCondition if a mismatch was detected between the
node_info cache and the DB
:returns: NodeInfo
"""
with db.ensure_transaction():
node_info = NodeInfo(uuid)
# check that the start transition is possible
try:
node_info.fsm_event(istate.Events.start)
except utils.NotFoundInCacheError:
# node not found while in the fsm_event handler
LOG.debug('Node missing in the cache; adding it now',
node_info=node_info)
state = istate.States.starting
else:
state = node_info.state
return add_node(uuid, state, **kwargs)
def add_node(uuid, state, **attributes):
"""Store information about a node under introspection. """Store information about a node under introspection.
All existing information about this node is dropped. All existing information about this node is dropped.
Empty values are skipped. Empty values are skipped.
:param uuid: Ironic node UUID :param uuid: Ironic node UUID
:param state: The initial state of the node
:param attributes: attributes known about this node (like macs, BMC etc); :param attributes: attributes known about this node (like macs, BMC etc);
also ironic client instance may be passed under 'ironic' also ironic client instance may be passed under 'ironic'
:returns: NodeInfo :returns: NodeInfo
@ -399,9 +674,9 @@ def add_node(uuid, **attributes):
started_at = time.time() started_at = time.time()
with db.ensure_transaction() as session: with db.ensure_transaction() as session:
_delete_node(uuid) _delete_node(uuid)
db.Node(uuid=uuid, started_at=started_at).save(session) db.Node(uuid=uuid, state=state, started_at=started_at).save(session)
node_info = NodeInfo(uuid=uuid, started_at=started_at, node_info = NodeInfo(uuid=uuid, state=state, started_at=started_at,
ironic=attributes.pop('ironic', None)) ironic=attributes.pop('ironic', None))
for (name, value) in attributes.items(): for (name, value) in attributes.items():
if not value: if not value:
@ -591,10 +866,9 @@ def clean_up():
try: try:
if node_info.finished_at or node_info.started_at > threshold: if node_info.finished_at or node_info.started_at > threshold:
continue continue
node_info.fsm_event(istate.Events.timeout)
node_info.finished(error='Introspection timeout')
db.model_query(db.Node, session=session).filter_by(
uuid=u).update({'finished_at': time.time(),
'error': 'Introspection timeout'})
db.model_query(db.Attribute, session=session).filter_by( db.model_query(db.Attribute, session=session).filter_by(
uuid=u).delete() uuid=u).delete()
db.model_query(db.Option, session=session).filter_by( db.model_query(db.Option, session=session).filter_by(
@ -610,6 +884,7 @@ def create_node(driver, ironic=None, **attributes):
* Create new node in ironic. * Create new node in ironic.
* Cache it in inspector. * Cache it in inspector.
* Sets node_info state to enrolling.
:param driver: driver for Ironic node. :param driver: driver for Ironic node.
:param ironic: ronic client instance. :param ironic: ronic client instance.
@ -625,7 +900,7 @@ def create_node(driver, ironic=None, **attributes):
LOG.error(_LE('Failed to create new node: %s'), e) LOG.error(_LE('Failed to create new node: %s'), e)
else: else:
LOG.info(_LI('Node %s was created successfully'), node.uuid) LOG.info(_LI('Node %s was created successfully'), node.uuid)
return add_node(node.uuid, ironic=ironic) return add_node(node.uuid, istate.States.enrolling, ironic=ironic)
def get_node_list(ironic=None, marker=None, limit=None): def get_node_list(ironic=None, marker=None, limit=None):
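The composition performed by fsm_transition (defined above) can be spelled out
explicitly; the two decorator stacks below are equivalent to
fsm_transition(event) and fsm_transition(event, reentrant=False) respectively.
The handler name is hypothetical, for illustration only:

    from ironic_inspector import introspection_state as istate
    from ironic_inspector import node_cache


    def _handler(node_info, ironic):
        pass

    # reentrant (default): wrap with the error guard, then process the event
    # *after* the call
    reentrant_handler = node_cache.fsm_event_after(istate.Events.finish)(
        node_cache.triggers_fsm_error_transition()(_handler))

    # non-reentrant: wrap with the error guard, then process the event
    # strictly *before* the call
    non_reentrant_handler = node_cache.fsm_event_before(
        istate.Events.process, strict=True)(
            node_cache.triggers_fsm_error_transition()(_handler))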


@@ -28,6 +28,7 @@ from ironic_inspector.common.i18n import _, _LE, _LI, _LW
 from ironic_inspector.common import ironic as ir_utils
 from ironic_inspector.common import swift
 from ironic_inspector import firewall
+from ironic_inspector import introspection_state as istate
 from ironic_inspector import node_cache
 from ironic_inspector.plugins import base as plugins_base
 from ironic_inspector import rules
@@ -234,7 +235,7 @@ def process(introspection_data):
         _store_logs(introspection_data, node_info)
 
     try:
-        result = _process_node(node, introspection_data, node_info)
+        result = _process_node(node_info, node, introspection_data)
     except utils.Error as exc:
         node_info.finished(error=str(exc))
         with excutils.save_and_reraise_exception():
@@ -263,7 +264,8 @@ def _run_post_hooks(node_info, introspection_data):
         hook_ext.obj.before_update(introspection_data, node_info)
 
 
-def _process_node(node, introspection_data, node_info):
+@node_cache.fsm_transition(istate.Events.process, reentrant=False)
+def _process_node(node_info, node, introspection_data):
     # NOTE(dtantsur): repeat the check in case something changed
     ir_utils.check_provision_state(node)
@@ -284,19 +286,20 @@ def _process_node(node, introspection_data, node_info):
         new_username, new_password = (
             node_info.options.get('new_ipmi_credentials'))
         utils.executor().submit(_finish_set_ipmi_credentials,
-                                ironic, node, node_info, introspection_data,
+                                node_info, ironic, node, introspection_data,
                                 new_username, new_password)
         resp['ipmi_setup_credentials'] = True
         resp['ipmi_username'] = new_username
         resp['ipmi_password'] = new_password
     else:
-        utils.executor().submit(_finish, ironic, node_info, introspection_data,
+        utils.executor().submit(_finish, node_info, ironic, introspection_data,
                                 power_off=CONF.processing.power_off)
 
     return resp
 
 
-def _finish_set_ipmi_credentials(ironic, node, node_info, introspection_data,
+@node_cache.fsm_transition(istate.Events.finish)
+def _finish_set_ipmi_credentials(node_info, ironic, node, introspection_data,
                                  new_username, new_password):
     patch = [{'op': 'add', 'path': '/driver_info/ipmi_username',
               'value': new_username},
@@ -320,7 +323,7 @@ def _finish_set_ipmi_credentials(ironic, node, node_info, introspection_data,
                       node_info=node_info, data=introspection_data)
             eventlet.greenthread.sleep(_CREDENTIALS_WAIT_PERIOD)
         else:
-            _finish(ironic, node_info, introspection_data)
+            _finish_common(node_info, ironic, introspection_data)
             return
 
     msg = (_('Failed to validate updated IPMI credentials for node '
@@ -329,7 +332,7 @@ def _finish_set_ipmi_credentials(ironic, node, node_info, introspection_data,
     raise utils.Error(msg, node_info=node_info, data=introspection_data)
 
 
-def _finish(ironic, node_info, introspection_data, power_off=True):
+def _finish_common(node_info, ironic, introspection_data, power_off=True):
     if power_off:
         LOG.debug('Forcing power off of node %s', node_info.uuid)
         try:
@@ -356,6 +359,9 @@ def _finish(ironic, node_info, introspection_data, power_off=True):
                           node_info=node_info, data=introspection_data)
 
 
+_finish = node_cache.fsm_transition(istate.Events.finish)(_finish_common)
+
+
 def reapply(node_ident):
     """Re-apply introspection steps.
 
@@ -395,30 +401,41 @@ def _reapply(node_info):
         node_info.finished(error=msg)
         return
 
-    failures = []
-    _run_pre_hooks(introspection_data, failures)
-    if failures:
-        LOG.error(_LE('Pre-processing failures detected reapplying '
-                      'introspection on stored data:\n%s'),
-                  '\n'.join(failures), node_info=node_info)
-        node_info.finished(error='\n'.join(failures))
+    try:
+        ironic = ir_utils.get_client()
+    except Exception as exc:
+        msg = _('Encountered an exception while getting the Ironic client: '
                '%s') % exc
+        LOG.error(msg, node_info=node_info, data=introspection_data)
+        node_info.fsm_event(istate.Events.error)
+        node_info.finished(error=msg)
         return
 
     try:
-        ironic = ir_utils.get_client()
-        node_info.create_ports(introspection_data.get('macs') or ())
-        _run_post_hooks(node_info, introspection_data)
-        _store_data(node_info, introspection_data)
-        node_info.invalidate_cache()
-        rules.apply(node_info, introspection_data)
-        _finish(ironic, node_info, introspection_data,
-                power_off=False)
+        _reapply_with_data(node_info, introspection_data)
     except Exception as exc:
-        LOG.exception(_LE('Encountered exception reapplying '
-                          'introspection on stored data'),
-                      node_info=node_info,
-                      data=introspection_data)
         node_info.finished(error=str(exc))
-    else:
-        LOG.info(_LI('Successfully reapplied introspection on stored '
-                     'data'), node_info=node_info, data=introspection_data)
+        return
+
+    _finish(node_info, ironic, introspection_data,
+            power_off=False)
+
+    LOG.info(_LI('Successfully reapplied introspection on stored '
+                 'data'), node_info=node_info, data=introspection_data)
+
+
+@node_cache.fsm_event_before(istate.Events.reapply)
+@node_cache.triggers_fsm_error_transition()
+def _reapply_with_data(node_info, introspection_data):
+    failures = []
+    _run_pre_hooks(introspection_data, failures)
+    if failures:
+        raise utils.Error(_('Pre-processing failures detected reapplying '
+                            'introspection on stored data:\n%s') %
+                          '\n'.join(failures), node_info=node_info)
+
+    node_info.create_ports(introspection_data.get('macs') or ())
+    _run_post_hooks(node_info, introspection_data)
+    _store_data(node_info, introspection_data)
+    node_info.invalidate_cache()
+    rules.apply(node_info, introspection_data)


@ -26,6 +26,7 @@ from ironic_inspector.common import i18n
# Import configuration options # Import configuration options
from ironic_inspector import conf # noqa from ironic_inspector import conf # noqa
from ironic_inspector import db from ironic_inspector import db
from ironic_inspector import introspection_state as istate
from ironic_inspector import node_cache from ironic_inspector import node_cache
from ironic_inspector.plugins import base as plugins_base from ironic_inspector.plugins import base as plugins_base
from ironic_inspector import utils from ironic_inspector import utils
@ -161,3 +162,17 @@ class NodeTest(InventoryTest):
self.node_info = node_cache.NodeInfo(uuid=self.uuid, started_at=0, self.node_info = node_cache.NodeInfo(uuid=self.uuid, started_at=0,
node=self.node, ports=self.ports) node=self.node, ports=self.ports)
self.node_info.node = mock.Mock(return_value=self.node) self.node_info.node = mock.Mock(return_value=self.node)
class NodeStateTest(NodeTest):
def setUp(self):
super(NodeStateTest, self).setUp()
self.node_info._version_id = uuidutils.generate_uuid()
self.node_info._state = istate.States.starting
self.db_node = db.Node(uuid=self.node_info.uuid,
version_id=self.node_info._version_id,
state=self.node_info._state,
started_at=self.node_info.started_at,
finished_at=self.node_info.finished_at,
error=self.node_info.error)
self.db_node.save(self.session)


@ -38,6 +38,7 @@ from ironic_inspector.common import ironic as ir_utils
from ironic_inspector.common import swift from ironic_inspector.common import swift
from ironic_inspector import db from ironic_inspector import db
from ironic_inspector import dbsync from ironic_inspector import dbsync
from ironic_inspector import introspection_state as istate
from ironic_inspector import main from ironic_inspector import main
from ironic_inspector import rules from ironic_inspector import rules
from ironic_inspector.test import base from ironic_inspector.test import base
@ -221,6 +222,10 @@ class Base(base.NodeTest):
else: else:
self.assertIsNone(status['finished_at']) self.assertIsNone(status['finished_at'])
def db_row(self):
"""return database row matching self.uuid."""
return db.model_query(db.Node).get(self.uuid)
class Test(Base): class Test(Base):
def test_bmc(self): def test_bmc(self):
@ -561,6 +566,122 @@ class Test(Base):
self.assertEqual(store_processing_call, self.assertEqual(store_processing_call,
store_mock.call_args_list[-1]) store_mock.call_args_list[-1])
# TODO(milan): remove the test case in favor of other tests once
# the introspection status endpoint exposes the state information
@mock.patch.object(swift, 'store_introspection_data', autospec=True)
@mock.patch.object(swift, 'get_introspection_data', autospec=True)
def test_state_transitions(self, get_mock, store_mock):
"""Assert state transitions work as expected."""
cfg.CONF.set_override('store_data', 'swift', 'processing')
# ramdisk data copy
# please mind the data is changed during processing
ramdisk_data = json.dumps(copy.deepcopy(self.data))
get_mock.return_value = ramdisk_data
self.call_introspect(self.uuid)
reboot_call = mock.call(self.uuid, 'reboot')
self.cli.node.set_power_state.assert_has_calls([reboot_call])
eventlet.greenthread.sleep(DEFAULT_SLEEP)
row = self.db_row()
self.assertEqual(istate.States.waiting, row.state)
self.call_continue(self.data)
eventlet.greenthread.sleep(DEFAULT_SLEEP)
row = self.db_row()
self.assertEqual(istate.States.finished, row.state)
self.assertIsNone(row.error)
version_id = row.version_id
self.call_reapply(self.uuid)
eventlet.greenthread.sleep(DEFAULT_SLEEP)
row = self.db_row()
self.assertEqual(istate.States.finished, row.state)
self.assertIsNone(row.error)
# the finished state was visited from the reapplying state
self.assertNotEqual(version_id, row.version_id)
self.call_introspect(self.uuid)
eventlet.greenthread.sleep(DEFAULT_SLEEP)
row = self.db_row()
self.assertEqual(istate.States.waiting, row.state)
self.call_abort_introspect(self.uuid)
row = self.db_row()
self.assertEqual(istate.States.error, row.state)
self.assertEqual('Canceled by operator', row.error)
@mock.patch.object(swift, 'store_introspection_data', autospec=True)
@mock.patch.object(swift, 'get_introspection_data', autospec=True)
def test_edge_state_transitions(self, get_mock, store_mock):
"""Assert state transitions work as expected in edge conditions."""
cfg.CONF.set_override('store_data', 'swift', 'processing')
# ramdisk data copy
# please mind the data is changed during processing
ramdisk_data = json.dumps(copy.deepcopy(self.data))
get_mock.return_value = ramdisk_data
# multiple introspect calls
self.call_introspect(self.uuid)
self.call_introspect(self.uuid)
eventlet.greenthread.sleep(DEFAULT_SLEEP)
# TODO(milan): switch to API once the introspection status
# endpoint exposes the state information
row = self.db_row()
self.assertEqual(istate.States.waiting, row.state)
# an error -start-> starting state transition is possible
self.call_abort_introspect(self.uuid)
self.call_introspect(self.uuid)
eventlet.greenthread.sleep(DEFAULT_SLEEP)
row = self.db_row()
self.assertEqual(istate.States.waiting, row.state)
# double abort works
self.call_abort_introspect(self.uuid)
row = self.db_row()
version_id = row.version_id
error = row.error
self.assertEqual(istate.States.error, row.state)
self.call_abort_introspect(self.uuid)
row = self.db_row()
self.assertEqual(istate.States.error, row.state)
# assert the error didn't change
self.assertEqual(error, row.error)
self.assertEqual(version_id, row.version_id)
# preventing stale data race condition
# waiting -> processing is a strict state transition
self.call_introspect(self.uuid)
eventlet.greenthread.sleep(DEFAULT_SLEEP)
row = self.db_row()
row.state = istate.States.processing
with db.ensure_transaction() as session:
row.save(session)
self.call_continue(self.data, expect_error=400)
row = self.db_row()
self.assertEqual(istate.States.error, row.state)
self.assertIn('no defined transition', row.error)
# multiple reapply calls
self.call_introspect(self.uuid)
eventlet.greenthread.sleep(DEFAULT_SLEEP)
self.call_continue(self.data)
eventlet.greenthread.sleep(DEFAULT_SLEEP)
self.call_reapply(self.uuid)
row = self.db_row()
version_id = row.version_id
self.assertEqual(istate.States.finished, row.state)
self.assertIsNone(row.error)
self.call_reapply(self.uuid)
# assert an finished -reapply-> reapplying -> finished state transition
row = self.db_row()
self.assertEqual(istate.States.finished, row.state)
self.assertIsNone(row.error)
self.assertNotEqual(version_id, row.version_id)
@contextlib.contextmanager @contextlib.contextmanager
def mocked_server(): def mocked_server():


@ -20,6 +20,7 @@ from oslo_config import cfg
from ironic_inspector.common import ironic as ir_utils from ironic_inspector.common import ironic as ir_utils
from ironic_inspector import firewall from ironic_inspector import firewall
from ironic_inspector import introspection_state as istate
from ironic_inspector import node_cache from ironic_inspector import node_cache
from ironic_inspector.test import base as test_base from ironic_inspector.test import base as test_base
@ -95,7 +96,8 @@ class TestFirewall(test_base.NodeTest):
def test_update_filters_args(self, mock_call, mock_get_client, def test_update_filters_args(self, mock_call, mock_get_client,
mock_iptables): mock_iptables):
# Pretend that we have nodes on introspection # Pretend that we have nodes on introspection
node_cache.add_node(self.node.uuid, bmc_address='1.2.3.4') node_cache.add_node(self.node.uuid, state=istate.States.waiting,
bmc_address='1.2.3.4')
firewall.init() firewall.init()
@ -162,6 +164,7 @@ class TestFirewall(test_base.NodeTest):
self.ports = [mock.Mock(address=m) for m in self.macs] self.ports = [mock.Mock(address=m) for m in self.macs]
mock_get_client.port.list.return_value = self.ports mock_get_client.port.list.return_value = self.ports
node_cache.add_node(self.node.uuid, mac=active_macs, node_cache.add_node(self.node.uuid, mac=active_macs,
state=istate.States.finished,
bmc_address='1.2.3.4', foo=None) bmc_address='1.2.3.4', foo=None)
firewall.init() firewall.init()
@ -211,6 +214,7 @@ class TestFirewall(test_base.NodeTest):
self.ports = [mock.Mock(address=m) for m in self.macs] self.ports = [mock.Mock(address=m) for m in self.macs]
mock_get_client.port.list.return_value = self.ports mock_get_client.port.list.return_value = self.ports
node_cache.add_node(self.node.uuid, mac=active_macs, node_cache.add_node(self.node.uuid, mac=active_macs,
state=istate.States.finished,
bmc_address='1.2.3.4', foo=None) bmc_address='1.2.3.4', foo=None)
firewall.init() firewall.init()
@ -330,7 +334,8 @@ class TestFirewall(test_base.NodeTest):
# Adding a node changes it back # Adding a node changes it back
node_cache.add_node(self.node.uuid, bmc_address='1.2.3.4') node_cache.add_node(self.node.uuid, state=istate.States.starting,
bmc_address='1.2.3.4')
mock_iptables.reset_mock() mock_iptables.reset_mock()
firewall.update_filters() firewall.update_filters()


@ -50,21 +50,21 @@ class BaseTest(test_base.NodeTest):
@mock.patch.object(eventlet.greenthread, 'sleep', lambda _: None) @mock.patch.object(eventlet.greenthread, 'sleep', lambda _: None)
@mock.patch.object(firewall, 'update_filters', autospec=True) @mock.patch.object(firewall, 'update_filters', autospec=True)
@mock.patch.object(node_cache, 'add_node', autospec=True) @mock.patch.object(node_cache, 'start_introspection', autospec=True)
@mock.patch.object(ir_utils, 'get_client', autospec=True) @mock.patch.object(ir_utils, 'get_client', autospec=True)
class TestIntrospect(BaseTest): class TestIntrospect(BaseTest):
def test_ok(self, client_mock, add_mock, filters_mock): def test_ok(self, client_mock, start_mock, filters_mock):
cli = self._prepare(client_mock) cli = self._prepare(client_mock)
add_mock.return_value = self.node_info start_mock.return_value = self.node_info
introspect.introspect(self.node.uuid) introspect.introspect(self.node.uuid)
cli.node.get.assert_called_once_with(self.uuid) cli.node.get.assert_called_once_with(self.uuid)
cli.node.validate.assert_called_once_with(self.uuid) cli.node.validate.assert_called_once_with(self.uuid)
add_mock.assert_called_once_with(self.uuid, start_mock.assert_called_once_with(self.uuid,
bmc_address=self.bmc_address, bmc_address=self.bmc_address,
ironic=cli) ironic=cli)
self.node_info.ports.assert_called_once_with() self.node_info.ports.assert_called_once_with()
self.node_info.add_attribute.assert_called_once_with('mac', self.node_info.add_attribute.assert_called_once_with('mac',
self.macs) self.macs)
@ -79,71 +79,71 @@ class TestIntrospect(BaseTest):
self.node_info.acquire_lock.assert_called_once_with() self.node_info.acquire_lock.assert_called_once_with()
self.node_info.release_lock.assert_called_once_with() self.node_info.release_lock.assert_called_once_with()
def test_ok_ilo_and_drac(self, client_mock, add_mock, filters_mock): def test_ok_ilo_and_drac(self, client_mock, start_mock, filters_mock):
cli = self._prepare(client_mock) cli = self._prepare(client_mock)
add_mock.return_value = self.node_info start_mock.return_value = self.node_info
for name in ('ilo_address', 'drac_host'): for name in ('ilo_address', 'drac_host'):
self.node.driver_info = {name: self.bmc_address} self.node.driver_info = {name: self.bmc_address}
introspect.introspect(self.node.uuid) introspect.introspect(self.node.uuid)
add_mock.assert_called_with(self.uuid, start_mock.assert_called_with(self.uuid,
bmc_address=self.bmc_address, bmc_address=self.bmc_address,
ironic=cli) ironic=cli)
def test_power_failure(self, client_mock, add_mock, filters_mock): def test_power_failure(self, client_mock, start_mock, filters_mock):
cli = self._prepare(client_mock) cli = self._prepare(client_mock)
cli.node.set_boot_device.side_effect = exceptions.BadRequest() cli.node.set_boot_device.side_effect = exceptions.BadRequest()
cli.node.set_power_state.side_effect = exceptions.BadRequest() cli.node.set_power_state.side_effect = exceptions.BadRequest()
add_mock.return_value = self.node_info start_mock.return_value = self.node_info
introspect.introspect(self.node.uuid) introspect.introspect(self.node.uuid)
cli.node.get.assert_called_once_with(self.uuid) cli.node.get.assert_called_once_with(self.uuid)
add_mock.assert_called_once_with(self.uuid, start_mock.assert_called_once_with(self.uuid,
bmc_address=self.bmc_address, bmc_address=self.bmc_address,
ironic=cli) ironic=cli)
cli.node.set_boot_device.assert_called_once_with(self.uuid, cli.node.set_boot_device.assert_called_once_with(self.uuid,
'pxe', 'pxe',
persistent=False) persistent=False)
cli.node.set_power_state.assert_called_once_with(self.uuid, cli.node.set_power_state.assert_called_once_with(self.uuid,
'reboot') 'reboot')
add_mock.return_value.finished.assert_called_once_with( start_mock.return_value.finished.assert_called_once_with(
error=mock.ANY) error=mock.ANY)
self.node_info.acquire_lock.assert_called_once_with() self.node_info.acquire_lock.assert_called_once_with()
self.node_info.release_lock.assert_called_once_with() self.node_info.release_lock.assert_called_once_with()
def test_unexpected_error(self, client_mock, add_mock, filters_mock): def test_unexpected_error(self, client_mock, start_mock, filters_mock):
cli = self._prepare(client_mock) cli = self._prepare(client_mock)
add_mock.return_value = self.node_info start_mock.return_value = self.node_info
filters_mock.side_effect = RuntimeError() filters_mock.side_effect = RuntimeError()
introspect.introspect(self.node.uuid) introspect.introspect(self.node.uuid)
cli.node.get.assert_called_once_with(self.uuid) cli.node.get.assert_called_once_with(self.uuid)
add_mock.assert_called_once_with(self.uuid, start_mock.assert_called_once_with(self.uuid,
bmc_address=self.bmc_address, bmc_address=self.bmc_address,
ironic=cli) ironic=cli)
self.assertFalse(cli.node.set_boot_device.called) self.assertFalse(cli.node.set_boot_device.called)
add_mock.return_value.finished.assert_called_once_with( start_mock.return_value.finished.assert_called_once_with(
error=mock.ANY) error=mock.ANY)
self.node_info.acquire_lock.assert_called_once_with() self.node_info.acquire_lock.assert_called_once_with()
self.node_info.release_lock.assert_called_once_with() self.node_info.release_lock.assert_called_once_with()
def test_no_macs(self, client_mock, add_mock, filters_mock): def test_no_macs(self, client_mock, start_mock, filters_mock):
cli = self._prepare(client_mock) cli = self._prepare(client_mock)
self.node_info.ports.return_value = [] self.node_info.ports.return_value = []
add_mock.return_value = self.node_info start_mock.return_value = self.node_info
introspect.introspect(self.node.uuid) introspect.introspect(self.node.uuid)
self.node_info.ports.assert_called_once_with() self.node_info.ports.assert_called_once_with()
add_mock.assert_called_once_with(self.uuid, start_mock.assert_called_once_with(self.uuid,
bmc_address=self.bmc_address, bmc_address=self.bmc_address,
ironic=cli) ironic=cli)
self.assertFalse(self.node_info.add_attribute.called) self.assertFalse(self.node_info.add_attribute.called)
self.assertFalse(filters_mock.called) self.assertFalse(filters_mock.called)
cli.node.set_boot_device.assert_called_once_with(self.uuid, cli.node.set_boot_device.assert_called_once_with(self.uuid,
@ -152,10 +152,10 @@ class TestIntrospect(BaseTest):
cli.node.set_power_state.assert_called_once_with(self.uuid, cli.node.set_power_state.assert_called_once_with(self.uuid,
'reboot') 'reboot')
def test_no_lookup_attrs(self, client_mock, add_mock, filters_mock): def test_no_lookup_attrs(self, client_mock, start_mock, filters_mock):
cli = self._prepare(client_mock) cli = self._prepare(client_mock)
self.node_info.ports.return_value = [] self.node_info.ports.return_value = []
add_mock.return_value = self.node_info start_mock.return_value = self.node_info
self.node_info.attributes = {} self.node_info.attributes = {}
introspect.introspect(self.uuid) introspect.introspect(self.uuid)
@ -168,11 +168,12 @@ class TestIntrospect(BaseTest):
self.node_info.release_lock.assert_called_once_with() self.node_info.release_lock.assert_called_once_with()
def test_no_lookup_attrs_with_node_not_found_hook(self, client_mock, def test_no_lookup_attrs_with_node_not_found_hook(self, client_mock,
add_mock, filters_mock): start_mock,
filters_mock):
CONF.set_override('node_not_found_hook', 'example', 'processing') CONF.set_override('node_not_found_hook', 'example', 'processing')
cli = self._prepare(client_mock) cli = self._prepare(client_mock)
self.node_info.ports.return_value = [] self.node_info.ports.return_value = []
add_mock.return_value = self.node_info start_mock.return_value = self.node_info
self.node_info.attributes = {} self.node_info.attributes = {}
introspect.introspect(self.uuid) introspect.introspect(self.uuid)
@ -185,7 +186,7 @@ class TestIntrospect(BaseTest):
cli.node.set_power_state.assert_called_once_with(self.uuid, cli.node.set_power_state.assert_called_once_with(self.uuid,
'reboot') 'reboot')
def test_failed_to_get_node(self, client_mock, add_mock, filters_mock): def test_failed_to_get_node(self, client_mock, start_mock, filters_mock):
cli = client_mock.return_value cli = client_mock.return_value
cli.node.get.side_effect = exceptions.NotFound() cli.node.get.side_effect = exceptions.NotFound()
self.assertRaisesRegex(utils.Error, self.assertRaisesRegex(utils.Error,
@ -200,10 +201,10 @@ class TestIntrospect(BaseTest):
self.assertEqual(0, self.node_info.ports.call_count) self.assertEqual(0, self.node_info.ports.call_count)
self.assertEqual(0, filters_mock.call_count) self.assertEqual(0, filters_mock.call_count)
self.assertEqual(0, cli.node.set_power_state.call_count) self.assertEqual(0, cli.node.set_power_state.call_count)
self.assertFalse(add_mock.called) self.assertFalse(start_mock.called)
self.assertFalse(self.node_info.acquire_lock.called) self.assertFalse(self.node_info.acquire_lock.called)
def test_failed_to_validate_node(self, client_mock, add_mock, def test_failed_to_validate_node(self, client_mock, start_mock,
filters_mock): filters_mock):
cli = client_mock.return_value cli = client_mock.return_value
cli.node.get.return_value = self.node cli.node.get.return_value = self.node
@ -219,10 +220,11 @@ class TestIntrospect(BaseTest):
self.assertEqual(0, self.node_info.ports.call_count) self.assertEqual(0, self.node_info.ports.call_count)
self.assertEqual(0, filters_mock.call_count) self.assertEqual(0, filters_mock.call_count)
self.assertEqual(0, cli.node.set_power_state.call_count) self.assertEqual(0, cli.node.set_power_state.call_count)
self.assertFalse(add_mock.called) self.assertFalse(start_mock.called)
self.assertFalse(self.node_info.acquire_lock.called) self.assertFalse(self.node_info.acquire_lock.called)
def test_wrong_provision_state(self, client_mock, add_mock, filters_mock): def test_wrong_provision_state(self, client_mock, start_mock,
filters_mock):
self.node.provision_state = 'active' self.node.provision_state = 'active'
cli = client_mock.return_value cli = client_mock.return_value
cli.node.get.return_value = self.node cli.node.get.return_value = self.node
@ -234,20 +236,20 @@ class TestIntrospect(BaseTest):
self.assertEqual(0, self.node_info.ports.call_count) self.assertEqual(0, self.node_info.ports.call_count)
self.assertEqual(0, filters_mock.call_count) self.assertEqual(0, filters_mock.call_count)
self.assertEqual(0, cli.node.set_power_state.call_count) self.assertEqual(0, cli.node.set_power_state.call_count)
self.assertFalse(add_mock.called) self.assertFalse(start_mock.called)
self.assertFalse(self.node_info.acquire_lock.called) self.assertFalse(self.node_info.acquire_lock.called)
@mock.patch.object(time, 'sleep') @mock.patch.object(time, 'sleep')
@mock.patch.object(time, 'time') @mock.patch.object(time, 'time')
def test_sleep_no_pxe_ssh(self, time_mock, sleep_mock, client_mock, def test_sleep_no_pxe_ssh(self, time_mock, sleep_mock, client_mock,
add_mock, filters_mock): start_mock, filters_mock):
self.node.driver = 'pxe_ipmitool' self.node.driver = 'pxe_ipmitool'
time_mock.return_value = 42 time_mock.return_value = 42
introspect._LAST_INTROSPECTION_TIME = 40 introspect._LAST_INTROSPECTION_TIME = 40
CONF.set_override('introspection_delay', 10) CONF.set_override('introspection_delay', 10)
cli = self._prepare(client_mock) cli = self._prepare(client_mock)
add_mock.return_value = self.node_info start_mock.return_value = self.node_info
introspect.introspect(self.uuid) introspect.introspect(self.uuid)
@ -263,14 +265,14 @@ class TestIntrospect(BaseTest):
@mock.patch.object(time, 'sleep') @mock.patch.object(time, 'sleep')
@mock.patch.object(time, 'time') @mock.patch.object(time, 'time')
def test_sleep_with_pxe_ssh(self, time_mock, sleep_mock, client_mock, def test_sleep_with_pxe_ssh(self, time_mock, sleep_mock, client_mock,
add_mock, filters_mock): start_mock, filters_mock):
self.node.driver = 'pxe_ssh' self.node.driver = 'pxe_ssh'
time_mock.return_value = 42 time_mock.return_value = 42
introspect._LAST_INTROSPECTION_TIME = 40
CONF.set_override('introspection_delay', 10)
cli = self._prepare(client_mock)
-add_mock.return_value = self.node_info
+start_mock.return_value = self.node_info
introspect.introspect(self.uuid)
@@ -286,7 +288,7 @@ class TestIntrospect(BaseTest):
@mock.patch.object(time, 'sleep')
@mock.patch.object(time, 'time')
def test_sleep_not_needed_with_pxe_ssh(self, time_mock, sleep_mock,
-client_mock, add_mock,
+client_mock, start_mock,
filters_mock):
self.node.driver = 'agent_ssh'
time_mock.return_value = 100
@@ -294,7 +296,7 @@ class TestIntrospect(BaseTest):
CONF.set_override('introspection_delay', 10)
cli = self._prepare(client_mock)
-add_mock.return_value = self.node_info
+start_mock.return_value = self.node_info
introspect.introspect(self.uuid)
@@ -310,7 +312,7 @@ class TestIntrospect(BaseTest):
@mock.patch.object(time, 'sleep')
@mock.patch.object(time, 'time')
def test_sleep_with_custom_driver(self, time_mock, sleep_mock, client_mock,
-add_mock, filters_mock):
+start_mock, filters_mock):
self.node.driver = 'foobar'
time_mock.return_value = 42
introspect._LAST_INTROSPECTION_TIME = 40
@@ -318,7 +320,7 @@ class TestIntrospect(BaseTest):
CONF.set_override('introspection_delay_drivers', 'fo{1,2}b.r')
cli = self._prepare(client_mock)
-add_mock.return_value = self.node_info
+start_mock.return_value = self.node_info
introspect.introspect(self.uuid)
@@ -333,7 +335,7 @@ class TestIntrospect(BaseTest):
@mock.patch.object(firewall, 'update_filters', autospec=True)
-@mock.patch.object(node_cache, 'add_node', autospec=True)
+@mock.patch.object(node_cache, 'start_introspection', autospec=True)
@mock.patch.object(ir_utils, 'get_client', autospec=True)
class TestSetIpmiCredentials(BaseTest):
def setUp(self):
@@ -344,23 +346,23 @@ class TestSetIpmiCredentials(BaseTest):
self.node_info.options['new_ipmi_credentials'] = self.new_creds
self.node.provision_state = 'enroll'
-def test_ok(self, client_mock, add_mock, filters_mock):
+def test_ok(self, client_mock, start_mock, filters_mock):
cli = self._prepare(client_mock)
-add_mock.return_value = self.node_info
+start_mock.return_value = self.node_info
introspect.introspect(self.uuid, new_ipmi_credentials=self.new_creds)
-add_mock.assert_called_once_with(self.uuid,
+start_mock.assert_called_once_with(self.uuid,
bmc_address=self.bmc_address,
ironic=cli)
filters_mock.assert_called_with(cli)
self.assertFalse(cli.node.validate.called)
self.assertFalse(cli.node.set_boot_device.called)
self.assertFalse(cli.node.set_power_state.called)
-add_mock.return_value.set_option.assert_called_once_with(
+start_mock.return_value.set_option.assert_called_once_with(
'new_ipmi_credentials', self.new_creds)
-def test_disabled(self, client_mock, add_mock, filters_mock):
+def test_disabled(self, client_mock, start_mock, filters_mock):
CONF.set_override('enable_setting_ipmi_credentials', False,
'processing')
self._prepare(client_mock)
@@ -369,45 +371,45 @@ class TestSetIpmiCredentials(BaseTest):
introspect.introspect, self.uuid,
new_ipmi_credentials=self.new_creds)
-def test_no_username(self, client_mock, add_mock, filters_mock):
+def test_no_username(self, client_mock, start_mock, filters_mock):
self._prepare(client_mock)
self.assertRaises(utils.Error, introspect.introspect, self.uuid,
new_ipmi_credentials=(None, 'password'))
-def test_default_username(self, client_mock, add_mock, filters_mock):
+def test_default_username(self, client_mock, start_mock, filters_mock):
cli = self._prepare(client_mock)
-add_mock.return_value = self.node_info
+start_mock.return_value = self.node_info
self.node.driver_info['ipmi_username'] = self.new_creds[0]
introspect.introspect(self.uuid,
new_ipmi_credentials=(None, self.new_creds[1]))
-add_mock.assert_called_once_with(self.uuid,
+start_mock.assert_called_once_with(self.uuid,
bmc_address=self.bmc_address,
ironic=cli)
filters_mock.assert_called_with(cli)
self.assertFalse(cli.node.validate.called)
self.assertFalse(cli.node.set_boot_device.called)
self.assertFalse(cli.node.set_power_state.called)
-add_mock.return_value.set_option.assert_called_once_with(
+start_mock.return_value.set_option.assert_called_once_with(
'new_ipmi_credentials', self.new_creds)
-def test_wrong_letters(self, client_mock, add_mock, filters_mock):
+def test_wrong_letters(self, client_mock, start_mock, filters_mock):
self.new_creds = ('user', 'p ssw@rd')
self._prepare(client_mock)
self.assertRaises(utils.Error, introspect.introspect, self.uuid,
new_ipmi_credentials=self.new_creds)
-def test_too_long(self, client_mock, add_mock, filters_mock):
+def test_too_long(self, client_mock, start_mock, filters_mock):
self.new_creds = ('user', 'password' * 100)
self._prepare(client_mock)
self.assertRaises(utils.Error, introspect.introspect, self.uuid,
new_ipmi_credentials=self.new_creds)
-def test_wrong_state(self, client_mock, add_mock, filters_mock):
+def test_wrong_state(self, client_mock, start_mock, filters_mock):
self.node.provision_state = 'manageable'
self._prepare(client_mock)
@@ -34,11 +34,13 @@ from oslo_db.sqlalchemy import test_base
from oslo_db.sqlalchemy import test_migrations
from oslo_db.sqlalchemy import utils as db_utils
from oslo_log import log as logging
from oslo_utils import uuidutils
import sqlalchemy
from ironic_inspector.common.i18n import _LE
from ironic_inspector import db
from ironic_inspector import dbsync
from ironic_inspector import introspection_state as istate
from ironic_inspector.test import base
CONF = cfg.CONF
@@ -314,6 +316,54 @@
rule_conditions.c.id == 2).execute().first()
self.assertTrue(conds['invert'])
def _pre_upgrade_d2e48801c8ef(self, engine):
ok_node_id = uuidutils.generate_uuid()
err_node_id = uuidutils.generate_uuid()
data = [
{
'uuid': ok_node_id,
'error': None,
'finished_at': 0.0,
'started_at': 0.0
},
{
'uuid': err_node_id,
'error': 'Oops!',
'finished_at': 0.0,
'started_at': 0.0
}
]
nodes = db_utils.get_table(engine, 'nodes')
for node in data:
nodes.insert().execute(node)
return {'err_node_id': err_node_id, 'ok_node_id': ok_node_id}
def _check_d2e48801c8ef(self, engine, data):
nodes = db_utils.get_table(engine, 'nodes')
col_names = [column.name for column in nodes.c]
self.assertIn('uuid', col_names)
self.assertIsInstance(nodes.c.uuid.type, sqlalchemy.types.String)
self.assertIn('version_id', col_names)
self.assertIsInstance(nodes.c.version_id.type, sqlalchemy.types.String)
self.assertIn('state', col_names)
self.assertIsInstance(nodes.c.state.type, sqlalchemy.types.String)
self.assertIn('started_at', col_names)
self.assertIsInstance(nodes.c.started_at.type, sqlalchemy.types.Float)
self.assertIn('finished_at', col_names)
self.assertIsInstance(nodes.c.started_at.type, sqlalchemy.types.Float)
self.assertIn('error', col_names)
self.assertIsInstance(nodes.c.error.type, sqlalchemy.types.Text)
ok_node_id = data['ok_node_id']
err_node_id = data['err_node_id']
# assert the ok node is in the (default) finished state
ok_node = nodes.select(nodes.c.uuid == ok_node_id).execute().first()
self.assertEqual(istate.States.finished, ok_node['state'])
# assert err node state is error after the migration
# even though the default state is finished
err_node = nodes.select(nodes.c.uuid == err_node_id).execute().first()
self.assertEqual(istate.States.error, err_node['state'])
def test_upgrade_and_version(self):
with patch_with_engine(self.engine):
self.migration_ext.upgrade('head')
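The _pre_upgrade_d2e48801c8ef/_check_d2e48801c8ef pair above pins down what the upgrade must do: add the version_id and state columns, default state to finished, and flip any row with a non-null error to the error state. The snippet below is only an illustration of an Alembic upgrade with that behaviour; it is not the actual d2e48801c8ef revision, and the enum member list is simply the set of state names that appear elsewhere in this change.

```python
# Illustrative Alembic upgrade sketch (not the real d2e48801c8ef revision).
import sqlalchemy as sa
from sqlalchemy.sql import column, table

from alembic import op


def upgrade():
    state_enum = sa.Enum('enrolling', 'error', 'finished', 'processing',
                         'starting', 'waiting', name='node_state')
    op.add_column('nodes', sa.Column('version_id', sa.String(36),
                                     server_default=''))
    op.add_column('nodes', sa.Column('state', state_enum, nullable=False,
                                     server_default='finished'))
    # Preserve past failures: rows that recorded an error become 'error'.
    nodes = table('nodes', column('state'), column('error'))
    op.execute(nodes.update()
               .where(nodes.c.error.isnot(None))
               .values(state='error'))
```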

@@ -13,15 +13,19 @@
import copy
import json
import six
import time
import unittest
import automaton
import mock
from oslo_config import cfg
import oslo_db
from oslo_utils import uuidutils
from ironic_inspector.common import ironic as ir_utils
from ironic_inspector import db
from ironic_inspector import introspection_state as istate
from ironic_inspector import node_cache
from ironic_inspector.test import base as test_base
from ironic_inspector import utils
@ -35,14 +39,18 @@ class TestNodeCache(test_base.NodeTest):
uuid2 = uuidutils.generate_uuid() uuid2 = uuidutils.generate_uuid()
session = db.get_session() session = db.get_session()
with session.begin(): with session.begin():
db.Node(uuid=self.node.uuid).save(session) db.Node(uuid=self.node.uuid,
db.Node(uuid=uuid2).save(session) state=istate.States.starting).save(session)
db.Node(uuid=uuid2,
state=istate.States.starting).save(session)
db.Attribute(name='mac', db.Attribute(name='mac',
value='11:22:11:22:11:22', value='11:22:11:22:11:22',
uuid=self.uuid).save(session) uuid=self.uuid).save(session)
node = node_cache.add_node(self.node.uuid, mac=self.macs, node = node_cache.add_node(self.node.uuid,
bmc_address='1.2.3.4', foo=None) istate.States.starting,
mac=self.macs, bmc_address='1.2.3.4',
foo=None)
self.assertEqual(self.uuid, node.uuid) self.assertEqual(self.uuid, node.uuid)
self.assertTrue(time.time() - 60 < node.started_at < time.time() + 60) self.assertTrue(time.time() - 60 < node.started_at < time.time() + 60)
self.assertFalse(node._locked) self.assertFalse(node._locked)
@ -64,7 +72,8 @@ class TestNodeCache(test_base.NodeTest):
def test__delete_node(self): def test__delete_node(self):
session = db.get_session() session = db.get_session()
with session.begin(): with session.begin():
db.Node(uuid=self.node.uuid).save(session) db.Node(uuid=self.node.uuid,
state=istate.States.finished).save(session)
db.Attribute(name='mac', value='11:22:11:22:11:22', db.Attribute(name='mac', value='11:22:11:22:11:22',
uuid=self.uuid).save(session) uuid=self.uuid).save(session)
data = {'s': 'value', 'b': True, 'i': 42} data = {'s': 'value', 'b': True, 'i': 42}
@ -104,17 +113,21 @@ class TestNodeCache(test_base.NodeTest):
session = db.get_session() session = db.get_session()
uuid = uuidutils.generate_uuid() uuid = uuidutils.generate_uuid()
with session.begin(): with session.begin():
db.Node(uuid=uuid).save(session) db.Node(uuid=uuid,
state=istate.States.starting).save(session)
db.Attribute(name='mac', value='11:22:11:22:11:22', db.Attribute(name='mac', value='11:22:11:22:11:22',
uuid=uuid).save(session) uuid=uuid).save(session)
self.assertRaises(utils.Error, self.assertRaises(utils.Error,
node_cache.add_node, node_cache.add_node,
self.node.uuid, mac=['11:22:11:22:11:22']) self.node.uuid,
istate.States.starting,
mac=['11:22:11:22:11:22'])
def test_active_macs(self): def test_active_macs(self):
session = db.get_session() session = db.get_session()
with session.begin(): with session.begin():
db.Node(uuid=self.node.uuid).save(session) db.Node(uuid=self.node.uuid,
state=istate.States.starting).save(session)
values = [('mac', '11:22:11:22:11:22', self.uuid), values = [('mac', '11:22:11:22:11:22', self.uuid),
('mac', '22:11:22:11:22:11', self.uuid)] ('mac', '22:11:22:11:22:11', self.uuid)]
for value in values: for value in values:
@ -127,8 +140,10 @@ class TestNodeCache(test_base.NodeTest):
session = db.get_session() session = db.get_session()
uuid2 = uuidutils.generate_uuid() uuid2 = uuidutils.generate_uuid()
with session.begin(): with session.begin():
db.Node(uuid=self.node.uuid).save(session) db.Node(uuid=self.node.uuid,
db.Node(uuid=uuid2).save(session) state=istate.States.starting).save(session)
db.Node(uuid=uuid2,
state=istate.States.starting).save(session)
node_uuid_list = node_cache._list_node_uuids() node_uuid_list = node_cache._list_node_uuids()
self.assertEqual({self.uuid, uuid2}, node_uuid_list) self.assertEqual({self.uuid, uuid2}, node_uuid_list)
@ -136,7 +151,8 @@ class TestNodeCache(test_base.NodeTest):
def test_add_attribute(self): def test_add_attribute(self):
session = db.get_session() session = db.get_session()
with session.begin(): with session.begin():
db.Node(uuid=self.node.uuid).save(session) db.Node(uuid=self.node.uuid,
state=istate.States.starting).save(session)
node_info = node_cache.NodeInfo(uuid=self.uuid, started_at=42) node_info = node_cache.NodeInfo(uuid=self.uuid, started_at=42)
node_info.add_attribute('key', 'value') node_info.add_attribute('key', 'value')
res = db.model_query(db.Attribute.name, res = db.model_query(db.Attribute.name,
@ -153,6 +169,7 @@ class TestNodeCache(test_base.NodeTest):
def test_attributes(self): def test_attributes(self):
node_info = node_cache.add_node(self.uuid, node_info = node_cache.add_node(self.uuid,
istate.States.starting,
bmc_address='1.2.3.4', bmc_address='1.2.3.4',
mac=self.macs) mac=self.macs)
self.assertEqual({'bmc_address': ['1.2.3.4'], self.assertEqual({'bmc_address': ['1.2.3.4'],
@ -177,6 +194,7 @@ class TestNodeCacheFind(test_base.NodeTest):
super(TestNodeCacheFind, self).setUp() super(TestNodeCacheFind, self).setUp()
self.macs2 = ['00:00:00:00:00:00'] self.macs2 = ['00:00:00:00:00:00']
node_cache.add_node(self.uuid, node_cache.add_node(self.uuid,
istate.States.starting,
bmc_address='1.2.3.4', bmc_address='1.2.3.4',
mac=self.macs) mac=self.macs)
@ -202,7 +220,9 @@ class TestNodeCacheFind(test_base.NodeTest):
'66:66:44:33:22:11']) '66:66:44:33:22:11'])
def test_macs_multiple_found(self): def test_macs_multiple_found(self):
node_cache.add_node('uuid2', mac=self.macs2) node_cache.add_node('uuid2',
istate.States.starting,
mac=self.macs2)
self.assertRaises(utils.Error, node_cache.find_node, self.assertRaises(utils.Error, node_cache.find_node,
mac=[self.macs[0], self.macs2[0]]) mac=[self.macs[0], self.macs2[0]])
@ -236,7 +256,9 @@ class TestNodeCacheCleanUp(test_base.NodeTest):
self.started_at = 100.0 self.started_at = 100.0
session = db.get_session() session = db.get_session()
with session.begin(): with session.begin():
db.Node(uuid=self.uuid, started_at=self.started_at).save( db.Node(uuid=self.uuid,
state=istate.States.waiting,
started_at=self.started_at).save(
session) session)
for v in self.macs: for v in self.macs:
db.Attribute(name='mac', value=v, uuid=self.uuid).save( db.Attribute(name='mac', value=v, uuid=self.uuid).save(
@ -279,17 +301,22 @@ class TestNodeCacheCleanUp(test_base.NodeTest):
time_mock.return_value = self.started_at time_mock.return_value = self.started_at
session = db.get_session() session = db.get_session()
with session.begin(): with session.begin():
db.Node(uuid=self.uuid + '1', started_at=self.started_at, db.Node(uuid=self.uuid + '1',
state=istate.States.waiting,
started_at=self.started_at,
finished_at=self.started_at + 60).save(session) finished_at=self.started_at + 60).save(session)
CONF.set_override('timeout', 99) CONF.set_override('timeout', 99)
time_mock.return_value = (self.started_at + 100) time_mock.return_value = (self.started_at + 100)
self.assertEqual([self.uuid], node_cache.clean_up()) self.assertEqual([self.uuid], node_cache.clean_up())
res = [(row.finished_at, row.error) for row in res = [(row.state, row.finished_at, row.error) for row in
db.model_query(db.Node).all()] db.model_query(db.Node).all()]
self.assertEqual([(self.started_at + 100, 'Introspection timeout'), self.assertEqual([(istate.States.error,
(self.started_at + 60, None)], self.started_at + 100,
'Introspection timeout'),
(istate.States.waiting,
self.started_at + 60, None)],
res) res)
self.assertEqual([], db.model_query(db.Attribute).all()) self.assertEqual([], db.model_query(db.Attribute).all())
self.assertEqual([], db.model_query(db.Option).all()) self.assertEqual([], db.model_query(db.Option).all())
@ -313,7 +340,9 @@ class TestNodeCacheGetNode(test_base.NodeTest):
started_at = time.time() - 42 started_at = time.time() - 42
session = db.get_session() session = db.get_session()
with session.begin(): with session.begin():
db.Node(uuid=self.uuid, started_at=started_at).save(session) db.Node(uuid=self.uuid,
state=istate.States.starting,
started_at=started_at).save(session)
info = node_cache.get_node(self.uuid) info = node_cache.get_node(self.uuid)
self.assertEqual(self.uuid, info.uuid) self.assertEqual(self.uuid, info.uuid)
@ -326,7 +355,9 @@ class TestNodeCacheGetNode(test_base.NodeTest):
started_at = time.time() - 42 started_at = time.time() - 42
session = db.get_session() session = db.get_session()
with session.begin(): with session.begin():
db.Node(uuid=self.uuid, started_at=started_at).save(session) db.Node(uuid=self.uuid,
state=istate.States.starting,
started_at=started_at).save(session)
info = node_cache.get_node(self.uuid, locked=True) info = node_cache.get_node(self.uuid, locked=True)
self.assertEqual(self.uuid, info.uuid) self.assertEqual(self.uuid, info.uuid)
@ -343,7 +374,9 @@ class TestNodeCacheGetNode(test_base.NodeTest):
started_at = time.time() - 42 started_at = time.time() - 42
session = db.get_session() session = db.get_session()
with session.begin(): with session.begin():
db.Node(uuid=self.uuid, started_at=started_at).save(session) db.Node(uuid=self.uuid,
state=istate.States.starting,
started_at=started_at).save(session)
ironic = mock.Mock() ironic = mock.Mock()
ironic.node.get.return_value = self.node ironic.node.get.return_value = self.node
@ -362,6 +395,7 @@ class TestNodeInfoFinished(test_base.NodeTest):
def setUp(self): def setUp(self):
super(TestNodeInfoFinished, self).setUp() super(TestNodeInfoFinished, self).setUp()
node_cache.add_node(self.uuid, node_cache.add_node(self.uuid,
istate.States.processing,
bmc_address='1.2.3.4', bmc_address='1.2.3.4',
mac=self.macs) mac=self.macs)
self.node_info = node_cache.NodeInfo(uuid=self.uuid, started_at=3.14) self.node_info = node_cache.NodeInfo(uuid=self.uuid, started_at=3.14)
@ -403,6 +437,7 @@ class TestNodeInfoOptions(test_base.NodeTest):
def setUp(self): def setUp(self):
super(TestNodeInfoOptions, self).setUp() super(TestNodeInfoOptions, self).setUp()
node_cache.add_node(self.uuid, node_cache.add_node(self.uuid,
istate.States.starting,
bmc_address='1.2.3.4', bmc_address='1.2.3.4',
mac=self.macs) mac=self.macs)
self.node_info = node_cache.NodeInfo(uuid=self.uuid, started_at=3.14) self.node_info = node_cache.NodeInfo(uuid=self.uuid, started_at=3.14)
@ -700,8 +735,10 @@ class TestNodeCreate(test_base.NodeTest):
node_cache.create_node('fake') node_cache.create_node('fake')
self.mock_client.node.create.assert_called_once_with(driver='fake') self.mock_client.node.create.assert_called_once_with(driver='fake')
mock_add_node.assert_called_once_with(self.node.uuid, mock_add_node.assert_called_once_with(
ironic=self.mock_client) self.node.uuid,
istate.States.enrolling,
ironic=self.mock_client)
def test_create_with_args(self, mock_get_client, mock_add_node): def test_create_with_args(self, mock_get_client, mock_add_node):
mock_get_client.return_value = self.mock_client mock_get_client.return_value = self.mock_client
@ -712,8 +749,10 @@ class TestNodeCreate(test_base.NodeTest):
self.assertFalse(mock_get_client.called) self.assertFalse(mock_get_client.called)
self.mock_client.node.create.assert_called_once_with( self.mock_client.node.create.assert_called_once_with(
driver='agent_ipmitool') driver='agent_ipmitool')
mock_add_node.assert_called_once_with(self.node.uuid, mock_add_node.assert_called_once_with(
ironic=self.mock_client) self.node.uuid,
istate.States.enrolling,
ironic=self.mock_client)
def test_create_client_error(self, mock_get_client, mock_add_node): def test_create_client_error(self, mock_get_client, mock_add_node):
mock_get_client.return_value = self.mock_client mock_get_client.return_value = self.mock_client
@ -758,3 +797,311 @@ class TestNodeCacheListNode(test_base.NodeTest):
def test_list_node_wrong_marker(self): def test_list_node_wrong_marker(self):
self.assertRaises(utils.Error, node_cache.get_node_list, self.assertRaises(utils.Error, node_cache.get_node_list,
marker='foo-bar') marker='foo-bar')
class TestNodeInfoVersionId(test_base.NodeStateTest):
def test_get(self):
self.node_info._version_id = None
self.assertEqual(self.db_node.version_id, self.node_info.version_id)
def test_get_missing_uuid(self):
self.node_info.uuid = 'foo'
self.node_info._version_id = None
def func():
return self.node_info.version_id
six.assertRaisesRegex(self, utils.NotFoundInCacheError, '.*', func)
def test_set(self):
with db.ensure_transaction() as session:
self.node_info._set_version_id(uuidutils.generate_uuid(),
session)
row = db.model_query(db.Node).get(self.node_info.uuid)
self.assertEqual(self.node_info.version_id, row.version_id)
def test_set_race(self):
with db.ensure_transaction() as session:
row = db.model_query(db.Node, session=session).get(
self.node_info.uuid)
row.update({'version_id': uuidutils.generate_uuid()})
row.save(session)
six.assertRaisesRegex(self, utils.NodeStateRaceCondition,
'Node state mismatch', self.node_info._set_state,
istate.States.finished)
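TestNodeInfoVersionId above exercises an optimistic-locking pattern: every commit of node_info writes a fresh version_id, and a mismatch between the cached value and the row raises NodeStateRaceCondition. The following is a minimal, self-contained sketch of that compare-and-swap idea; it is not the node_cache implementation, and the table layout and set_state helper are invented for the example.

```python
# A toy compare-and-swap on a version_id column; names here are made up.
import uuid

import sqlalchemy as sa

engine = sa.create_engine('sqlite://')
metadata = sa.MetaData()
nodes = sa.Table('nodes', metadata,
                 sa.Column('uuid', sa.String(36), primary_key=True),
                 sa.Column('version_id', sa.String(36)),
                 sa.Column('state', sa.String(30)))
metadata.create_all(engine)


class RaceCondition(Exception):
    pass


def set_state(conn, node_uuid, expected_version, new_state):
    """Update the row only if nobody changed version_id in the meantime."""
    new_version = uuid.uuid4().hex
    result = conn.execute(
        nodes.update()
        .where(nodes.c.uuid == node_uuid)
        .where(nodes.c.version_id == expected_version)
        .values(state=new_state, version_id=new_version))
    if result.rowcount != 1:
        raise RaceCondition('version_id changed under us')
    return new_version


with engine.begin() as conn:
    conn.execute(nodes.insert().values(uuid='n1', version_id='v0',
                                       state='starting'))
    v1 = set_state(conn, 'n1', 'v0', 'waiting')   # succeeds
    set_state(conn, 'n1', v1, 'processing')       # succeeds with fresh version
    # set_state(conn, 'n1', 'v0', 'error')        # stale version: raises
```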
class TestNodeInfoState(test_base.NodeStateTest):
def test_get(self):
self.node_info._state = None
self.assertEqual(self.db_node.state, self.node_info.state)
def test_set(self):
self.node_info._set_state(istate.States.finished)
row = db.model_query(db.Node).get(self.node_info.uuid)
self.assertEqual(self.node_info.state, row.state)
def test_set_invalid_state(self):
six.assertRaisesRegex(self, oslo_db.exception.DBError,
'CHECK constraint failed',
self.node_info._set_state, 'foo')
def test_commit(self):
current_time = time.time()
self.node_info.started_at = self.node_info.finished_at = current_time
self.node_info.error = "Boo!"
self.node_info.commit()
row = db.model_query(db.Node).get(self.node_info.uuid)
self.assertEqual(self.node_info.started_at, row.started_at)
self.assertEqual(self.node_info.finished_at, row.finished_at)
self.assertEqual(self.node_info.error, row.error)
class TestNodeInfoStateFsm(test_base.NodeStateTest):
def test__get_fsm(self):
self.node_info._fsm = None
fsm = self.node_info._get_fsm()
self.assertEqual(self.node_info.state, fsm.current_state)
def test__get_fsm_invalid_state(self):
self.node_info._fsm = None
self.node_info._state = 'foo'
six.assertRaisesRegex(self, automaton.exceptions.NotFound,
'.*undefined state.*',
self.node_info._get_fsm)
def test__fsm_ctx_set_state(self):
with self.node_info._fsm_ctx() as fsm:
fsm.process_event(istate.Events.wait)
self.assertEqual(self.node_info.state, istate.States.starting)
self.assertEqual(self.node_info.state, istate.States.waiting)
def test__fsm_ctx_set_same_state(self):
version_id = self.node_info.version_id
with self.node_info._fsm_ctx() as fsm:
fsm.initialize(self.node_info.state)
self.assertEqual(version_id, self.node_info.version_id)
def test__fsm_ctx_illegal_event(self):
with self.node_info._fsm_ctx() as fsm:
six.assertRaisesRegex(self, automaton.exceptions.NotFound,
'no defined transition', fsm.process_event,
istate.Events.finish)
self.assertEqual(self.node_info.state, istate.States.starting)
def test__fsm_ctx_generic_exception(self):
class CustomException(Exception):
pass
def func(fsm):
fsm.process_event(istate.Events.wait)
raise CustomException('Oops')
with self.node_info._fsm_ctx() as fsm:
self.assertRaises(CustomException, func, fsm)
self.assertEqual(self.node_info.state, istate.States.waiting)
def test_fsm_event(self):
self.node_info.fsm_event(istate.Events.wait)
self.assertEqual(self.node_info.state, istate.States.waiting)
def test_fsm_illegal_event(self):
six.assertRaisesRegex(self, utils.NodeStateInvalidEvent,
'no defined transition',
self.node_info.fsm_event, istate.Events.finish)
self.assertEqual(self.node_info.state, istate.States.starting)
def test_fsm_illegal_strict_event(self):
six.assertRaisesRegex(self, utils.NodeStateInvalidEvent,
'no defined transition',
self.node_info.fsm_event,
istate.Events.finish, strict=True)
self.assertIn('no defined transition', self.node_info.error)
self.assertEqual(self.node_info.state, istate.States.error)
class TestFsmEvent(test_base.NodeStateTest):
def test_event_before(self):
@node_cache.fsm_event_before(istate.Events.wait)
def function(node_info):
self.assertEqual(node_info.state, istate.States.waiting)
node_info.fsm_event(istate.Events.process)
function(self.node_info)
self.assertEqual(self.node_info.state, istate.States.processing)
def test_event_after(self):
@node_cache.fsm_event_after(istate.Events.process)
def function(node_info):
node_info.fsm_event(istate.Events.wait)
self.assertEqual(node_info.state, istate.States.waiting)
function(self.node_info)
self.assertEqual(self.node_info.state, istate.States.processing)
@mock.patch.object(node_cache, 'LOG', autospec=True)
def test_triggers_fsm_error_transition_no_errors(self, log_mock):
class CustomException(Exception):
pass
@node_cache.triggers_fsm_error_transition(no_errors=(CustomException,))
def function(node_info):
self.assertEqual(node_info.state, istate.States.starting)
raise CustomException('Oops')
function(self.node_info)
log_msg = ('Not processing error event for the exception: '
'%(exc)s raised by %(func)s')
log_mock.debug.assert_called_with(log_msg, mock.ANY,
node_info=mock.ANY)
self.assertEqual(self.node_info.state, istate.States.starting)
def test_triggers_fsm_error_transition_no_errors_empty(self):
class CustomException(Exception):
pass
@node_cache.triggers_fsm_error_transition(no_errors=())
def function(node_info):
self.assertEqual(node_info.state, istate.States.starting)
raise CustomException('Oops!')
# assert an error event was performed
self.assertRaises(CustomException, function, self.node_info)
self.assertEqual(self.node_info.state, istate.States.error)
def test_triggers_fsm_error_transition_no_errors_with_error(self):
class CustomException(Exception):
pass
@node_cache.triggers_fsm_error_transition(errors=(CustomException,))
def function(node_info):
self.assertEqual(node_info.state, istate.States.starting)
raise CustomException('Oops')
# assert a generic error triggers an error event
self.assertRaises(CustomException, function, self.node_info)
self.assertEqual(self.node_info.state, istate.States.error)
def test_triggers_fsm_error_transition_errors_masked(self):
class CustomException(Exception):
pass
@node_cache.triggers_fsm_error_transition(errors=())
def function(node_info):
self.assertEqual(node_info.state, istate.States.starting)
raise CustomException('Oops')
# assert no error event was triggered
self.assertRaises(CustomException, function, self.node_info)
self.assertEqual(self.node_info.state, istate.States.starting)
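The four variants above pin down the decorator's contract: exceptions listed in no_errors are swallowed with a debug log and leave the state alone, exceptions matching errors trigger the error transition and re-raise, and anything else propagates untouched. A sketch of a decorator with that contract follows; it is not the node_cache code, and the event name passed to fsm_event is an assumption.

```python
# Sketch only: the error-handling contract checked by the tests above.
import functools
import logging

LOG = logging.getLogger(__name__)


def triggers_fsm_error_transition(errors=(Exception,), no_errors=()):
    def decorator(func):
        @functools.wraps(func)
        def wrapper(node_info, *args, **kwargs):
            try:
                return func(node_info, *args, **kwargs)
            except no_errors as exc:
                # benign exceptions: keep the current state, only log
                LOG.debug('Not processing error event for the exception: '
                          '%(exc)s raised by %(func)s',
                          {'exc': exc, 'func': func.__name__})
            except errors:
                # matching exceptions: move the node into the error state
                node_info.fsm_event('error')  # assumed event name
                raise
        return wrapper
    return decorator
```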
def test_unlock(self):
@node_cache.release_lock
def func(node_info):
self.assertTrue(node_info._locked)
self.node_info.acquire_lock(blocking=True)
with mock.patch.object(self.node_info, 'release_lock',
autospec=True) as release_lock_mock:
func(self.node_info)
release_lock_mock.assert_called_once_with()
def test_unlock_unlocked(self):
@node_cache.release_lock
def func(node_info):
self.assertFalse(node_info._locked)
self.node_info.release_lock()
with mock.patch.object(self.node_info, 'release_lock',
autospec=True) as release_lock_mock:
func(self.node_info)
self.assertEqual(0, release_lock_mock.call_count)
@mock.patch.object(node_cache, 'triggers_fsm_error_transition',
autospec=True)
@mock.patch.object(node_cache, 'fsm_event_after', autospec=True)
def test_fsm_transition(self, fsm_event_after_mock, trigger_mock):
@node_cache.fsm_transition(istate.Events.finish)
def func():
pass
fsm_event_after_mock.assert_called_once_with(istate.Events.finish)
trigger_mock.assert_called_once_with()
@mock.patch.object(node_cache, 'triggers_fsm_error_transition',
autospec=True)
@mock.patch.object(node_cache, 'fsm_event_before', autospec=True)
def test_nonreentrant_fsm_transition(self, fsm_event_before_mock,
trigger_mock):
@node_cache.fsm_transition(istate.Events.abort, reentrant=False)
def func():
pass
fsm_event_before_mock.assert_called_once_with(istate.Events.abort,
strict=True)
trigger_mock.assert_called_once_with()
@mock.patch.object(node_cache, 'add_node', autospec=True)
@mock.patch.object(node_cache, 'NodeInfo', autospec=True)
class TestStartIntrospection(test_base.NodeTest):
def prepare_mocks(fn):
@six.wraps(fn)
def inner(self, NodeMock, *args):
method_mock = mock.Mock()
NodeMock.return_value = self.node_info
self.node_info.fsm_event = method_mock
fn(self, method_mock, *args)
method_mock.assert_called_once_with(istate.Events.start)
return inner
@prepare_mocks
def test_node_in_db_ok_state(self, fsm_event_mock, add_node_mock):
def side_effect(*args):
self.node_info._state = 'foo'
fsm_event_mock.side_effect = side_effect
node_cache.start_introspection(self.node.uuid)
add_node_mock.assert_called_once_with(self.node_info.uuid, 'foo')
@prepare_mocks
def test_node_in_db_invalid_state(self, fsm_event_mock, add_node_mock):
fsm_event_mock.side_effect = utils.NodeStateInvalidEvent('Oops!')
six.assertRaisesRegex(self, utils.NodeStateInvalidEvent, 'Oops!',
node_cache.start_introspection,
self.node_info.uuid)
self.assertFalse(add_node_mock.called)
@prepare_mocks
def test_node_in_db_race_condition(self, fsm_event_mock, add_node_mock):
fsm_event_mock.side_effect = utils.NodeStateRaceCondition()
six.assertRaisesRegex(self, utils.NodeStateRaceCondition, '.*',
node_cache.start_introspection,
self.node_info.uuid)
self.assertFalse(add_node_mock.called)
@prepare_mocks
def test_error_fsm_event(self, fsm_event_mock, add_node_mock):
fsm_event_mock.side_effect = utils.Error('Oops!')
six.assertRaisesRegex(self, utils.Error, 'Oops!',
node_cache.start_introspection,
self.node_info.uuid)
self.assertFalse(add_node_mock.called)
@prepare_mocks
def test_node_not_in_db(self, fsm_event_mock, add_node_mock):
fsm_event_mock.side_effect = utils.NotFoundInCacheError('Oops!')
node_cache.start_introspection(self.node_info.uuid)
add_node_mock.assert_called_once_with(self.node_info.uuid,
istate.States.starting)
@prepare_mocks
def test_custom_exc_fsm_event(self, fsm_event_mock, add_node_mock):
class CustomError(Exception):
pass
fsm_event_mock.side_effect = CustomError('Oops!')
six.assertRaisesRegex(self, CustomError, 'Oops!',
node_cache.start_introspection,
self.node_info.uuid)
self.assertFalse(add_node_mock.called)
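Taken together, these cases describe the intended flow of node_cache.start_introspection: fire the start event on whatever record already exists, fall back to creating a fresh record in the starting state when the node is not cached yet, and refuse to add anything if the event fails for another reason. A rough sketch of that flow, with locking and extra arguments omitted, might look like this (not the real function):

```python
# Sketch of the flow implied by TestStartIntrospection.
def start_introspection(uuid, **attributes):
    node_info = NodeInfo(uuid)
    try:
        # an existing record must accept the 'start' event first
        node_info.fsm_event(istate.Events.start)
        state = node_info.state
    except utils.NotFoundInCacheError:
        # the node has never been introspected: begin from 'starting'
        state = istate.States.starting
    # any other exception propagates and add_node() is never called
    return add_node(uuid, state, **attributes)
```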

@@ -236,6 +236,7 @@ class TestRootDiskSelection(test_base.NodeTest):
]
self.matched = self.inventory['disks'][2].copy()
self.node_info = mock.Mock(spec=node_cache.NodeInfo,
_state='foo',
uuid=self.uuid,
**{'node.return_value': self.node})
@@ -28,7 +28,9 @@ from oslo_config import cfg
from oslo_utils import uuidutils
from ironic_inspector.common import ironic as ir_utils
from ironic_inspector import db
from ironic_inspector import firewall
from ironic_inspector import introspection_state as istate
from ironic_inspector import node_cache
from ironic_inspector.plugins import base as plugins_base
from ironic_inspector.plugins import example as example_plugin
@@ -65,6 +67,7 @@ class BaseProcessTest(BaseTest):
self.find_mock = self.cache_fixture.mock
self.node_info = node_cache.NodeInfo(
uuid=self.node.uuid,
state=istate.States.waiting,
started_at=self.started_at)
self.node_info.finished = mock.Mock()
self.find_mock.return_value = self.node_info
@@ -85,7 +88,7 @@ class TestProcess(BaseProcessTest):
self.assertEqual(sorted(self.all_macs), sorted(actual_macs))
self.cli.node.get.assert_called_once_with(self.uuid)
self.process_mock.assert_called_once_with(
-self.node, self.data, self.node_info)
+self.node_info, self.node, self.data)
def test_no_ipmi(self):
del self.inventory['bmc_address']
@@ -95,8 +98,8 @@ class TestProcess(BaseProcessTest):
actual_macs = self.find_mock.call_args[1]['mac']
self.assertEqual(sorted(self.all_macs), sorted(actual_macs))
self.cli.node.get.assert_called_once_with(self.uuid)
-self.process_mock.assert_called_once_with(self.node, self.data,
-self.node_info)
+self.process_mock.assert_called_once_with(self.node_info, self.node,
+self.data)
def test_not_found_in_cache(self):
self.find_mock.side_effect = utils.Error('not found')
@ -365,14 +368,19 @@ class TestProcessNode(BaseTest):
self.useFixture(fixtures.MockPatchObject( self.useFixture(fixtures.MockPatchObject(
eventlet.greenthread, 'sleep', autospec=True)) eventlet.greenthread, 'sleep', autospec=True))
self.node_info._state = istate.States.waiting
db.Node(uuid=self.node_info.uuid, state=self.node_info._state,
started_at=self.node_info.started_at,
finished_at=self.node_info.finished_at,
error=self.node_info.error).save(self.session)
def test_return_includes_uuid(self): def test_return_includes_uuid(self):
ret_val = process._process_node(self.node, self.data, self.node_info) ret_val = process._process_node(self.node_info, self.node, self.data)
self.assertEqual(self.uuid, ret_val.get('uuid')) self.assertEqual(self.uuid, ret_val.get('uuid'))
def test_return_includes_uuid_with_ipmi_creds(self): def test_return_includes_uuid_with_ipmi_creds(self):
self.node_info.set_option('new_ipmi_credentials', self.new_creds) self.node_info.set_option('new_ipmi_credentials', self.new_creds)
ret_val = process._process_node(self.node, self.data, self.node_info) ret_val = process._process_node(self.node_info, self.node, self.data)
self.assertEqual(self.uuid, ret_val.get('uuid')) self.assertEqual(self.uuid, ret_val.get('uuid'))
self.assertTrue(ret_val.get('ipmi_setup_credentials')) self.assertTrue(ret_val.get('ipmi_setup_credentials'))
@ -381,13 +389,13 @@ class TestProcessNode(BaseTest):
self.node.provision_state = 'active' self.node.provision_state = 'active'
self.assertRaises(utils.Error, process._process_node, self.assertRaises(utils.Error, process._process_node,
self.node, self.data, self.node_info) self.node_info, self.node, self.data)
self.assertFalse(post_hook_mock.called) self.assertFalse(post_hook_mock.called)
@mock.patch.object(example_plugin.ExampleProcessingHook, 'before_update') @mock.patch.object(example_plugin.ExampleProcessingHook, 'before_update')
@mock.patch.object(node_cache.NodeInfo, 'finished', autospec=True) @mock.patch.object(node_cache.NodeInfo, 'finished', autospec=True)
def test_ok(self, finished_mock, post_hook_mock): def test_ok(self, finished_mock, post_hook_mock):
process._process_node(self.node, self.data, self.node_info) process._process_node(self.node_info, self.node, self.data)
self.cli.port.create.assert_any_call(node_uuid=self.uuid, self.cli.port.create.assert_any_call(node_uuid=self.uuid,
address=self.macs[0]) address=self.macs[0])
@ -403,7 +411,7 @@ class TestProcessNode(BaseTest):
self.cli.port.create.side_effect = ( self.cli.port.create.side_effect = (
[exceptions.Conflict()] + self.ports[1:]) [exceptions.Conflict()] + self.ports[1:])
process._process_node(self.node, self.data, self.node_info) process._process_node(self.node_info, self.node, self.data)
self.cli.port.create.assert_any_call(node_uuid=self.uuid, self.cli.port.create.assert_any_call(node_uuid=self.uuid,
address=self.macs[0]) address=self.macs[0])
@ -413,7 +421,7 @@ class TestProcessNode(BaseTest):
def test_set_ipmi_credentials(self): def test_set_ipmi_credentials(self):
self.node_info.set_option('new_ipmi_credentials', self.new_creds) self.node_info.set_option('new_ipmi_credentials', self.new_creds)
process._process_node(self.node, self.data, self.node_info) process._process_node(self.node_info, self.node, self.data)
self.cli.node.update.assert_any_call(self.uuid, self.patch_credentials) self.cli.node.update.assert_any_call(self.uuid, self.patch_credentials)
self.cli.node.set_power_state.assert_called_once_with(self.uuid, 'off') self.cli.node.set_power_state.assert_called_once_with(self.uuid, 'off')
@ -428,7 +436,7 @@ class TestProcessNode(BaseTest):
'path': '/driver_info/ipmi_address', 'path': '/driver_info/ipmi_address',
'value': self.bmc_address}) 'value': self.bmc_address})
process._process_node(self.node, self.data, self.node_info) process._process_node(self.node_info, self.node, self.data)
self.cli.node.update.assert_any_call(self.uuid, self.patch_credentials) self.cli.node.update.assert_any_call(self.uuid, self.patch_credentials)
self.cli.node.set_power_state.assert_called_once_with(self.uuid, 'off') self.cli.node.set_power_state.assert_called_once_with(self.uuid, 'off')
@ -441,7 +449,7 @@ class TestProcessNode(BaseTest):
self.node_info.set_option('new_ipmi_credentials', self.new_creds) self.node_info.set_option('new_ipmi_credentials', self.new_creds)
self.cli.node.get_boot_device.side_effect = RuntimeError('boom') self.cli.node.get_boot_device.side_effect = RuntimeError('boom')
process._process_node(self.node, self.data, self.node_info) process._process_node(self.node_info, self.node, self.data)
self.cli.node.update.assert_any_call(self.uuid, self.patch_credentials) self.cli.node.update.assert_any_call(self.uuid, self.patch_credentials)
self.assertEqual(2, self.cli.node.update.call_count) self.assertEqual(2, self.cli.node.update.call_count)
@ -457,7 +465,7 @@ class TestProcessNode(BaseTest):
def test_power_off_failed(self, finished_mock): def test_power_off_failed(self, finished_mock):
self.cli.node.set_power_state.side_effect = RuntimeError('boom') self.cli.node.set_power_state.side_effect = RuntimeError('boom')
process._process_node(self.node, self.data, self.node_info) process._process_node(self.node_info, self.node, self.data)
self.cli.node.set_power_state.assert_called_once_with(self.uuid, 'off') self.cli.node.set_power_state.assert_called_once_with(self.uuid, 'off')
finished_mock.assert_called_once_with( finished_mock.assert_called_once_with(
@ -472,7 +480,7 @@ class TestProcessNode(BaseTest):
self.node.provision_state = 'enroll' self.node.provision_state = 'enroll'
self.node_info.node = mock.Mock(return_value=self.node) self.node_info.node = mock.Mock(return_value=self.node)
process._process_node(self.node, self.data, self.node_info) process._process_node(self.node_info, self.node, self.data)
self.assertTrue(post_hook_mock.called) self.assertTrue(post_hook_mock.called)
self.assertTrue(self.cli.node.set_power_state.called) self.assertTrue(self.cli.node.set_power_state.called)
@ -481,7 +489,7 @@ class TestProcessNode(BaseTest):
@mock.patch.object(node_cache.NodeInfo, 'finished', autospec=True) @mock.patch.object(node_cache.NodeInfo, 'finished', autospec=True)
def test_no_power_off(self, finished_mock): def test_no_power_off(self, finished_mock):
CONF.set_override('power_off', False, 'processing') CONF.set_override('power_off', False, 'processing')
process._process_node(self.node, self.data, self.node_info) process._process_node(self.node_info, self.node, self.data)
self.assertFalse(self.cli.node.set_power_state.called) self.assertFalse(self.cli.node.set_power_state.called)
finished_mock.assert_called_once_with(self.node_info) finished_mock.assert_called_once_with(self.node_info)
@ -493,7 +501,7 @@ class TestProcessNode(BaseTest):
name = 'inspector_data-%s' % self.uuid name = 'inspector_data-%s' % self.uuid
expected = self.data expected = self.data
process._process_node(self.node, self.data, self.node_info) process._process_node(self.node_info, self.node, self.data)
swift_conn.create_object.assert_called_once_with(name, mock.ANY) swift_conn.create_object.assert_called_once_with(name, mock.ANY)
self.assertEqual(expected, self.assertEqual(expected,
@ -506,7 +514,7 @@ class TestProcessNode(BaseTest):
name = 'inspector_data-%s' % self.uuid name = 'inspector_data-%s' % self.uuid
self.data['logs'] = 'something' self.data['logs'] = 'something'
process._process_node(self.node, self.data, self.node_info) process._process_node(self.node_info, self.node, self.data)
swift_conn.create_object.assert_called_once_with(name, mock.ANY) swift_conn.create_object.assert_called_once_with(name, mock.ANY)
self.assertNotIn('logs', self.assertNotIn('logs',
@ -523,7 +531,7 @@ class TestProcessNode(BaseTest):
'value': name, 'op': 'add'}] 'value': name, 'op': 'add'}]
expected = self.data expected = self.data
process._process_node(self.node, self.data, self.node_info) process._process_node(self.node_info, self.node, self.data)
swift_conn.create_object.assert_called_once_with(name, mock.ANY) swift_conn.create_object.assert_called_once_with(name, mock.ANY)
self.assertEqual(expected, self.assertEqual(expected,
@ -596,6 +604,11 @@ class TestReapplyNode(BaseTest):
self.cli.port.create.side_effect = self.ports self.cli.port.create.side_effect = self.ports
self.cli.node.update.return_value = self.node self.cli.node.update.return_value = self.node
self.cli.node.list_ports.return_value = [] self.cli.node.list_ports.return_value = []
self.node_info._state = istate.States.finished
db.Node(uuid=self.node_info.uuid, state=self.node_info._state,
started_at=self.node_info.started_at,
finished_at=self.node_info.finished_at,
error=self.node_info.error).save(self.session)
def call(self): def call(self):
process._reapply(self.node_info) process._reapply(self.node_info)
@ -618,12 +631,7 @@ class TestReapplyNode(BaseTest):
swift_name = 'inspector_data-%s' % self.uuid swift_name = 'inspector_data-%s' % self.uuid
swift_mock.get_object.return_value = json.dumps(self.data) swift_mock.get_object.return_value = json.dumps(self.data)
with mock.patch.object(process.LOG, 'error', self.call()
autospec=True) as log_mock:
self.call()
# no failures logged
self.assertFalse(log_mock.called)
post_hook_mock.assert_called_once_with(mock.ANY, self.node_info) post_hook_mock.assert_called_once_with(mock.ANY, self.node_info)
swift_mock.create_object.assert_called_once_with(swift_name, swift_mock.create_object.assert_called_once_with(swift_name,
@ -656,14 +664,7 @@ class TestReapplyNode(BaseTest):
expected_error = ('Unexpected exception Exception while fetching ' expected_error = ('Unexpected exception Exception while fetching '
'unprocessed introspection data from Swift: Oops') 'unprocessed introspection data from Swift: Oops')
swift_mock.get_object.side_effect = exc swift_mock.get_object.side_effect = exc
with mock.patch.object(process.LOG, 'exception', self.call()
autospec=True) as log_mock:
self.call()
log_mock.assert_called_once_with('Encountered exception '
'while fetching stored '
'introspection data',
node_info=self.node_info)
self.assertFalse(swift_mock.create_object.called) self.assertFalse(swift_mock.create_object.called)
self.assertFalse(apply_mock.called) self.assertFalse(apply_mock.called)
@ -684,19 +685,14 @@ class TestReapplyNode(BaseTest):
with mock.patch.object(example_plugin.ExampleProcessingHook, with mock.patch.object(example_plugin.ExampleProcessingHook,
'before_processing') as before_processing_mock: 'before_processing') as before_processing_mock:
before_processing_mock.side_effect = exc before_processing_mock.side_effect = exc
with mock.patch.object(process.LOG, 'error', self.call()
autospec=True) as log_mock:
self.call()
exc_failure = ('Unexpected exception %(exc_class)s during ' exc_failure = ('Pre-processing failures detected reapplying '
'introspection on stored data:\n'
'Unexpected exception %(exc_class)s during '
'preprocessing in hook example: %(error)s' % 'preprocessing in hook example: %(error)s' %
{'exc_class': type(exc).__name__, 'error': {'exc_class': type(exc).__name__, 'error':
exc}) exc})
log_mock.assert_called_once_with('Pre-processing failures '
'detected reapplying '
'introspection on stored '
'data:\n%s', exc_failure,
node_info=self.node_info)
finished_mock.assert_called_once_with(self.node_info, finished_mock.assert_called_once_with(self.node_info,
error=exc_failure) error=exc_failure)
# assert _reapply ended having detected the failure # assert _reapply ended having detected the failure
@ -712,13 +708,8 @@ class TestReapplyNode(BaseTest):
exc = Exception('Oops') exc = Exception('Oops')
self.cli.port.create.side_effect = exc self.cli.port.create.side_effect = exc
with mock.patch.object(process.LOG, 'exception') as log_mock: self.call()
self.call()
log_mock.assert_called_once_with('Encountered exception reapplying'
' introspection on stored data',
node_info=self.node_info,
data=mock.ANY)
finished_mock.assert_called_once_with(self.node_info, error=str(exc)) finished_mock.assert_called_once_with(self.node_info, error=str(exc))
self.assertFalse(swift_mock.create_object.called) self.assertFalse(swift_mock.create_object.called)
self.assertFalse(apply_mock.called) self.assertFalse(apply_mock.called)

@@ -14,8 +14,10 @@
from keystonemiddleware import auth_token
from oslo_config import cfg
from ironic_inspector import node_cache
from ironic_inspector.test import base
from ironic_inspector import utils
from ironicclient.v1 import node
try:
from unittest import mock
@@ -110,7 +112,7 @@ class TestProcessingLogger(base.BaseTest):
utils.processing_logger_prefix())
def test_prefix_only_uuid(self):
-node_info = mock.Mock(uuid='NNN')
+node_info = node.Node(mock.Mock(), dict(uuid='NNN'))
self.assertEqual('[node: NNN]',
utils.processing_logger_prefix(node_info=node_info))
@@ -125,7 +127,7 @@ class TestProcessingLogger(base.BaseTest):
utils.processing_logger_prefix(data=data))
def test_prefix_everything(self):
-node_info = mock.Mock(uuid='NNN')
+node_info = node.Node(mock.Mock(), dict(uuid='NNN'))
data = {'boot_interface': '01-aa-bb-cc-dd-ee-ff',
'inventory': {'bmc_address': '1.2.3.4'}}
self.assertEqual('[node: NNN MAC aa:bb:cc:dd:ee:ff BMC 1.2.3.4]',
@@ -133,13 +135,18 @@
data=data))
def test_prefix_uuid_not_str(self):
-node_info = mock.Mock(uuid=None)
+node_info = node.Node(mock.Mock(), dict(uuid=None))
self.assertEqual('[node: None]',
utils.processing_logger_prefix(node_info=node_info))
def test_prefix_NodeInfo_instance(self):
node_info = node_cache.NodeInfo('NNN')
self.assertEqual('[node: NNN state None]',
utils.processing_logger_prefix(node_info=node_info))
def test_adapter_no_bmc(self):
CONF.set_override('log_bmc_address', False, 'processing')
-node_info = mock.Mock(uuid='NNN')
+node_info = node.Node(mock.Mock(), dict(uuid='NNN'))
data = {'boot_interface': '01-aa-bb-cc-dd-ee-ff',
'inventory': {'bmc_address': '1.2.3.4'}}
logger = utils.getProcessingLogger(__name__)
@@ -150,7 +157,7 @@
msg)
def test_adapter_with_bmc(self):
-node_info = mock.Mock(uuid='NNN')
+node_info = node.Node(mock.Mock(), dict(uuid='NNN'))
data = {'boot_interface': '01-aa-bb-cc-dd-ee-ff',
'inventory': {'bmc_address': '1.2.3.4'}}
logger = utils.getProcessingLogger(__name__)
@@ -21,6 +21,7 @@ from oslo_log import log
from oslo_middleware import cors as cors_middleware
import pytz
from ironicclient.v1 import node
from ironic_inspector.common.i18n import _, _LE
from ironic_inspector import conf  # noqa
@@ -49,7 +50,7 @@ def processing_logger_prefix(data=None, node_info=None):
"""Calculate prefix for logging.
Tries to use:
-* node UUID,
+* node UUID, node._state
* node PXE MAC,
* node BMC address
@@ -62,7 +63,10 @@ def processing_logger_prefix(data=None, node_info=None):
data = data or {}
if node_info is not None:
-parts.append(str(node_info.uuid))
+if isinstance(node_info, node.Node):
+parts.append(str(node_info.uuid))
+else:
+parts.append(str(node_info))
pxe_mac = get_pxe_mac(data)
if pxe_mac:
@@ -113,9 +117,22 @@ class Error(Exception):
class NotFoundInCacheError(Error):
"""Exception when node was not found in cache during processing."""
-def __init__(self, msg, code=404):
+def __init__(self, msg, code=404, **kwargs):
super(NotFoundInCacheError, self).__init__(msg, code,
-log_level='info')
+log_level='info', **kwargs)
class NodeStateRaceCondition(Error):
"""State mismatch between the DB and a node_info."""
def __init__(self, *args, **kwargs):
message = _('Node state mismatch detected between the DB and the '
'cached node_info object')
kwargs.setdefault('code', 500)
super(NodeStateRaceCondition, self).__init__(message, *args, **kwargs)
class NodeStateInvalidEvent(Error):
"""Invalid event attempted."""
def executor():
@@ -0,0 +1,16 @@
---
features:
  - Node introspection state is now kept in a dedicated database column and
    is managed by a finite state machine. The state is not yet exposed to
    the user.
issues:
  - Because the NodeInfo.state attribute is updated independently of the
    other node_info attributes, the Node.finished_at and Node.error columns
    may fall out of sync with Node.state if the database connection is lost
    before the Node.state column is updated.
upgrade:
  - Node.state and Node.version_id database columns are introduced.
  - The introspection state column defaults to the finished state. If the
    introspection error column value on a node row isn't null, the node
    state is changed to error.
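The finite state machine mentioned in these notes is built on the automaton library that this change adds to requirements.txt. As a rough illustration only (the real transition table lives in ironic_inspector/introspection_state.py and is richer than this), wiring up a few of the state and event names used above might look like:

```python
# Minimal automaton sketch using state/event names seen in this change;
# the specific transitions chosen here are an assumption.
from automaton import machines

fsm = machines.FiniteMachine()
for state in ('starting', 'waiting', 'processing', 'finished', 'error'):
    fsm.add_state(state)

fsm.add_transition('starting', 'waiting', 'wait')
fsm.add_transition('waiting', 'processing', 'process')
fsm.add_transition('processing', 'finished', 'finish')
for state in ('starting', 'waiting', 'processing'):
    fsm.add_transition(state, 'error', 'error')

fsm.initialize('starting')
fsm.process_event('wait')
print(fsm.current_state)  # waiting
```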

@@ -1,6 +1,7 @@
# The order of packages is significant, because pip processes them in the order
# of appearance. Changing the order has an impact on the overall integration
# process, which may cause wedges in the gate later.
automaton>=0.5.0 # Apache-2.0
alembic>=0.8.4 # MIT
Babel>=2.3.4 # BSD
eventlet!=0.18.3,>=0.18.2 # MIT