Add internal_info field to ports and portgroups
In case of ports, it is also added to the API, as a readonly field. It will
be used for any port-specific internal information ironic needs to store
inside the port object. In this change we start using it to store UUIDs of
the cleaning ports that ironic creates, instead of fiddling with
port.extra['vif_port_id'], as extra is intended for operator use only.

Partial-bug: #1526403
Change-Id: Ib62c3e32305619d0c55f8ec7e45b067f0f0b32d4
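Taken together, the hunks below apply one small precedence rule wherever a VIF has to be looked up (network.get_node_vif_ids(), the neutron DHCP provider, deploy_utils): prefer the cleaning port UUID recorded in internal_info and only then fall back to the legacy extra['vif_port_id']. A minimal sketch of that lookup, assuming a port-like object exposing the internal_info and extra dicts used in this change (the helper name itself is made up for illustration):

def _cleaning_or_tenant_vif(p_obj):
    # Cleaning ports created by ironic record their neutron port UUID under
    # internal_info['cleaning_vif_port_id']; deployment/tenant VIFs (and ports
    # written before an upgrade) still live in extra['vif_port_id'].
    return (p_obj.internal_info.get('cleaning_vif_port_id') or
            p_obj.extra.get('vif_port_id'))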
@@ -32,6 +32,11 @@ always requests the newest supported API version.
 API Versions History
 --------------------

+**1.18**
+
+  Add ``internal_info`` readonly field to the port object, that will be used
+  by ironic to store internal port-related information.
+
 **1.17**

   Addition of provision_state verb ``adopt`` which allows an operator
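Because the new field is gated on microversion 1.18, whether a client sees it depends entirely on the version header it sends. A hedged sketch of that negotiation against the ports endpoint (placeholder base URL and port UUID, authentication omitted):

import requests

IRONIC = 'http://ironic.example.com:6385'              # placeholder endpoint
PORT = '27e3153e-d5bf-4b7e-b517-fb518e17f34c'          # placeholder port UUID

# Requesting 1.18 (or newer) exposes the readonly internal_info dict.
new = requests.get('%s/v1/ports/%s' % (IRONIC, PORT),
                   headers={'X-OpenStack-Ironic-API-Version': '1.18'}).json()
print(new.get('internal_info'))  # e.g. {'cleaning_vif_port_id': '...'}

# Any older version has the field hidden by hide_fields_in_newer_versions().
old = requests.get('%s/v1/ports/%s' % (IRONIC, PORT),
                   headers={'X-OpenStack-Ironic-API-Version': '1.17'}).json()
assert 'internal_info' not in old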
@@ -36,6 +36,12 @@ from ironic import objects
 _DEFAULT_RETURN_FIELDS = ('uuid', 'address')


+def hide_fields_in_newer_versions(obj):
+    # if requested version is < 1.18, hide internal_info field
+    if not api_utils.allow_port_internal_info():
+        obj.internal_info = wsme.Unset
+
+
 class Port(base.APIBase):
     """API representation of a port.

@@ -77,6 +83,9 @@ class Port(base.APIBase):
     extra = {wtypes.text: types.jsontype}
     """This port's meta data"""

+    internal_info = wsme.wsattr({wtypes.text: types.jsontype}, readonly=True)
+    """This port's internal information maintained by ironic"""
+
     node_uuid = wsme.wsproperty(types.uuid, _get_node_uuid, _set_node_uuid,
                                 mandatory=True)
     """The UUID of the node this port belongs to"""
@@ -130,6 +139,8 @@ class Port(base.APIBase):
         if fields is not None:
             api_utils.check_for_invalid_fields(fields, port.as_dict())

+        hide_fields_in_newer_versions(port)
+
         return cls._convert_with_links(port, pecan.request.public_url,
                                        fields=fields)

@@ -138,6 +149,7 @@ class Port(base.APIBase):
         sample = cls(uuid='27e3153e-d5bf-4b7e-b517-fb518e17f34c',
                     address='fe:54:00:77:07:d9',
                     extra={'foo': 'bar'},
+                    internal_info={},
                     created_at=datetime.datetime.utcnow(),
                     updated_at=datetime.datetime.utcnow())
         # NOTE(lucasagomes): node_uuid getter() method look at the
@@ -151,6 +163,11 @@ class Port(base.APIBase):
 class PortPatchType(types.JsonPatchType):
     _api_base = Port

+    @staticmethod
+    def internal_attrs():
+        defaults = types.JsonPatchType.internal_attrs()
+        return defaults + ['/internal_info']
+

 class PortCollection(collection.Collection):
     """API representation of a collection of ports."""
@@ -187,7 +204,7 @@ class PortsController(rest.RestController):
         'detail': ['GET'],
     }

-    invalid_sort_key_list = ['extra']
+    invalid_sort_key_list = ['extra', 'internal_info']

     def _get_ports_collection(self, node_ident, address, marker, limit,
                               sort_key, sort_dir, resource_url=None,
@@ -290,6 +290,15 @@ def allow_links_node_states_and_driver_properties():
             versions.MINOR_14_LINKS_NODESTATES_DRIVERPROPERTIES)


+def allow_port_internal_info():
+    """Check if accessing internal_info is allowed for the port.
+
+    Version 1.18 of the API exposes internal_info readonly field for the port.
+    """
+    return (pecan.request.version.minor >=
+            versions.MINOR_18_PORT_INTERNAL_INFO)
+
+
 def get_controller_reserved_names(cls):
     """Get reserved names for a given controller.

@@ -47,6 +47,7 @@ BASE_VERSION = 1
 # v1.15: Add ability to do manual cleaning of nodes
 # v1.16: Add ability to filter nodes by driver.
 # v1.17: Add 'adopt' verb for ADOPTING active nodes.
+# v1.18: Add port.internal_info.

 MINOR_0_JUNO = 0
 MINOR_1_INITIAL_VERSION = 1
@@ -66,11 +67,12 @@ MINOR_14_LINKS_NODESTATES_DRIVERPROPERTIES = 14
 MINOR_15_MANUAL_CLEAN = 15
 MINOR_16_DRIVER_FILTER = 16
 MINOR_17_ADOPT_VERB = 17
+MINOR_18_PORT_INTERNAL_INFO = 18

 # When adding another version, update MINOR_MAX_VERSION and also update
 # doc/source/webapi/v1.rst with a detailed explanation of what the version has
 # changed.
-MINOR_MAX_VERSION = MINOR_17_ADOPT_VERB
+MINOR_MAX_VERSION = MINOR_18_PORT_INTERNAL_INFO

 # String representations of the minor and maximum versions
 MIN_VERSION_STRING = '{}.{}'.format(BASE_VERSION, MINOR_1_INITIAL_VERSION)
@@ -32,12 +32,16 @@ def get_node_vif_ids(task):
     portgroup_vifs = {}
     port_vifs = {}
     for portgroup in task.portgroups:
-        vif = portgroup.extra.get('vif_port_id')
+        # NOTE(vdrok): This works because cleaning_vif_port_id doesn't exist
+        # when we're in deployment/tenant network
+        vif = (portgroup.internal_info.get('cleaning_vif_port_id') or
+               portgroup.extra.get('vif_port_id'))
         if vif:
             portgroup_vifs[portgroup.uuid] = vif
     vifs['portgroups'] = portgroup_vifs
     for port in task.ports:
-        vif = port.extra.get('vif_port_id')
+        vif = (port.internal_info.get('cleaning_vif_port_id') or
+               port.extra.get('vif_port_id'))
         if vif:
             port_vifs[port.uuid] = vif
     vifs['ports'] = port_vifs
@@ -0,0 +1,35 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""add port portgroup internal info
+
+Revision ID: 10b163d4481e
+Revises: e294876e8028
+Create Date: 2016-07-06 17:43:55.846837
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '10b163d4481e'
+down_revision = 'e294876e8028'
+
+from alembic import op
+import sqlalchemy as sa
+
+
+def upgrade():
+    op.add_column('ports', sa.Column('internal_info',
+                                     sa.Text(),
+                                     nullable=True))
+    op.add_column('portgroups', sa.Column('internal_info',
+                                          sa.Text(),
+                                          nullable=True))
@@ -162,6 +162,7 @@ class Port(Base):
     local_link_connection = Column(db_types.JsonEncodedDict)
     portgroup_id = Column(Integer, ForeignKey('portgroups.id'), nullable=True)
     pxe_enabled = Column(Boolean, default=True)
+    internal_info = Column(db_types.JsonEncodedDict)


 class Portgroup(Base):
@@ -179,6 +180,7 @@ class Portgroup(Base):
     node_id = Column(Integer, ForeignKey('nodes.id'), nullable=True)
     address = Column(String(18))
     extra = Column(db_types.JsonEncodedDict)
+    internal_info = Column(db_types.JsonEncodedDict)


 class NodeTag(Base):
@@ -205,7 +205,10 @@ class NeutronDHCPApi(base.BaseDHCP):
         :raises: InvalidIPv4Address
         """

-        vif = p_obj.extra.get('vif_port_id')
+        # NOTE(vdrok): This works because cleaning_vif_port_id doesn't exist
+        # when we're in deployment/tenant network
+        vif = (p_obj.internal_info.get('cleaning_vif_port_id') or
+               p_obj.extra.get('vif_port_id'))
         if not vif:
             obj_name = 'portgroup'
             if isinstance(p_obj, objects.Port):
@@ -519,7 +519,8 @@ def get_single_nic_with_vif_port_id(task):
              None if it cannot find any port with vif id.
     """
     for port in task.ports:
-        if port.extra.get('vif_port_id'):
+        if (port.internal_info.get('cleaning_vif_port_id') or
+                port.extra.get('vif_port_id')):
             return port.address


@@ -914,7 +915,7 @@ def prepare_cleaning_ports(task):

     This method deletes the cleaning ports currently existing
     for all the ports of the node and then creates a new one
-    for each one of them. It also adds 'vif_port_id' to port.extra
+    for each one of them. It also adds 'cleaning_vif_port_id' to internal_info
     of each Ironic port, after creating the cleaning ports.

     :param task: a TaskManager object containing the node
@@ -932,12 +933,12 @@ def prepare_cleaning_ports(task):
         # Allow to raise if it fails, is caught and handled in conductor
         ports = provider.provider.create_cleaning_ports(task)

-        # Add vif_port_id for each of the ports because some boot
+        # Add cleaning_vif_port_id for each of the ports because some boot
         # interfaces expects these to prepare for booting ramdisk.
         for port in task.ports:
-            extra_dict = port.extra
+            internal_info = port.internal_info
             try:
-                extra_dict['vif_port_id'] = ports[port.uuid]
+                internal_info['cleaning_vif_port_id'] = ports[port.uuid]
             except KeyError:
                 # This is an internal error in Ironic. All DHCP providers
                 # implementing create_cleaning_ports are supposed to
@@ -948,7 +949,7 @@ def prepare_cleaning_ports(task):
                 raise exception.NodeCleaningFailure(
                     node=task.node.uuid, reason=error)
             else:
-                port.extra = extra_dict
+                port.internal_info = internal_info
                 port.save()


@@ -969,9 +970,18 @@ def tear_down_cleaning_ports(task):
         provider.provider.delete_cleaning_ports(task)

     for port in task.ports:
-        if 'vif_port_id' in port.extra:
+        if 'cleaning_vif_port_id' in port.internal_info:
+            internal_info = port.internal_info
+            del internal_info['cleaning_vif_port_id']
+            port.internal_info = internal_info
+            port.save()
+        elif 'vif_port_id' in port.extra:
+            # TODO(vdrok): This piece is left for backwards compatibility,
+            # if ironic was upgraded during cleaning, vif_port_id
+            # containing cleaning neutron port UUID should be cleared,
+            # remove in Ocata
             extra_dict = port.extra
-            extra_dict.pop('vif_port_id', None)
+            del extra_dict['vif_port_id']
             port.extra = extra_dict
             port.save()

@@ -34,7 +34,8 @@ class Port(base.IronicObject, object_base.VersionedObjectDictCompat):
     # Version 1.4: Add list_by_node_id()
     # Version 1.5: Add list_by_portgroup_id() and new fields
     #              local_link_connection, portgroup_id and pxe_enabled
-    VERSION = '1.5'
+    # Version 1.6: Add internal_info field
+    VERSION = '1.6'

     dbapi = dbapi.get_instance()

@@ -47,7 +48,8 @@ class Port(base.IronicObject, object_base.VersionedObjectDictCompat):
         'local_link_connection': object_fields.FlexibleDictField(
             nullable=True),
         'portgroup_id': object_fields.IntegerField(nullable=True),
-        'pxe_enabled': object_fields.BooleanField()
+        'pxe_enabled': object_fields.BooleanField(),
+        'internal_info': object_fields.FlexibleDictField(nullable=True),
     }

     @staticmethod
@@ -27,7 +27,8 @@ from ironic.objects import fields as object_fields
 @base.IronicObjectRegistry.register
 class Portgroup(base.IronicObject, object_base.VersionedObjectDictCompat):
     # Version 1.0: Initial version
-    VERSION = '1.0'
+    # Version 1.1: Add internal_info field
+    VERSION = '1.1'

     dbapi = dbapi.get_instance()

@@ -38,6 +39,7 @@ class Portgroup(base.IronicObject, object_base.VersionedObjectDictCompat):
         'node_id': object_fields.IntegerField(nullable=True),
         'address': object_fields.MACAddressField(nullable=True),
         'extra': object_fields.FlexibleDictField(nullable=True),
+        'internal_info': object_fields.FlexibleDictField(nullable=True),
     }

     @staticmethod
@@ -95,6 +95,18 @@ class TestListPorts(test_api_base.BaseApiTest):
         # We always append "links"
         self.assertItemsEqual(['address', 'extra', 'links'], data)

+    def test_hide_fields_in_newer_versions_internal_info(self):
+        port = obj_utils.create_test_port(self.context, node_id=self.node.id,
+                                          internal_info={"foo": "bar"})
+        data = self.get_json(
+            '/ports/%s' % port.uuid,
+            headers={api_base.Version.string: str(api_v1.MIN_VER)})
+        self.assertNotIn('internal_info', data)
+
+        data = self.get_json('/ports/%s' % port.uuid,
+                             headers={api_base.Version.string: "1.18"})
+        self.assertEqual({"foo": "bar"}, data['internal_info'])
+
     def test_get_collection_custom_fields(self):
         fields = 'uuid,extra'
         for i in range(3):
@@ -133,10 +145,14 @@ class TestListPorts(test_api_base.BaseApiTest):
         self.assertEqual(http_client.NOT_ACCEPTABLE, response.status_int)

     def test_detail(self):
-        port = obj_utils.create_test_port(self.context, node_id=self.node.id)
-        data = self.get_json('/ports/detail')
+        port = obj_utils.create_test_port(self.context, node_id=self.node.id,)
+        data = self.get_json(
+            '/ports/detail',
+            headers={api_base.Version.string: str(api_v1.MAX_VER)}
+        )
         self.assertEqual(port.uuid, data['ports'][0]["uuid"])
         self.assertIn('extra', data['ports'][0])
+        self.assertIn('internal_info', data['ports'][0])
         self.assertIn('node_uuid', data['ports'][0])
         # never expose the node_id
         self.assertNotIn('node_id', data['ports'][0])
@@ -261,10 +277,12 @@ class TestListPorts(test_api_base.BaseApiTest):
         self.assertEqual(sorted(ports), uuids)

     def test_sort_key_invalid(self):
-        invalid_keys_list = ['foo', 'extra']
+        invalid_keys_list = ['foo', 'extra', 'internal_info']
         for invalid_key in invalid_keys_list:
-            response = self.get_json('/ports?sort_key=%s' % invalid_key,
-                                     expect_errors=True)
+            response = self.get_json(
+                '/ports?sort_key=%s' % invalid_key, expect_errors=True,
+                headers={api_base.Version.string: str(api_v1.MAX_VER)}
+            )
             self.assertEqual(http_client.BAD_REQUEST, response.status_int)
             self.assertEqual('application/json', response.content_type)
             self.assertIn(invalid_key, response.json['error_message'])
@@ -771,6 +789,14 @@ class TestPost(test_api_base.BaseApiTest):
         self.assertTrue(error_msg)
         self.assertIn(address, error_msg.upper())

+    def test_create_port_with_internal_field(self):
+        pdict = post_get_test_port()
+        pdict['internal_info'] = {'a': 'b'}
+        response = self.post_json('/ports', pdict, expect_errors=True)
+        self.assertEqual('application/json', response.content_type)
+        self.assertEqual(http_client.BAD_REQUEST, response.status_int)
+        self.assertTrue(response.json['error_message'])
+

 @mock.patch.object(rpcapi.ConductorAPI, 'destroy_port')
 class TestDelete(test_api_base.BaseApiTest):
@@ -193,6 +193,13 @@ class TestApiUtils(base.TestCase):
         mock_request.version.minor = 17
         utils.check_allow_management_verbs('adopt')

+    @mock.patch.object(pecan, 'request', spec_set=['version'])
+    def test_allow_port_internal_info(self, mock_request):
+        mock_request.version.minor = 18
+        self.assertTrue(utils.allow_port_internal_info())
+        mock_request.version.minor = 17
+        self.assertFalse(utils.allow_port_internal_info())
+

 class TestNodeIdent(base.TestCase):

@@ -94,3 +94,16 @@ class TestNetwork(db_base.DbTestCase):
         with task_manager.acquire(self.context, self.node.uuid) as task:
             result = network.get_node_vif_ids(task)
         self.assertEqual(expected, result)
+
+    def test_get_node_vif_ids_during_cleaning(self):
+        port = db_utils.create_test_port(
+            node_id=self.node.id, address='aa:bb:cc:dd:ee:ff',
+            internal_info={'cleaning_vif_port_id': 'test-vif-A'})
+        portgroup = db_utils.create_test_portgroup(
+            node_id=self.node.id, address='dd:ee:ff:aa:bb:cc',
+            internal_info={'cleaning_vif_port_id': 'test-vif-B'})
+        expected = {'portgroups': {portgroup.uuid: 'test-vif-B'},
+                    'ports': {port.uuid: 'test-vif-A'}}
+        with task_manager.acquire(self.context, self.node.uuid) as task:
+            result = network.get_node_vif_ids(task)
+        self.assertEqual(expected, result)
@@ -421,6 +421,18 @@ class MigrationCheckersMixin(object):
         self.assertIsInstance(nodes.c.network_interface.type,
                               sqlalchemy.types.String)

+    def _check_10b163d4481e(self, engine, data):
+        ports = db_utils.get_table(engine, 'ports')
+        portgroups = db_utils.get_table(engine, 'portgroups')
+        port_col_names = [column.name for column in ports.c]
+        portgroup_col_names = [column.name for column in portgroups.c]
+        self.assertIn('internal_info', port_col_names)
+        self.assertIn('internal_info', portgroup_col_names)
+        self.assertIsInstance(ports.c.internal_info.type,
+                              sqlalchemy.types.TEXT)
+        self.assertIsInstance(portgroups.c.internal_info.type,
+                              sqlalchemy.types.TEXT)
+
     def test_upgrade_and_version(self):
         with patch_with_engine(self.engine):
             self.migration_api.upgrade('head')
@@ -266,6 +266,7 @@ def get_test_port(**kw):
                                          'switch_info': 'switch1'}),
         'portgroup_id': kw.get('portgroup_id'),
         'pxe_enabled': kw.get('pxe_enabled', True),
+        'internal_info': kw.get('internal_info', {"bar": "buzz"}),
     }


@@ -369,6 +370,7 @@ def get_test_portgroup(**kw):
         'extra': kw.get('extra', {}),
         'created_at': kw.get('created_at'),
         'updated_at': kw.get('updated_at'),
+        'internal_info': kw.get('internal_info', {"bar": "buzz"}),
     }


@@ -341,6 +341,22 @@ class TestNeutron(db_base.DbTestCase):
         self.assertEqual(expected, result)
         mock_gfia.assert_called_once_with('test-vif-A', mock.sentinel.client)

+    @mock.patch('ironic.dhcp.neutron.NeutronDHCPApi._get_fixed_ip_address')
+    def test__get_port_ip_address_cleaning(self, mock_gfia):
+        expected = "192.168.1.3"
+        port = object_utils.create_test_port(
+            self.context, node_id=self.node.id, address='aa:bb:cc:dd:ee:ff',
+            uuid=uuidutils.generate_uuid(),
+            internal_info={'cleaning_vif_port_id': 'test-vif-A'})
+        mock_gfia.return_value = expected
+        with task_manager.acquire(self.context,
+                                  self.node.uuid) as task:
+            api = dhcp_factory.DHCPFactory().provider
+            result = api._get_port_ip_address(task, port,
+                                              mock.sentinel.client)
+        self.assertEqual(expected, result)
+        mock_gfia.assert_called_once_with('test-vif-A', mock.sentinel.client)
+
     @mock.patch('ironic.dhcp.neutron.NeutronDHCPApi._get_fixed_ip_address')
     def test__get_port_ip_address_for_portgroup(self, mock_gfia):
         expected = "192.168.1.3"
@@ -1394,6 +1394,17 @@ class VirtualMediaDeployUtilsTestCase(db_base.DbTestCase):
             address = utils.get_single_nic_with_vif_port_id(task)
         self.assertEqual('aa:bb:cc:dd:ee:ff', address)

+    def test_get_single_nic_with_cleaning_vif_port_id(self):
+        obj_utils.create_test_port(
+            self.context, node_id=self.node.id, address='aa:bb:cc:dd:ee:ff',
+            uuid=uuidutils.generate_uuid(),
+            internal_info={'cleaning_vif_port_id': 'test-vif-A'},
+            driver='iscsi_ilo')
+        with task_manager.acquire(self.context, self.node.uuid,
+                                  shared=False) as task:
+            address = utils.get_single_nic_with_vif_port_id(task)
+        self.assertEqual('aa:bb:cc:dd:ee:ff', address)
+

 class ParseInstanceInfoCapabilitiesTestCase(tests_base.TestCase):

@@ -1741,7 +1752,8 @@ class AgentMethodsTestCase(db_base.DbTestCase):
         delete_mock.assert_called_once_with(mock.ANY, task)

         self.ports[0].refresh()
-        self.assertEqual('vif-port-id', self.ports[0].extra['vif_port_id'])
+        self.assertEqual('vif-port-id',
+                         self.ports[0].internal_info['cleaning_vif_port_id'])

     def test_prepare_inband_cleaning_ports(self):
         self._test_prepare_inband_cleaning_ports()
@@ -1755,9 +1767,9 @@ class AgentMethodsTestCase(db_base.DbTestCase):
     @mock.patch('ironic.dhcp.neutron.NeutronDHCPApi.delete_cleaning_ports',
                 autospec=True)
     def test_tear_down_inband_cleaning_ports(self, neutron_mock):
-        extra_dict = self.ports[0].extra
-        extra_dict['vif_port_id'] = 'vif-port-id'
-        self.ports[0].extra = extra_dict
+        internal_info = self.ports[0].internal_info
+        internal_info['cleaning_vif_port_id'] = 'vif-port-id-1'
+        self.ports[0].internal_info = internal_info
         self.ports[0].save()
         with task_manager.acquire(
                 self.context, self.node.uuid, shared=False) as task:
@@ -1765,6 +1777,7 @@ class AgentMethodsTestCase(db_base.DbTestCase):
             neutron_mock.assert_called_once_with(mock.ANY, task)

         self.ports[0].refresh()
+        self.assertNotIn('cleaning_vif_port_id', self.ports[0].internal_info)
         self.assertNotIn('vif_port_id', self.ports[0].extra)

     @mock.patch.object(pxe.PXEBoot, 'prepare_ramdisk', autospec=True)
@@ -407,8 +407,8 @@ expected_object_fingerprints = {
     'Node': '1.16-2a6646627cb937f083f428f5d54e6458',
     'MyObj': '1.5-4f5efe8f0fcaf182bbe1c7fe3ba858db',
     'Chassis': '1.3-d656e039fd8ae9f34efc232ab3980905',
-    'Port': '1.5-a224755c3da5bc5cf1a14a11c0d00f3f',
-    'Portgroup': '1.0-1ac4db8fa31edd9e1637248ada4c25a1',
+    'Port': '1.6-609504503d68982a10f495659990084b',
+    'Portgroup': '1.1-e57da9ca808d3696c34dad8125564696',
     'Conductor': '1.1-5091f249719d4a465062a1b3dc7f860d'
 }

@@ -0,0 +1,6 @@
+---
+features:
+  - A new dictionary field ``internal_info`` is added to the port API object.
+    It is readonly from the API side, and can contain any internal information
+    ironic needs to store for the port. ``cleaning_vif_port_id`` is being
+    stored inside this dictionary.
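Because the field is readonly from the API side (see the BAD_REQUEST assertion in test_create_port_with_internal_field and the '/internal_info' entry returned by PortPatchType.internal_attrs() above), clients cannot set it. A hedged sketch of what a client would observe, with a placeholder endpoint and authentication omitted:

import requests

IRONIC = 'http://ironic.example.com:6385'              # placeholder endpoint
HEADERS = {'X-OpenStack-Ironic-API-Version': '1.18',
           'Content-Type': 'application/json'}

# Creating a port with internal_info pre-populated is rejected with 400, as
# the new TestPost case above asserts.
create = requests.post('%s/v1/ports' % IRONIC, headers=HEADERS,
                       json={'node_uuid': '<node-uuid>',       # placeholder
                             'address': 'fe:54:00:77:07:d9',
                             'internal_info': {'a': 'b'}})
assert create.status_code == 400

# Patching /internal_info should likewise be refused, since the path is
# listed in PortPatchType.internal_attrs(); an error status is expected here.
patch = requests.patch('%s/v1/ports/%s' % (IRONIC, '<port-uuid>'),
                       headers=HEADERS,
                       json=[{'op': 'replace', 'path': '/internal_info',
                              'value': {'a': 'b'}}])
assert patch.status_code >= 400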