Merge NodeAttributes model into Node model

* Move vms_conf from NodeAttributes to Node.
* Drop interfaces field, that is not used anymore.
* Fix all references to vms_conf across all sources.
* Remove explicit changed() call, since JSON fields
  can detect updates on all levels.
* Replace objects.Node.set_vms_conf() function
  with direct attribute assignment

Change-Id: I2112a6dba30b49e3d658ef1503ee48ad1f92e693
Implements: blueprint support-numa-cpu-pinning
This commit is contained in:
Alexander Saprykin 2016-02-08 14:43:29 +02:00
parent 65f59a458a
commit 5a2d4f0d3e
16 changed files with 117 additions and 85 deletions

View File

@ -87,7 +87,7 @@ class NodeVMsHandler(BaseHandler):
* 404 (node not found in db) * 404 (node not found in db)
""" """
node = self.get_object_or_404(objects.Node, node_id) node = self.get_object_or_404(objects.Node, node_id)
node_vms = node.attributes.vms_conf node_vms = node.vms_conf
return {"vms_conf": node_vms} return {"vms_conf": node_vms}
@content @content
@ -101,5 +101,5 @@ class NodeVMsHandler(BaseHandler):
node = self.get_object_or_404(objects.Node, node_id) node = self.get_object_or_404(objects.Node, node_id)
data = self.checked_data() data = self.checked_data()
objects.Node.set_vms_conf(node, data.get("vms_conf")) node.vms_conf = data.get("vms_conf")
return {"vms_conf": node.attributes.vms_conf} return {"vms_conf": node.vms_conf}

View File

@ -28,6 +28,7 @@ import sqlalchemy as sa
from oslo_serialization import jsonutils from oslo_serialization import jsonutils
from nailgun import consts from nailgun import consts
from nailgun.db.sqlalchemy.models import fields
revision = '11a9adc6d36a' revision = '11a9adc6d36a'
down_revision = '43b2cb64dae6' down_revision = '43b2cb64dae6'
@ -38,9 +39,11 @@ def upgrade():
upgrade_ip_address() upgrade_ip_address()
update_vips_from_network_roles() update_vips_from_network_roles()
upgrade_node_roles_metadata() upgrade_node_roles_metadata()
merge_node_attributes_with_nodes()
def downgrade(): def downgrade():
downgrade_merge_node_attributes_with_nodes()
downgrade_node_roles_metadata() downgrade_node_roles_metadata()
remove_foreign_key_ondelete() remove_foreign_key_ondelete()
downgrade_ip_address() downgrade_ip_address()
@ -588,3 +591,41 @@ def downgrade_node_roles_metadata():
id=id, id=id,
roles_metadata=jsonutils.dumps(roles_metadata), roles_metadata=jsonutils.dumps(roles_metadata),
) )
def merge_node_attributes_with_nodes():
    """Fold the ``node_attributes`` table into ``nodes``.

    Adds the new ``nodes.vms_conf`` JSON column, copies every row's
    value over from ``node_attributes`` and finally drops the now
    obsolete table.
    """
    bind = op.get_bind()
    vms_conf_column = sa.Column(
        'vms_conf',
        fields.JSON(),
        nullable=False,
        server_default='[]'
    )
    op.add_column('nodes', vms_conf_column)

    copy_source = sa.sql.text('SELECT node_id, vms_conf FROM node_attributes')
    copy_target = sa.sql.text(
        'UPDATE nodes SET vms_conf = :vms_conf WHERE id = :node_id')
    for node_id, vms_conf in bind.execute(copy_source):
        bind.execute(copy_target, node_id=node_id, vms_conf=vms_conf)

    # Everything useful now lives on ``nodes``; remove the side table.
    op.drop_table('node_attributes')
def downgrade_merge_node_attributes_with_nodes():
    """Recreate the ``node_attributes`` table and detach ``vms_conf``.

    Restores the pre-merge schema and — unlike a plain table recreate —
    copies each node's ``vms_conf`` back into ``node_attributes`` so a
    downgrade does not silently lose VM configuration data.
    """
    op.create_table(
        'node_attributes',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('node_id', sa.Integer(), nullable=True),
        sa.Column('interfaces', fields.JSON(), nullable=True),
        sa.Column('vms_conf', fields.JSON(),
                  nullable=False, server_default='[]'),
        sa.ForeignKeyConstraint(['node_id'], ['nodes.id'], ),
        sa.PrimaryKeyConstraint('id')
    )

    # Preserve existing data on downgrade instead of discarding it.
    connection = op.get_bind()
    select_query = sa.sql.text('SELECT id, vms_conf FROM nodes')
    insert_query = sa.sql.text(
        'INSERT INTO node_attributes (node_id, vms_conf) '
        'VALUES (:node_id, :vms_conf)')
    for node_id, vms_conf in connection.execute(select_query):
        connection.execute(insert_query, node_id=node_id, vms_conf=vms_conf)

    op.drop_column('nodes', 'vms_conf')

View File

@ -197,7 +197,6 @@ def upload_fixture(fileobj, loader=None):
# UGLY HACK for testing # UGLY HACK for testing
if new_obj.__class__.__name__ == 'Node': if new_obj.__class__.__name__ == 'Node':
objects.Node.create_attributes(new_obj)
objects.Node.update_interfaces(new_obj) objects.Node.update_interfaces(new_obj)
fire_callback_on_node_create(new_obj) fire_callback_on_node_create(new_obj)
db().commit() db().commit()

View File

@ -27,7 +27,6 @@ from nailgun.db.sqlalchemy.models.cluster import VmwareAttributes
from nailgun.db.sqlalchemy.models.release import Release from nailgun.db.sqlalchemy.models.release import Release
from nailgun.db.sqlalchemy.models.node import Node from nailgun.db.sqlalchemy.models.node import Node
from nailgun.db.sqlalchemy.models.node import NodeAttributes
from nailgun.db.sqlalchemy.models.node import NodeBondInterface from nailgun.db.sqlalchemy.models.node import NodeBondInterface
from nailgun.db.sqlalchemy.models.node import NodeNICInterface from nailgun.db.sqlalchemy.models.node import NodeNICInterface
from nailgun.db.sqlalchemy.models.node import NodeGroup from nailgun.db.sqlalchemy.models.node import NodeGroup

View File

@ -26,9 +26,10 @@ from sqlalchemy import String
from sqlalchemy import Text from sqlalchemy import Text
from sqlalchemy import Unicode from sqlalchemy import Unicode
from sqlalchemy import UniqueConstraint from sqlalchemy import UniqueConstraint
from sqlalchemy.orm import relationship, backref
from sqlalchemy.dialects import postgresql as psql from sqlalchemy.dialects import postgresql as psql
from sqlalchemy.ext.mutable import MutableDict from sqlalchemy.ext.mutable import MutableDict
from sqlalchemy.orm import relationship
from nailgun import consts from nailgun import consts
from nailgun.db.sqlalchemy.models.base import Base from nailgun.db.sqlalchemy.models.base import Base
@ -104,10 +105,7 @@ class Node(Base):
default=[], nullable=False, server_default='{}') default=[], nullable=False, server_default='{}')
primary_roles = Column(psql.ARRAY(String(consts.ROLE_NAME_MAX_SIZE)), primary_roles = Column(psql.ARRAY(String(consts.ROLE_NAME_MAX_SIZE)),
default=[], nullable=False, server_default='{}') default=[], nullable=False, server_default='{}')
attributes = relationship("NodeAttributes",
backref=backref("node"),
uselist=False,
cascade="all,delete")
nic_interfaces = relationship("NodeNICInterface", backref="node", nic_interfaces = relationship("NodeNICInterface", backref="node",
cascade="all, delete-orphan", cascade="all, delete-orphan",
order_by="NodeNICInterface.name") order_by="NodeNICInterface.name")
@ -125,6 +123,8 @@ class Node(Base):
server_default=None, nullable=True) server_default=None, nullable=True)
extensions = Column(psql.ARRAY(String(consts.EXTENSION_NAME_MAX_SIZE)), extensions = Column(psql.ARRAY(String(consts.EXTENSION_NAME_MAX_SIZE)),
default=[], nullable=False, server_default='{}') default=[], nullable=False, server_default='{}')
vms_conf = Column(MutableList.as_mutable(JSON),
default=[], server_default='[]', nullable=False)
@property @property
def interfaces(self): def interfaces(self):
@ -227,15 +227,6 @@ class Node(Base):
self.meta = data self.meta = data
class NodeAttributes(Base):
    """One-to-one side table holding extra attributes for a Node.

    ``vms_conf`` stores the node's VM configuration as a JSON list;
    wrapping the columns in MutableDict/MutableList lets SQLAlchemy
    detect in-place mutations of the JSON values.
    """
    __tablename__ = 'node_attributes'
    id = Column(Integer, primary_key=True)
    # ondelete='CASCADE': attribute rows are removed with their node
    node_id = Column(Integer, ForeignKey('nodes.id', ondelete='CASCADE'))
    interfaces = Column(MutableDict.as_mutable(JSON), default={})
    vms_conf = Column(MutableList.as_mutable(JSON),
                      default=[], server_default='[]')
class NodeNICInterface(Base): class NodeNICInterface(Base):
__tablename__ = 'node_nic_interfaces' __tablename__ = 'node_nic_interfaces'
id = Column(Integer, primary_key=True) id = Column(Integer, primary_key=True)

View File

@ -82,8 +82,6 @@ class NodeDefaultsDisksHandler(BaseHandler):
* 404 (node or its attributes not found in db) * 404 (node or its attributes not found in db)
""" """
node = self.get_object_or_404(objects.Node, node_id) node = self.get_object_or_404(objects.Node, node_id)
if not node.attributes:
raise self.http(404)
volumes = DisksFormatConvertor.format_disks_to_simple( volumes = DisksFormatConvertor.format_disks_to_simple(
node.volume_manager.gen_volumes_info()) node.volume_manager.gen_volumes_info())

View File

@ -1124,7 +1124,7 @@ class Cluster(NailgunObject):
nodes = [] nodes = []
for node in cls.get_nodes_by_role(instance, for node in cls.get_nodes_by_role(instance,
consts.VIRTUAL_NODE_TYPES.virt): consts.VIRTUAL_NODE_TYPES.virt):
for vm in node.attributes.vms_conf: for vm in node.vms_conf:
if not vm.get('created'): if not vm.get('created'):
nodes.append(node) nodes.append(node)
return nodes return nodes
@ -1133,11 +1133,9 @@ class Cluster(NailgunObject):
def set_vms_created_state(cls, instance): def set_vms_created_state(cls, instance):
nodes = cls.get_nodes_by_role(instance, consts.VIRTUAL_NODE_TYPES.virt) nodes = cls.get_nodes_by_role(instance, consts.VIRTUAL_NODE_TYPES.virt)
for node in nodes: for node in nodes:
for vm in node.attributes.vms_conf: for vm in node.vms_conf:
if not vm.get('created'): if not vm.get('created'):
vm['created'] = True vm['created'] = True
# Second level data was changed in 'vms_conf'.
node.attributes.vms_conf.changed()
db().flush() db().flush()
@classmethod @classmethod

View File

@ -245,7 +245,6 @@ class Node(NailgunObject):
cls.update_primary_roles(new_node, primary_roles) cls.update_primary_roles(new_node, primary_roles)
# creating attributes # creating attributes
cls.create_attributes(new_node)
cls.create_discover_notification(new_node) cls.create_discover_notification(new_node)
if new_node.ip: if new_node.ip:
@ -332,20 +331,6 @@ class Node(NailgunObject):
db().add(instance) db().add(instance)
db().flush() db().flush()
@classmethod
def create_attributes(cls, instance):
    """Create and attach a NodeAttributes record to a Node.

    :param instance: Node instance
    :returns: NodeAttributes instance
    """
    attrs = models.NodeAttributes()
    instance.attributes = attrs
    session = db()
    session.add(attrs)
    session.add(instance)
    session.flush()
    return attrs
@classmethod @classmethod
def is_interfaces_configuration_locked(cls, instance): def is_interfaces_configuration_locked(cls, instance):
"""Returns true if update of network configuration is not allowed. """Returns true if update of network configuration is not allowed.
@ -391,19 +376,6 @@ class Node(NailgunObject):
) )
logger.warning(traceback.format_exc()) logger.warning(traceback.format_exc())
@classmethod
def set_vms_conf(cls, instance, vms_conf):
    """Set vms_conf for Node instance from JSON data.

    :param instance: Node instance
    :param vms_conf: JSON with new vms_conf data
    :returns: None
    """
    # Bulk UPDATE bypasses the ORM, so the instance is refreshed below
    # to make instance.attributes reflect the new value.
    db().query(models.NodeAttributes).filter_by(
        node_id=instance.id).update({'vms_conf': vms_conf})
    db().flush()
    db().refresh(instance)
@classmethod @classmethod
def create_discover_notification(cls, instance): def create_discover_notification(cls, instance):
"""Create notification about discovering new Node """Create notification about discovering new Node
@ -1006,10 +978,8 @@ class Node(NailgunObject):
if consts.VIRTUAL_NODE_TYPES.virt not in node.all_roles: if consts.VIRTUAL_NODE_TYPES.virt not in node.all_roles:
return return
for vm in node.attributes.vms_conf: for vm in node.vms_conf:
vm['created'] = False vm['created'] = False
# Was changed second level data in 'vms_conf'
node.attributes.vms_conf.changed()
class NodeCollection(NailgunCollection): class NodeCollection(NailgunCollection):

View File

@ -19,10 +19,8 @@
from copy import deepcopy from copy import deepcopy
from itertools import groupby from itertools import groupby
import sqlalchemy as sa
from sqlalchemy.orm import joinedload
import six import six
import sqlalchemy as sa
from nailgun import consts from nailgun import consts
from nailgun.db import db from nailgun.db import db
@ -171,7 +169,7 @@ class DeploymentMultinodeSerializer(object):
).filter(sa.or_( ).filter(sa.or_(
Node.roles.any('ceph-osd'), Node.roles.any('ceph-osd'),
Node.pending_roles.any('ceph-osd') Node.pending_roles.any('ceph-osd')
)).options(joinedload('attributes')) ))
for node in nodes: for node in nodes:
for disk in node_extension_call('get_node_volumes', node): for disk in node_extension_call('get_node_volumes', node):
@ -250,7 +248,7 @@ class DeploymentMultinodeSerializer(object):
'fqdn': objects.Node.get_node_fqdn(node), 'fqdn': objects.Node.get_node_fqdn(node),
'status': node.status, 'status': node.status,
'role': role, 'role': role,
'vms_conf': node.attributes.vms_conf, 'vms_conf': node.vms_conf,
# TODO(eli): need to remove, required for the fake thread only # TODO(eli): need to remove, required for the fake thread only
'online': node.online 'online': node.online
} }

View File

@ -56,7 +56,6 @@ from nailgun.db.sqlalchemy.fixman import load_fake_deployment_tasks
from nailgun.db.sqlalchemy.fixman import load_fixture from nailgun.db.sqlalchemy.fixman import load_fixture
from nailgun.db.sqlalchemy.fixman import upload_fixture from nailgun.db.sqlalchemy.fixman import upload_fixture
from nailgun.db.sqlalchemy.models import ClusterPluginLink from nailgun.db.sqlalchemy.models import ClusterPluginLink
from nailgun.db.sqlalchemy.models import NodeAttributes
from nailgun.db.sqlalchemy.models import NodeNICInterface from nailgun.db.sqlalchemy.models import NodeNICInterface
from nailgun.db.sqlalchemy.models import Notification from nailgun.db.sqlalchemy.models import Notification
from nailgun.db.sqlalchemy.models import PluginLink from nailgun.db.sqlalchemy.models import PluginLink
@ -393,9 +392,6 @@ class EnvironmentManager(object):
self.db.commit() self.db.commit()
return task return task
def create_attributes(self):
return NodeAttributes()
def create_notification(self, **kwargs): def create_notification(self, **kwargs):
notif_data = { notif_data = {
"topic": "discover", "topic": "discover",

View File

@ -249,7 +249,7 @@ class TestNovaOrchestratorSerializer(OrchestratorSerializerTestBase):
node_db = self.db.query(Node).get(node['id']) node_db = self.db.query(Node).get(node['id'])
vms_conf = [{'id': 1, 'cluster_id': self.cluster.id}] vms_conf = [{'id': 1, 'cluster_id': self.cluster.id}]
objects.Node.set_vms_conf(node_db, vms_conf) node_db.vms_conf = vms_conf
serialized_data = self.serializer.serialize_node(node_db, 'controller') serialized_data = self.serializer.serialize_node(node_db, 'controller')
self.assertEqual(serialized_data['vms_conf'], vms_conf) self.assertEqual(serialized_data['vms_conf'], vms_conf)

View File

@ -118,16 +118,16 @@ class TestProvisioning(BaseIntegrationTest):
) )
nodes = self.env.nodes nodes = self.env.nodes
nodes[0].attributes.vms_conf = [ nodes[0].vms_conf = [
{'id': 1, 'cpu': 1, 'mem': 2, 'created': True}, {'id': 1, 'cpu': 1, 'mem': 2, 'created': True},
{'id': 2, 'cpu': 1, 'mem': 2, 'created': True} {'id': 2, 'cpu': 1, 'mem': 2, 'created': True}
] ]
nodes[1].attributes.vms_conf = [ nodes[1].vms_conf = [
{'id': 1, 'cpu': 2, 'mem': 4} {'id': 1, 'cpu': 2, 'mem': 4}
] ]
self.db.commit() self.db.commit()
self.env.launch_provisioning_selected([str(nodes[0].id)]) self.env.launch_provisioning_selected([str(nodes[0].id)])
for conf in nodes[0].attributes.vms_conf: for conf in nodes[0].vms_conf:
self.assertFalse(conf['created']) self.assertFalse(conf['created'])

View File

@ -34,8 +34,7 @@ class TestSpawnVMs(BaseIntegrationTest):
] ]
) )
cluster = self.env.clusters[0] cluster = self.env.clusters[0]
objects.Node.set_vms_conf(cluster.nodes[0], cluster.nodes[0].vms_conf = [{'id': 1, 'cluster_id': cluster.id}]
[{'id': 1, 'cluster_id': cluster.id}])
resp = self.app.put( resp = self.app.put(
reverse( reverse(

View File

@ -67,13 +67,10 @@ class TestLoadDbDriverWithSAExceptions(unittest.TestCase):
def test_sa_relationship_constraint(self): def test_sa_relationship_constraint(self):
def handler(): def handler():
node = models.Node( ip_addr = models.IPAddr()
mac='60:a4:4c:35:28:95',
timestamp=datetime.datetime.now()
)
node.attributes = models.IPAddr() ip_addr.network_data = models.IPAddr()
db.add(node) db.add(ip_addr)
db.flush() db.flush()
self.assertRaises(AssertionError, load_db_driver, handler) self.assertRaises(AssertionError, load_db_driver, handler)

View File

@ -12,6 +12,8 @@
# License for the specific language governing permissions and limitations # License for the specific language governing permissions and limitations
# under the License. # under the License.
import datetime
import alembic import alembic
from oslo_serialization import jsonutils from oslo_serialization import jsonutils
import sqlalchemy as sa import sqlalchemy as sa
@ -141,6 +143,31 @@ def prepare():
'fuel_version': '8.0', 'fuel_version': '8.0',
}]) }])
db.execute(
meta.tables['nodes'].insert(),
[{
'uuid': '26b508d0-0d76-4159-bce9-f67ec2765480',
'cluster_id': None,
'group_id': None,
'status': 'discover',
'meta': '{}',
'mac': 'aa:aa:aa:aa:aa:aa',
'timestamp': datetime.datetime.utcnow(),
}]
)
node_id = result.inserted_primary_key[0]
db.execute(
meta.tables['node_attributes'].insert(),
[{
'node_id': node_id,
'vms_conf': jsonutils.dumps([
{'cpu': 1, 'mem': 2},
{'cpu': 1, 'mem': 2},
])
}]
)
db.execute( db.execute(
meta.tables['ip_addrs'].insert(), meta.tables['ip_addrs'].insert(),
[ [
@ -377,3 +404,23 @@ class TestNodeRolesMigration(base.BaseAlembicMigrationTest):
role_group, role_group,
role_groups.get(role_name, consts.NODE_ROLE_GROUPS.other) role_groups.get(role_name, consts.NODE_ROLE_GROUPS.other)
) )
class TestMergeNodeAttributes(base.BaseAlembicMigrationTest):
    """Checks for the node_attributes -> nodes merge migration."""

    def test_node_attributes_not_exists(self):
        # The obsolete side table must be gone after the upgrade.
        self.assertNotIn('node_attributes', self.meta.tables)

    def test_data_moved_into_nodes_table(self):
        # Every migrated node must carry the vms_conf inserted by
        # prepare() into the old node_attributes table.
        expected_conf = [
            {'cpu': 1, 'mem': 2},
            {'cpu': 1, 'mem': 2},
        ]
        vms_conf_column = self.meta.tables['nodes'].c.vms_conf
        for row in db.execute(sa.select([vms_conf_column])):
            self.assertEqual(jsonutils.loads(row[0]), expected_conf)

View File

@ -729,8 +729,7 @@ class TestTaskObject(BaseIntegrationTest):
kvm_node = self.cluster.nodes[0] kvm_node = self.cluster.nodes[0]
kvm_node.roles = [consts.VIRTUAL_NODE_TYPES.virt] kvm_node.roles = [consts.VIRTUAL_NODE_TYPES.virt]
self.db.flush() self.db.flush()
objects.Node.set_vms_conf(kvm_node, kvm_node.vms_conf = [{'id': 1, 'cluster_id': self.cluster.id}]
[{'id': 1, 'cluster_id': self.cluster.id}])
task = Task(name=consts.TASK_NAMES.spawn_vms, task = Task(name=consts.TASK_NAMES.spawn_vms,
cluster=self.cluster, cluster=self.cluster,
status=consts.TASK_STATUSES.ready) status=consts.TASK_STATUSES.ready)
@ -742,7 +741,7 @@ class TestTaskObject(BaseIntegrationTest):
for node in self.cluster.nodes: for node in self.cluster.nodes:
if consts.VIRTUAL_NODE_TYPES.virt in node.roles: if consts.VIRTUAL_NODE_TYPES.virt in node.roles:
self.assertTrue(node.attributes.vms_conf[0].get('created')) self.assertTrue(node.vms_conf[0].get('created'))
else: else:
self.assertNotEquals(node.status, consts.NODE_STATUSES.ready) self.assertNotEquals(node.status, consts.NODE_STATUSES.ready)
@ -1432,12 +1431,12 @@ class TestClusterObjectVirtRoles(BaseTestCase):
] ]
) )
self.env.nodes[0].attributes.vms_conf = [ self.env.nodes[0].vms_conf = [
{'id': 1, 'cpu': 1, 'mem': 2}, {'id': 1, 'cpu': 1, 'mem': 2},
{'id': 2, 'cpu': 1, 'mem': 2}, {'id': 2, 'cpu': 1, 'mem': 2},
] ]
self.env.nodes[1].attributes.vms_conf = [ self.env.nodes[1].vms_conf = [
{'id': 1, 'cpu': 1, 'mem': 2}, {'id': 1, 'cpu': 1, 'mem': 2},
{'id': 2, 'cpu': 1, 'mem': 2}, {'id': 2, 'cpu': 1, 'mem': 2},
] ]
@ -1446,7 +1445,7 @@ class TestClusterObjectVirtRoles(BaseTestCase):
objects.Cluster.set_vms_created_state(self.env.clusters[0]) objects.Cluster.set_vms_created_state(self.env.clusters[0])
for node in self.env.nodes: for node in self.env.nodes:
for conf in node.attributes.vms_conf: for conf in node.vms_conf:
self.assertTrue(conf['created']) self.assertTrue(conf['created'])
def test_reset_vms_created_state(self): def test_reset_vms_created_state(self):
@ -1454,10 +1453,10 @@ class TestClusterObjectVirtRoles(BaseTestCase):
objects.Node.reset_vms_created_state(self.env.nodes[0]) objects.Node.reset_vms_created_state(self.env.nodes[0])
for conf in self.env.nodes[0].attributes.vms_conf: for conf in self.env.nodes[0].vms_conf:
self.assertFalse(conf['created']) self.assertFalse(conf['created'])
for conf in self.env.nodes[1].attributes.vms_conf: for conf in self.env.nodes[1].vms_conf:
self.assertTrue(conf['created']) self.assertTrue(conf['created'])