add node_deployment_info table

The size of the deployment_info field in the tasks table grows roughly as n**2
with the number of nodes, since each node's entry also carries a common part
that itself grows with the cluster. With 200 nodes the structure is about
20 MB; with 600 nodes it would theoretically be about 720 MB, and in practice
it does not fit into 1 GB.

A good solution would be to move the common part to a separate place, but that
is not a quick change, and it would not help anyway if every node ends up
being deployed with customized deployment info.
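
As a minimal sketch of the approach (made-up values; the real schema is
created by the migration below), each node's part of the deployment info
becomes its own row keyed by task_id and node_uid instead of living inside
one task-wide JSON blob:

    # before: tasks.deployment_info holds a single dict covering every node
    task_blob = {'master': {'attr': 1}, '1': {'attr': 2}, '2': {'attr': 3}}

    # after: one node_deployment_info row per (task, node)
    rows = [{'task_id': 42, 'node_uid': uid, 'deployment_info': info}
            for uid, info in task_blob.items()]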

Change-Id: Id3154ab423b0863d9cc4952335293bf5fc30df38
Partial-Bug: #1596987
Dmitry Guryanov 2016-07-01 13:08:02 +03:00
parent b5eb01ef71
commit 052082bd71
9 changed files with 213 additions and 10 deletions

View File

@@ -23,6 +23,8 @@ Create Date: 2016-04-08 15:20:43.989472
from alembic import op
import sqlalchemy as sa
from oslo_serialization import jsonutils
from nailgun.db.sqlalchemy.models import fields
@@ -34,9 +36,11 @@ down_revision = 'f2314e5d63c9'
def upgrade():
upgrade_plugin_links_constraints()
upgrade_plugin_with_nics_and_nodes_attributes()
upgrade_node_deployment_info()
def downgrade():
downgrade_node_deployment_info()
downgrade_plugin_with_nics_and_nodes_attributes()
downgrade_plugin_links_constraints()
@@ -234,3 +238,46 @@ def downgrade_plugin_with_nics_and_nodes_attributes():
op.drop_column('plugins', 'node_attributes_metadata')
op.drop_column('plugins', 'bond_attributes_metadata')
op.drop_column('plugins', 'nic_attributes_metadata')
def upgrade_node_deployment_info():
op.create_table(
'node_deployment_info',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('node_uid', sa.String(20), nullable=True),
sa.Column('task_id', sa.Integer(), nullable=False),
sa.Column('deployment_info', fields.JSON(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.ForeignKeyConstraint(
['task_id'], ['tasks.id'], ondelete='CASCADE')
)
op.create_index('node_deployment_info_task_id_and_node_uid',
'node_deployment_info', ['task_id', 'node_uid'])
connection = op.get_bind()
select_query = sa.sql.text("""
SELECT id, deployment_info
FROM tasks
WHERE deployment_info IS NOT NULL""")
insert_query = sa.sql.text("""
INSERT INTO node_deployment_info
(task_id, node_uid, deployment_info)
VALUES
(:task_id, :node_uid, :deployment_info)""")
for (task_id, deployment_info_str) in connection.execute(select_query):
deployment_info = jsonutils.loads(deployment_info_str)
for node_uid, node_deployment_info in deployment_info.iteritems():
connection.execute(
insert_query,
task_id=task_id,
node_uid=node_uid,
deployment_info=jsonutils.dumps(node_deployment_info))
update_query = sa.sql.text("UPDATE tasks SET deployment_info=NULL")
connection.execute(update_query)
def downgrade_node_deployment_info():
op.drop_table('node_deployment_info')
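
For illustration, a standalone sketch of what the data-move loop in
upgrade_node_deployment_info() does with one pre-existing task row (values
invented; in the migration itself the resulting tuples are inserted into
node_deployment_info and tasks.deployment_info is then set to NULL):

    from oslo_serialization import jsonutils

    # tasks.deployment_info as stored before the migration: one JSON string per task
    old_value = jsonutils.dumps({'master': {'attr': 1}, '1': {'attr': 2}})

    # one (task_id, node_uid, deployment_info) tuple per node of the task
    new_rows = [(55, uid, jsonutils.dumps(node_info))
                for uid, node_info in jsonutils.loads(old_value).items()]
    # e.g. [(55, 'master', '{"attr": 1}'), (55, '1', '{"attr": 2}')]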

View File

@@ -80,3 +80,4 @@ from nailgun.db.sqlalchemy.models.plugins \
from nailgun.db.sqlalchemy.models.plugins import Plugin
from nailgun.db.sqlalchemy.models.openstack_config import OpenstackConfig
from nailgun.db.sqlalchemy.models.node_deployment_info import NodeDeploymentInfo

View File

@@ -0,0 +1,43 @@
# -*- coding: utf-8 -*-
# Copyright 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sqlalchemy as sa
from sqlalchemy.ext.mutable import MutableDict
from sqlalchemy.orm import deferred
from nailgun.db.sqlalchemy.models.base import Base
from nailgun.db.sqlalchemy.models.fields import JSON
class NodeDeploymentInfo(Base):
__tablename__ = 'node_deployment_info'
__table_args__ = (
sa.Index('node_deployment_info_task_id_and_node_uid',
'task_id', 'node_uid'),
)
id = sa.Column(sa.Integer, primary_key=True, nullable=False)
task_id = sa.Column(
sa.Integer,
sa.ForeignKey('tasks.id', ondelete='CASCADE'),
nullable=False)
node_uid = sa.Column(
sa.String(20),
nullable=True)
deployment_info = deferred(sa.Column(MutableDict.as_mutable(JSON),
nullable=True))
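
For illustration only (not part of this change), reading the new table back
with Nailgun's scoped session, assuming the usual nailgun.db.db helper, could
look like the sketch below; it mirrors what objects.Transaction.get_deployment_info
does further down in this diff:

    from nailgun.db import db
    from nailgun.db.sqlalchemy.models import NodeDeploymentInfo

    task_id = 55  # placeholder task id
    rows = db().query(NodeDeploymentInfo).filter_by(task_id=task_id).all()
    # rebuild the old {node_uid: per-node info} shape from the per-row storage
    info = {row.node_uid: row.deployment_info for row in rows}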

View File

@@ -76,7 +76,6 @@ class Task(Base):
# sum([t.weight for t in supertask.subtasks])
weight = Column(Float, default=1.0)
deleted_at = Column(DateTime)
deployment_info = deferred(Column(MutableDict.as_mutable(JSON),
nullable=True))
cluster_settings = deferred(Column(MutableDict.as_mutable(JSON),

View File

@@ -0,0 +1,29 @@
# -*- coding: utf-8 -*-
# Copyright 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nailgun.db.sqlalchemy import models
from nailgun.objects import NailgunCollection
from nailgun.objects import NailgunObject
class NodeDeploymentInfo(NailgunObject):
model = models.NodeDeploymentInfo
class NodeDeploymentInfoCollection(NailgunCollection):
single = NodeDeploymentInfo

View File

@@ -20,6 +20,8 @@ from nailgun.db.sqlalchemy import models
from nailgun import errors
from nailgun.objects import NailgunCollection
from nailgun.objects import NailgunObject
from nailgun.objects.node_deployment_info import NodeDeploymentInfo
from nailgun.objects.node_deployment_info import NodeDeploymentInfoCollection
from nailgun.objects.serializers.transaction import TransactionSerializer
@@ -45,12 +47,25 @@ class Transaction(NailgunObject):
@classmethod
def attach_deployment_info(cls, instance, deployment_info):
instance.deployment_info = deployment_info
for node_uid, node_di in deployment_info.items():
NodeDeploymentInfo.create({'task_id': instance.id,
'node_uid': node_uid,
'deployment_info': node_di})
@classmethod
def get_deployment_info(cls, instance):
if instance is not None:
return instance.deployment_info
def get_deployment_info(cls, instance, node_uids=None):
if instance is None:
return {}
node_di_list = NodeDeploymentInfoCollection.filter_by(
None, task_id=instance.id)
if node_uids:
node_di_list = NodeDeploymentInfoCollection.filter_by_list(
node_di_list, "node_uid", node_uids)
deployment_info = {node_di.node_uid: node_di.deployment_info
for node_di in node_di_list}
return deployment_info
@classmethod
def attach_network_settings(cls, instance, settings):
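
A short usage sketch of the changed Transaction API (it mirrors the updated
unit test at the bottom of this diff; transaction is an existing Task instance
and the nested dict maps node_uid to that node's info):

    info = {'master': {'attr': 1}, '1': {'attr': 2}}
    objects.Transaction.attach_deployment_info(transaction, info)

    objects.Transaction.get_deployment_info(transaction)         # all nodes -> info
    objects.Transaction.get_deployment_info(transaction, ['1'])  # only node '1'
    objects.Transaction.get_deployment_info(None)                # no transaction -> {}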

View File

@@ -468,8 +468,11 @@ class ClusterTransaction(DeploymentTask):
state = {}
for transaction, data in groupby(transactions, lambda x: x[0]):
data = list(data)
transaction_nodes = [d[1] for d in data]
deployment_info = objects.Transaction.get_deployment_info(
transaction)
transaction, node_uids=transaction_nodes)
for _, node_uid, task_name in data:
task_state = state.setdefault(task_name, {})

View File

@@ -55,6 +55,37 @@ JSON_TASKS = [
}
]
DEPLOYMENT_INFO = {
55: {
'master': {
'attr1': 1,
'attr2': 2
},
'1': {
'attr1': 3,
'attr2': 4
},
'2': {
'attr1': 5,
'attr2': 6
}
},
56: {
'master': {
'attr1': 7,
'attr2': 8
},
'1': {
'attr1': 9,
'attr2': 10
},
'2': {
'attr1': 11,
'attr2': 12
}
}
}
def setup_module():
dropdb()
@@ -323,6 +354,25 @@ def prepare():
}]
)
result = db.execute(
meta.tables['tasks'].insert(),
[
{
'id': 55,
'uuid': '219eaafe-01a1-4f26-8edc-b9d9b0df06b3',
'name': 'deployment',
'status': 'running',
'deployment_info': jsonutils.dumps(DEPLOYMENT_INFO[55])
},
{
'id': 56,
'uuid': 'a45fbbcd-792c-4245-a619-f4fb2f094d38',
'name': 'deployment',
'status': 'running',
'deployment_info': jsonutils.dumps(DEPLOYMENT_INFO[56])
}
]
)
db.commit()
@@ -421,3 +471,18 @@ class TestPluginAttributesMigration(base.BaseAlembicMigrationTest):
'node_id': node_id,
'attributes': jsonutils.dumps({'test_attr': 'test'})
}])
class TestSplitDeploymentInfo(base.BaseAlembicMigrationTest):
def test_split_deployment_info(self):
node_di_table = self.meta.tables['node_deployment_info']
res = db.execute(sa.select([node_di_table]))
for data in res:
self.assertEqual(jsonutils.loads(data.deployment_info),
DEPLOYMENT_INFO[data.task_id][data.node_uid])
tasks_table = self.meta.tables['tasks']
res = db.execute(sa.select([tasks_table]))
for data in res:
self.assertIsNone(data.deployment_info)

View File

@@ -962,15 +962,16 @@ class TestTransactionObject(BaseIntegrationTest):
'name': consts.TASK_NAMES.deployment,
'status': consts.TASK_STATUSES.ready
})
self.assertIsNone(
objects.Transaction.get_deployment_info(transaction)
self.assertEquals(
objects.Transaction.get_deployment_info(transaction),
{}
)
info = {'test': 'test'}
info = {'test': {'test': 'test'}}
objects.Transaction.attach_deployment_info(transaction, info)
self.assertEqual(
info, objects.Transaction.get_deployment_info(transaction)
)
self.assertIsNone(objects.Transaction.get_deployment_info(None))
self.assertEqual(objects.Transaction.get_deployment_info(None), {})
def test_get_cluster_settings(self):
transaction = objects.Transaction.create({