Extend tasks table with dry_run and graph_type attrs

With the transaction manager introduced and everything moved to graphs, a
task name is no longer unique enough to identify a transaction. We therefore
extend the tasks model with two columns, graph_type and dry_run, so that
transactions can be distinguished by the graph that was actually executed
(see the illustrative sketch below the commit metadata).

Co-Authored-By: Igor Kalnitsky <igor@kalnitsky.org>

Change-Id: I967133033eb41143b6acf467702f12de50edb971
Blueprint: graph-concept-extension
Bulat Gaifullin 2016-08-08 13:52:03 +03:00 committed by Igor Kalnitsky
parent 7a886021bb
commit 7d6203a04b
4 changed files with 83 additions and 1 deletion
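
A minimal, self-contained sketch of the idea behind the change. This is not
Nailgun code: the toy Task model, the graph-type names, the in-memory SQLite
database, and the SQLAlchemy 1.4+ imports are assumptions made purely for
illustration of how the (name, graph_type, dry_run) triple identifies a
transaction where the name alone no longer can.

    # Illustrative sketch only -- a toy model, not Nailgun's Task.
    from sqlalchemy import Boolean, Column, Integer, String, create_engine
    from sqlalchemy.orm import Session, declarative_base

    Base = declarative_base()


    class Task(Base):
        __tablename__ = 'tasks'
        id = Column(Integer, primary_key=True)
        name = Column(String(255))
        graph_type = Column(String(255))                           # new column
        dry_run = Column(Boolean, nullable=False, default=False)   # new column


    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)

    with Session(engine) as session:
        session.add_all([
            Task(name='deployment', graph_type='default', dry_run=False),
            Task(name='deployment', graph_type='default', dry_run=True),
            Task(name='deployment', graph_type='net_verify', dry_run=False),
        ])
        session.commit()

        # The name alone matches three rows; the triple pins down exactly one.
        match = session.query(Task).filter_by(
            name='deployment', graph_type='default', dry_run=True).one()
        print(match.id, match.graph_type, match.dry_run)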


@@ -38,9 +38,11 @@ def upgrade():
     upgrade_plugin_with_nics_and_nodes_attributes()
     upgrade_node_deployment_info()
     upgrade_release_required_component_types()
+    upgrade_task_model()


 def downgrade():
+    downgrade_task_model()
     downgrade_release_required_component_types()
     downgrade_node_deployment_info()
     downgrade_plugin_with_nics_and_nodes_attributes()
@@ -305,3 +307,21 @@ def downgrade_node_deployment_info():

 def downgrade_release_required_component_types():
     op.drop_column('releases', 'required_component_types')
+
+
+def upgrade_task_model():
+    op.add_column(
+        'tasks',
+        sa.Column('graph_type', sa.String(255), nullable=True)
+    )
+    op.add_column(
+        'tasks',
+        sa.Column(
+            'dry_run', sa.Boolean(), nullable=False, server_default='false'
+        )
+    )
+
+
+def downgrade_task_model():
+    op.drop_column('tasks', 'dry_run')
+    op.drop_column('tasks', 'graph_type')
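
The server_default='false' on dry_run matters because the column is NOT NULL:
the database itself fills the value for rows that already exist when the
migration runs, and for inserts that do not mention the column. Below is a
rough standalone sketch of that effect, with SQLite standing in for Nailgun's
PostgreSQL and raw SQL standing in for Alembic's op; it is not the migration
itself.

    # Sketch only: emulates what op.add_column(..., server_default='false') does.
    import sqlalchemy as sa

    engine = sa.create_engine('sqlite://')
    with engine.begin() as conn:
        conn.execute(sa.text("CREATE TABLE tasks (uuid TEXT, name TEXT)"))
        conn.execute(sa.text(
            "INSERT INTO tasks VALUES ('fake_task_uuid_0', 'dump')"))
        # PostgreSQL gets DEFAULT false from server_default='false'; SQLite's
        # boolean literal is spelled 0 here, but the effect is the same: the
        # pre-existing row is backfilled, so the NOT NULL constraint holds.
        conn.execute(sa.text(
            "ALTER TABLE tasks ADD COLUMN dry_run BOOLEAN NOT NULL DEFAULT 0"))
        print(conn.execute(sa.text("SELECT uuid, dry_run FROM tasks")).first())
        # -> ('fake_task_uuid_0', 0)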


@@ -16,6 +16,7 @@

 import uuid

+from sqlalchemy import Boolean
 from sqlalchemy import Column
 from sqlalchemy import DateTime
 from sqlalchemy import Enum
@@ -76,6 +77,9 @@ class Task(Base):
     # sum([t.weight for t in supertask.subtasks])
     weight = Column(Float, default=1.0)
     deleted_at = Column(DateTime)
+    dry_run = Column(Boolean(), nullable=False, default=False,
+                     server_default='false')
+    graph_type = Column(String(255))
     deployment_info = deferred(Column(MutableDict.as_mutable(JSON),
                                       nullable=True))
     cluster_settings = deferred(Column(MutableDict.as_mutable(JSON),
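
Note that dry_run declares two kinds of defaults: default=False is a
client-side value that SQLAlchemy supplies when a Task is created without the
attribute, while server_default='false' is emitted into the table DDL so the
database can fill the column on its own (which is what the migration above
relies on). A small throwaway sketch of where each default lives; the demo
table and SQLite engine are assumptions, not Nailgun code.

    # Toy table (not Nailgun's Task model) showing default= vs server_default=.
    import sqlalchemy as sa
    from sqlalchemy.schema import CreateTable

    metadata = sa.MetaData()
    demo = sa.Table(
        'demo_tasks', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('dry_run', sa.Boolean, nullable=False,
                  default=False,             # applied by SQLAlchemy at INSERT time
                  server_default='false'),   # emitted into the CREATE TABLE DDL
        sa.Column('graph_type', sa.String(255)),
    )

    engine = sa.create_engine('sqlite://')
    # Only server_default shows up in the DDL; default=False never reaches the DB.
    print(CreateTable(demo).compile(engine))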


@@ -109,3 +109,32 @@ class TestRequiredComponentTypesField(base.BaseAlembicMigrationTest):
     def test_downgrade_release_required_component_types(self):
         releases_table = self.meta.tables['releases']
         self.assertNotIn('required_component_types', releases_table.c)
+
+
+class TestTasksSchemaDowngrade(base.BaseAlembicMigrationTest):
+
+    def test_dry_run_field_does_no_exist(self):
+        db.execute(
+            self.meta.tables['tasks'].insert(),
+            [{
+                'uuid': 'fake_task_uuid_0',
+                'name': 'dump',
+                'status': 'pending'
+            }]
+        )
+        result = db.execute(sa.select([self.meta.tables['tasks']])).first()
+        self.assertNotIn('dry_run', result)
+
+    def test_graph_type_field_does_no_exist(self):
+        db.execute(
+            self.meta.tables['tasks'].insert(),
+            [{
+                'uuid': 'fake_task_uuid_0',
+                'name': 'dump',
+                'status': 'pending'
+            }]
+        )
+        result = db.execute(sa.select([self.meta.tables['tasks']])).first()
+        self.assertNotIn('graph_type', result)


@@ -24,7 +24,7 @@ from nailgun.db import dropdb
 from nailgun.db.migration import ALEMBIC_CONFIG
 from nailgun.test import base

-_prepare_revision = '675105097a69'
+_prepare_revision = 'f2314e5d63c9'
 _test_revision = 'c6edea552f1e'

 JSON_TASKS = [
@@ -534,3 +534,32 @@ class TestRequiredComponentTypesField(base.BaseAlembicMigrationTest):
             'required_component_types': None
         })
         db.rollback()
+
+
+class TestTasksSchemaMigration(base.BaseAlembicMigrationTest):
+
+    def test_dry_run_field_exist(self):
+        db.execute(
+            self.meta.tables['tasks'].insert(),
+            [{
+                'uuid': 'fake_task_uuid_0',
+                'name': 'dump',
+                'status': 'pending',
+            }]
+        )
+        result = db.execute(sa.select([self.meta.tables['tasks']])).first()
+        self.assertIn('dry_run', result)
+
+    def test_graph_type_field_exist(self):
+        db.execute(
+            self.meta.tables['tasks'].insert(),
+            [{
+                'uuid': 'fake_task_uuid_0',
+                'name': 'dump',
+                'status': 'pending',
+            }]
+        )
+        result = db.execute(sa.select([self.meta.tables['tasks']])).first()
+        self.assertIn('graph_type', result)
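
Outside of Nailgun's migration test harness, the upgrade/downgrade checks
boil down to reflecting the tasks table and looking for the new columns. A
rough standalone analogue follows; here the table is created by hand and
SQLite stands in for the real database and the Alembic run, so this is only a
sketch of the pattern, not a replacement for the tests above.

    # Rough standalone analogue of the schema checks above.
    import sqlalchemy as sa

    engine = sa.create_engine('sqlite://')
    with engine.begin() as conn:
        conn.execute(sa.text(
            "CREATE TABLE tasks ("
            " uuid TEXT, name TEXT, status TEXT,"
            " graph_type VARCHAR(255),"
            " dry_run BOOLEAN NOT NULL DEFAULT 0)"))

    meta = sa.MetaData()
    meta.reflect(bind=engine)      # like self.meta.tables[...] in the tests
    tasks = meta.tables['tasks']

    assert 'dry_run' in tasks.c        # present after upgrade
    assert 'graph_type' in tasks.c     # present after upgrade
    print(sorted(tasks.c.keys()))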