Merge "Drop downgrade field in alembic script.py.mako and version"
commit d738f538ba
@@ -40,7 +40,6 @@ class DatabaseManager(object):
            title='subcommands',
            description='Action to perform')
        self.add_revision_args()
        self.add_downgrade_args()
        self.add_upgrade_args()
        self.add_history_args()
        self.add_current_args()
@@ -75,16 +74,6 @@ class DatabaseManager(object):
                                         'the latest/head if not specified.')
        create_parser.set_defaults(func=self.upgrade)

    def add_downgrade_args(self):
        """Create 'downgrade' command parser and arguments."""
        create_parser = self.subparsers.add_parser('downgrade',
                                                   help='Downgrade to a '
                                                        'previous DB '
                                                        'version file.')
        create_parser.add_argument('--version', '-v', default='need version',
                                   help='the version to downgrade back to.')
        create_parser.set_defaults(func=self.downgrade)

    def add_history_args(self):
        """Create 'history' command parser and arguments."""
        create_parser = self.subparsers.add_parser(
@@ -115,10 +104,6 @@ class DatabaseManager(object):
        """Process the 'upgrade' Alembic command."""
        commands.upgrade(to_version=args.version, sql_url=args.dburl)

    def downgrade(self, args):
        """Process the 'downgrade' Alembic command."""
        commands.downgrade(to_version=args.version, sql_url=args.dburl)

    def history(self, args):
        commands.history(args.verbose, sql_url=args.dburl)

@@ -16,7 +16,3 @@ ${imports if imports else ""}

def upgrade():
    ${upgrades if upgrades else "pass"}


def downgrade():
    ${downgrades if downgrades else "pass"}
@@ -48,7 +48,3 @@ def upgrade():
        sa.ForeignKeyConstraint(['secret_id'], ['secrets.id'],),
        sa.PrimaryKeyConstraint('id'),
    )


def downgrade():
    op.drop_table('secret_store_metadata')
@@ -37,11 +37,3 @@ def upgrade():

    # make project_id no longer nullable
    op.alter_column('secrets', 'project_id', nullable=False)


def downgrade():
    metadata = _get_database_metadata()
    secrets = metadata.tables['secrets']
    op.alter_column('secrets', 'project_id', nullable=True)
    op.execute(secrets.update().
               values({'project_id': None}))
@@ -13,7 +13,3 @@ down_revision = None

def upgrade():
    pass


def downgrade():
    pass
@@ -103,21 +103,3 @@ def upgrade():
                                          nullable=True))
    op.add_column(u'secrets', sa.Column('creator_id', sa.String(length=255),
                                        nullable=True))


def downgrade():
    op.drop_column(u'secrets', 'creator_id')
    op.drop_column(u'orders', 'creator_id')
    op.drop_column(u'containers', 'creator_id')
    op.drop_index(op.f('ix_container_acl_users_acl_id'),
                  table_name='container_acl_users')
    op.drop_table('container_acl_users')
    op.drop_index(op.f('ix_secret_acl_users_acl_id'),
                  table_name='secret_acl_users')
    op.drop_table('secret_acl_users')
    op.drop_index(op.f('ix_container_acls_container_id'),
                  table_name='container_acls')
    op.drop_table('container_acls')
    op.drop_index(op.f('ix_secret_acls_secret_id'),
                  table_name='secret_acls')
    op.drop_table('secret_acls')
@@ -21,9 +21,3 @@ def upgrade():
                    unique=False)
    op.create_foreign_key('secrets_project_fk', 'secrets', 'projects',
                          ['project_id'], ['id'])


def downgrade():
    op.drop_constraint('secrets_project_fk', 'secrets', type_='foreignkey')
    op.drop_index(op.f('ix_secrets_project_id'), table_name='secrets')
    op.drop_column('secrets', 'project_id')
@@ -17,34 +17,3 @@ from sqlalchemy.dialects import postgresql

def upgrade():
    op.drop_table('project_secret')


def downgrade():
    op.create_table(
        'project_secret',
        sa.Column('id', sa.VARCHAR(length=36), autoincrement=False,
                  nullable=False),
        sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False,
                  nullable=False),
        sa.Column('updated_at', postgresql.TIMESTAMP(), autoincrement=False,
                  nullable=False),
        sa.Column('deleted_at', postgresql.TIMESTAMP(), autoincrement=False,
                  nullable=True),
        sa.Column('deleted', sa.BOOLEAN(), autoincrement=False,
                  nullable=False),
        sa.Column('status', sa.VARCHAR(length=20), autoincrement=False,
                  nullable=False),
        sa.Column('role', sa.VARCHAR(length=255), autoincrement=False,
                  nullable=True),
        sa.Column('project_id', sa.VARCHAR(length=36), autoincrement=False,
                  nullable=False),
        sa.Column('secret_id', sa.VARCHAR(length=36), autoincrement=False,
                  nullable=False),
        sa.ForeignKeyConstraint(['project_id'], [u'projects.id'],
                                name=u'project_secret_project_fk'),
        sa.ForeignKeyConstraint(['secret_id'], [u'secrets.id'],
                                name=u'project_secret_secret_fk'),
        sa.PrimaryKeyConstraint('id', name=u'project_secret_pkey'),
        sa.UniqueConstraint('project_id', 'secret_id',
                            name=u'_project_secret_uc')
    )
@@ -54,74 +54,3 @@ def upgrade():
    op.create_index(op.f('ix_project_secret_project_id'), 'project_secret', ['project_id'], unique=False)
    op.create_index(op.f('ix_project_secret_secret_id'), 'project_secret', ['secret_id'], unique=False)
    op.create_index(op.f('ix_secret_store_metadata_secret_id'), 'secret_store_metadata', ['secret_id'], unique=False)


def downgrade():
    ctx = op.get_context()
    _drop_constraint(ctx, 'secret_store_metadata_ibfk_1', 'secret_store_metadata')
    op.drop_index(op.f('ix_secret_store_metadata_secret_id'), table_name='secret_store_metadata')

    op.drop_constraint('project_secret_secret_fk', 'project_secret', type_='foreignkey')
    op.drop_index(op.f('ix_project_secret_secret_id'), table_name='project_secret')
    op.drop_index(op.f('ix_project_secret_project_id'), table_name='project_secret')
    op.drop_index(op.f('ix_project_certificate_authorities_project_id'), table_name='project_certificate_authorities')

    _drop_constraint(ctx, 'project_certificate_authorities_ibfk_1', 'project_certificate_authorities')
    op.drop_index(op.f('ix_project_certificate_authorities_ca_id'), table_name='project_certificate_authorities')

    op.drop_constraint('preferred_certificate_authorities_fk', 'preferred_certificate_authorities', type_='foreignkey')
    op.drop_index(op.f('ix_preferred_certificate_authorities_project_id'), table_name='preferred_certificate_authorities')

    op.drop_index(op.f('ix_preferred_certificate_authorities_ca_id'), table_name='preferred_certificate_authorities')
    op.alter_column('preferred_certificate_authorities', 'ca_id',
                    existing_type=sa.VARCHAR(length=36),
                    nullable=True)

    if ctx.dialect.name == 'mysql':
        # add the fk back in for the MySQL impl
        op.create_foreign_key('preferred_certificate_authorities_ibfk_1', 'preferred_certificate_authorities',
                              'certificate_authorities', ['ca_id'], ['id'])

    _drop_constraint(ctx, 'orders_ibfk_2', 'orders')
    op.drop_index(op.f('ix_orders_secret_id'), table_name='orders')

    op.drop_constraint('orders_project_fk', 'orders', type_='foreignkey')
    op.drop_index(op.f('ix_orders_project_id'), table_name='orders')

    _drop_constraint(ctx, 'orders_ibfk_3', 'orders')
    op.drop_index(op.f('ix_orders_container_id'), table_name='orders')

    _drop_constraint(ctx, 'order_retry_tasks_ibfk_1', 'order_retry_tasks')
    op.drop_index(op.f('ix_order_retry_tasks_order_id'), table_name='order_retry_tasks')

    _drop_constraint(ctx, 'order_plugin_metadata_ibfk_1', 'order_plugin_metadata')
    op.drop_index(op.f('ix_order_plugin_metadata_order_id'), table_name='order_plugin_metadata')

    _drop_constraint(ctx, 'order_barbican_metadata_ibfk_1', 'order_barbican_metadata')
    op.drop_index(op.f('ix_order_barbican_metadata_order_id'), table_name='order_barbican_metadata')

    op.drop_constraint('kek_data_project_fk', 'kek_data', type_='foreignkey')
    op.drop_index(op.f('ix_kek_data_project_id'), table_name='kek_data')

    _drop_constraint(ctx, 'encrypted_data_ibfk_1', 'encrypted_data')
    op.drop_index(op.f('ix_encrypted_data_secret_id'), table_name='encrypted_data')

    _drop_constraint(ctx, 'encrypted_data_ibfk_2', 'encrypted_data')
    op.drop_index(op.f('ix_encrypted_data_kek_id'), table_name='encrypted_data')

    op.drop_constraint('containers_project_fk', 'containers', type_='foreignkey')
    op.drop_index(op.f('ix_containers_project_id'), table_name='containers')

    _drop_constraint(ctx, 'container_secret_ibfk_2', 'container_secret')
    op.drop_index(op.f('ix_container_secret_secret_id'), table_name='container_secret')

    _drop_constraint(ctx, 'container_secret_ibfk_1', 'container_secret')
    op.drop_index(op.f('ix_container_secret_container_id'), table_name='container_secret')

    _drop_constraint(ctx, 'container_consumer_metadata_ibfk_1', 'container_consumer_metadata')
    op.drop_index(op.f('ix_container_consumer_metadata_container_id'), table_name='container_consumer_metadata')

    op.drop_index(op.f('ix_certificate_authority_metadata_key'), table_name='certificate_authority_metadata')

    _drop_constraint(ctx, 'certificate_authority_metadata_ibfk_1', 'certificate_authority_metadata')
    op.drop_index(op.f('ix_certificate_authority_metadata_ca_id'), table_name='certificate_authority_metadata')
@@ -20,11 +20,3 @@ def upgrade():
    op.add_column('orders', sa.Column('meta', sa.Text, nullable=True))
    op.add_column('orders', sa.Column('type', sa.String(length=255), nullable=True))
    ### end Alembic commands ###


def downgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('orders', 'type')
    op.drop_column('orders', 'meta')
    op.drop_column('orders', 'container_id')
    ### end Alembic commands ###
@@ -21,22 +21,3 @@ def upgrade():
    op.drop_column('orders', 'secret_expiration')
    op.drop_column('orders', 'secret_payload_content_type')
    op.drop_column('orders', 'secret_name')


def downgrade():
    op.add_column('orders', sa.Column('secret_name', sa.String(length=255),
                                      nullable=True))
    op.add_column('orders', sa.Column('secret_payload_content_type',
                                      sa.String(length=255),
                                      nullable=True))
    op.add_column('orders', sa.Column('secret_expiration',
                                      sa.DateTime(), nullable=True))
    op.add_column('orders', sa.Column('secret_bit_length',
                                      sa.Integer(),
                                      autoincrement=False,
                                      nullable=True))
    op.add_column('orders', sa.Column('secret_algorithm',
                                      sa.String(length=255),
                                      nullable=True))
    op.add_column('orders', sa.Column('secret_mode', sa.String(length=255),
                                      nullable=True))
@@ -18,9 +18,3 @@ def upgrade():
    op.alter_column('projects', 'keystone_id',
                    type_=sa.String(36),
                    new_column_name='external_id')


def downgrade():
    op.alter_column('projects', 'external_id',
                    type_=sa.String(36),
                    new_column_name='keystone_id')
@@ -17,9 +17,3 @@ import sqlalchemy as sa

def upgrade():
    op.add_column('orders', sa.Column('sub_status', sa.String(length=36), nullable=True))
    op.add_column('orders', sa.Column('sub_status_message', sa.String(length=255), nullable=True))


def downgrade():
    op.drop_column('orders', 'sub_status')
    op.drop_column('orders', 'sub_status_message')
@@ -25,16 +25,3 @@ def upgrade():
    op.execute( 'ALTER TABLE container_secret DROP PRIMARY KEY, ADD PRIMARY KEY(`id`,`container_id`,`secret_id`)');
    op.execute( 'ALTER TABLE containers CHANGE type type ENUM(\'generic\',\'rsa\',\'dsa\',\'certificate\') DEFAULT NULL');
    ### end Alembic commands ###


def downgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('container_secret', 'updated_at')
    op.drop_column('container_secret', 'status')
    op.drop_column('container_secret', 'id')
    op.drop_column('container_secret', 'deleted_at')
    op.drop_column('container_secret', 'deleted')
    op.drop_column('container_secret', 'created_at')
    op.execute( 'ALTER TABLE container_secret DROP PRIMARY KEY, ADD PRIMARY KEY(`container_id`,`secret_id`)');
    op.execute( 'ALTER TABLE containers CHANGE type type ENUM(\'generic\',\'rsa\',\'certificate\') DEFAULT NULL');
    ### end Alembic commands ###
@@ -98,10 +98,3 @@ def upgrade():
        sa.PrimaryKeyConstraint('id', 'project_id'),
        sa.UniqueConstraint('project_id')
    )


def downgrade():
    op.drop_table('preferred_certificate_authorities')
    op.drop_table('certificate_authority_metadata')
    op.drop_table('project_certificate_authorities')
    op.drop_table('certificate_authorities')
@@ -26,17 +26,3 @@ def upgrade():
        'value',
        type_=sa.Text()
    )


def downgrade():
    op.alter_column(
        'order_barbican_metadata',
        'value',
        type_=sa.String(255)
    )

    op.alter_column(
        'certificate_authority_metadata',
        'value',
        type_=sa.String(255)
    )
@@ -49,15 +49,3 @@ def upgrade():
                            sa.DateTime(),
                            nullable=False,
                            server_default=str(timeutils.utcnow())))


def downgrade():
    op.drop_column('order_retry_tasks', 'created_at')
    op.drop_column('order_retry_tasks', 'deleted')
    op.drop_column('order_retry_tasks', 'deleted_at')
    op.drop_column('order_retry_tasks', 'status')
    op.drop_column('order_retry_tasks', 'updated_at')
@@ -33,7 +33,3 @@ def upgrade():
        sa.ForeignKeyConstraint(['order_id'], ['orders.id'], ),
        sa.PrimaryKeyConstraint('id')
    )


def downgrade():
    op.drop_table('order_barbican_metadata')
@@ -33,7 +33,3 @@ def upgrade():
        sa.ForeignKeyConstraint(['order_id'], ['orders.id'],),
        sa.PrimaryKeyConstraint('id'),
    )


def downgrade():
    op.drop_table('order_plugin_metadata')
@@ -17,7 +17,3 @@ import sqlalchemy as sa

def upgrade():
    op.add_column('secrets', sa.Column('secret_type', sa.String(length=255),
                                       nullable=False, server_default="opaque"))


def downgrade():
    op.drop_column('secrets', 'secret_type')
@@ -16,7 +16,3 @@ import sqlalchemy as sa

def upgrade():
    op.alter_column('kek_data', 'plugin_name', nullable=False)


def downgrade():
    op.alter_column('kek_data', 'plugin_name', nullable=True)
@@ -33,22 +33,3 @@ def upgrade():
    # reverse existing flag value as project_access is negation of creator_only
    op.execute('UPDATE container_acls SET project_access = NOT project_access',
               execution_options={'autocommit': True})


def downgrade():

    ctx = op.get_context()
    con = op.get_bind()

    op.alter_column('secret_acls', 'project_access',
                    existing_type=sa.BOOLEAN(), new_column_name='creator_only')

    op.execute('UPDATE secret_acls SET creator_only = NOT creator_only',
               execution_options={'autocommit': True})

    op.alter_column('container_acls', 'project_access',
                    existing_type=sa.BOOLEAN(),
                    new_column_name='creator_only')

    op.execute('UPDATE container_acls SET creator_only = NOT creator_only',
               execution_options={'autocommit': True})
@@ -30,17 +30,6 @@ def _change_fk_to_project(ctx, con, table, fk_old, fk_new):
    op.create_foreign_key(fk_new, table,
                          'projects', ['project_id'], ['id'])


def _change_fk_to_tenant(ctx, con, table, fk_old):
    _drop_constraint(ctx, con, table, fk_old)
    op.alter_column(table, 'project_id',
                    type_=sa.String(36),
                    new_column_name='tenant_id')
    op.create_foreign_key(
        None,  # None -> auto-generate FK name based on dialect.
        table, 'tenants', ['tenant_id'], ['id'])


def upgrade():
    # project_secret table
    ctx = op.get_context()
@@ -87,53 +76,3 @@ def upgrade():

    _change_fk_to_project(
        ctx, con, 'orders', 'orders_ibfk_1', 'orders_project_fk')


def downgrade():
    # project_secret table
    ctx = op.get_context()
    con = op.get_bind()

    # ---- Update project_secret table to tenant_secret:

    _drop_constraint(ctx, con, 'project_secret', 'project_secret_project_fk')
    _drop_constraint(ctx, con, 'project_secret', 'project_secret_secret_fk')

    op.drop_constraint('_project_secret_uc',
                       'project_secret',
                       type_='unique')

    op.rename_table('project_secret', 'tenant_secret')
    op.alter_column('tenant_secret', 'project_id',
                    type_=sa.String(36),
                    new_column_name='tenant_id')

    op.create_unique_constraint('_tenant_secret_uc', 'tenant_secret',
                                ['tenant_id', 'secret_id'])

    # ---- Update projects table to tenants:

    op.rename_table('projects', 'tenants')

    # re-create the foreign key constraints with explicit names.
    op.create_foreign_key(
        None,  # None -> auto-generate FK name based on dialect.
        'tenant_secret', 'tenants', ['tenant_id'], ['id'])
    op.create_foreign_key(
        None,  # None -> auto-generate FK name based on dialect.
        'tenant_secret', 'secrets', ['secret_id'], ['id'])

    # ---- Update containers table:

    _change_fk_to_tenant(
        ctx, con, 'containers', 'containers_project_fk')

    # ---- Update kek_data table:

    _change_fk_to_tenant(
        ctx, con, 'kek_data', 'kek_data_project_fk')

    # ---- Update orders table:

    _change_fk_to_tenant(
        ctx, con, 'orders', 'orders_project_fk')
@@ -28,7 +28,3 @@ def upgrade():
        sa.PrimaryKeyConstraint("id"),
        mysql_engine="InnoDB"
    )


def downgrade():
    op.drop_table("order_retry_tasks")
@@ -18,9 +18,3 @@ def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.execute( 'ALTER TABLE containers CHANGE type type ENUM(\'generic\',\'rsa\',\'certificate\') DEFAULT NULL');
    ### end Alembic commands ###


def downgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.execute( 'ALTER TABLE containers CHANGE type type ENUM(\'generic\',\'rsa\') DEFAULT NULL');
    ### end Alembic commands ###
@@ -62,12 +62,6 @@ def upgrade(to_version='head', sql_url=None):
    alembic_command.upgrade(alembic_cfg, to_version)


def downgrade(to_version, sql_url=None):
    """Downgrade to the specified version."""
    alembic_cfg = init_config(sql_url)
    alembic_command.downgrade(alembic_cfg, to_version)


def history(verbose, sql_url=None):
    alembic_cfg = init_config(sql_url)
    alembic_command.history(alembic_cfg, verbose=verbose)
@@ -13,13 +13,18 @@
import mock
import sqlalchemy

from alembic import script as alembic_script

from barbican.common import config
from barbican.common import exception
from barbican.model.migration import commands as migration
from barbican.model import models
from barbican.model import repositories
from barbican.tests import database_utils
from barbican.tests import utils

from oslo_config import cfg


class WhenCleaningRepositoryPagingParameters(utils.BaseTestCase):
@@ -321,3 +326,24 @@ class WhenTestingIsDbConnectionError(utils.BaseTestCase):
        result = repositories.is_db_connection_error(args)

        self.assertTrue(result)


class WhenTestingMigrations(utils.BaseTestCase):

    def setUp(self):
        super(WhenTestingMigrations, self).setUp()
        self.alembic_config = migration.init_config()
        self.alembic_config.barbican_config = cfg.CONF

    def test_no_downgrade(self):
        script_dir = alembic_script.ScriptDirectory.from_config(
            self.alembic_config)
        versions = [v for v in script_dir.walk_revisions(base='base',
                                                         head='heads')]
        failed_revisions = []
        for version in versions:
            if hasattr(version.module, 'downgrade'):
                failed_revisions.append(version.revision)

        if failed_revisions:
            self.fail('Migrations %s have downgrade' % failed_revisions)
@@ -126,9 +126,6 @@ instructions).
      barbican.model.models.py module, and also in the Alembic version
      modules when creating/dropping constraints, otherwise MySQL migrations
      might crash (see the sketch following this excerpt).
   3. If only columns were added with no uniqueness constraints, you should
      consider reordering the ``downgrade()`` lines to place them in the
      same order as the ``upgrade()`` lines.

d. **If you added new tables, follow this guidance**:

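To illustrate the constraint-naming guidance above, a minimal sketch of Alembic operations that use an explicit constraint name (the table and constraint names here are examples only, taken from the migrations shown in this change):

    # Naming the constraint explicitly lets a later migration drop it by name,
    # instead of relying on whatever name MySQL auto-generates.
    op.create_foreign_key('orders_project_fk', 'orders', 'projects',
                          ['project_id'], ['id'])
    op.drop_constraint('orders_project_fk', 'orders', type_='foreignkey')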
@@ -162,8 +159,7 @@ Manually
   Note that only the first 20 characters of the description are used.
3. You can then edit this file per tutorial and the `Alembic Operation
   Reference`_ page for available operations you may make from the version
   files. **You must properly fill in both the** ``upgrade()`` **and**
   ``downgrade()`` **methods.**
   files. **You must properly fill in the** ``upgrade()`` **methods.**

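For reference, a freshly generated version file now only needs its ``upgrade()`` filled in, since the template above no longer emits a ``downgrade()`` stub. A minimal sketch (the table and column names are illustrative only, mirroring migrations elsewhere in this change):

    import sqlalchemy as sa

    from alembic import op


    def upgrade():
        # Example only: add a nullable text column to an existing table.
        op.add_column('orders', sa.Column('meta', sa.Text(), nullable=True))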
Applying Changes
-----------------
@@ -181,6 +177,11 @@ automatic database updates from the Barbican application should be disabled by
adding/updating ``db_auto_create = False`` in the ``barbican.conf``
configuration file.

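For illustration, the relevant ``barbican.conf`` snippet might look like this (assuming the option is placed in the ``[DEFAULT]`` section):

    [DEFAULT]
    # Do not create or update the schema automatically at start-up;
    # apply migrations explicitly with barbican-db-manage instead.
    db_auto_create = False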
**Note** : Before attempting any upgrade, you should make a full database
backup of your production data. As of Kilo, database downgrades are not
supported in OpenStack, and the only method available to get back to a
prior database version will be to restore from backup.

Via Application
''''''''''''''''

@@ -211,9 +212,37 @@ To upgrade to a specific version, run this command:
upgrade -v <Alembic-ID-of-version>``. The ``Alembic-ID-of-version`` is a
unique ID assigned to the change, such as ``1a0c2cdafb38``.

To downgrade to a specific version, run this command:
``bin/barbican-db-manage.py -d <Full URL to database, including user/pw>
downgrade -v <Alembic-ID-of-version>``.

Downgrade
'''''''''

Upgrades involve complex operations and can fail. Before attempting any
upgrade, you should make a full database backup of your production data. As of
Kilo, database downgrades are not supported, and the only method available to
get back to a prior database version will be to restore from backup.

You must complete these steps to successfully roll back your environment:

1. Roll back configuration files.
2. Restore databases from backup.
3. Roll back packages.

Rolling back upgrades is a tricky process because distributions tend to put
much more effort into testing upgrades than downgrades. Broken downgrades
often take significantly more effort to troubleshoot and resolve than broken
upgrades. Only you can weigh the risks of trying to push a failed upgrade
forward versus rolling it back. Generally, consider rolling back as the
very last option.

The backup instructions provided in the `Backup tutorial`_ ensure that you have
proper backups of your databases and configuration files. Read through this
section carefully and verify that you have the requisite backups to restore.

**Note** : The backup tutorial is currently only updated through Juno; the DB
backup procedure for Kilo is similar. The link will be updated when the
reference is updated.

For more information and examples about the downgrade operation, please see
the `Downgrade tutorial`_.

TODO Items
-----------
@@ -250,3 +279,5 @@ TODO Items
.. _OpenStack and SQLAlchemy: https://wiki.openstack.org/wiki/OpenStack_and_SQLAlchemy#Migrations
.. _What does Autogenerate Detect: http://alembic.readthedocs.org/en/latest/autogenerate.html#what-does-autogenerate-detect-and-what-does-it-not-detect
.. _SQLAlchemy's server_default: http://docs.sqlalchemy.org/en/latest/core/metadata.html?highlight=column#sqlalchemy.schema.Column.params.server_default
.. _Backup tutorial: http://docs.openstack.org/openstack-ops/content/upgrade-icehouse-juno.html#upgrade-icehouse-juno-backup
.. _Downgrade tutorial: http://docs.openstack.org/openstack-ops/content/ops_upgrades-roll-back.html