Fixed database migration script issues

The db downgrade scripts currently do not drop foreign key
constraints, which causes errors when a script then tries to drop the
referenced tables on downgrade.

This commit addresses those issues in the migration scripts and also
introduces a new test script that exercises the migrations, so that
such issues can be caught in the future. The new test script is based
on the existing migration test script implemented in Nova.

Change-Id: I240d81afc3e43fd3711de8c156cfb43fd14850bf
Closes-Bug: #1347114
Simon Chang 2014-08-12 12:24:54 -04:00
parent 5e7675db4a
commit 06e0aa25a0
12 changed files with 473 additions and 37 deletions
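
The common pattern in the fixed downgrade scripts below is to reflect and drop
any foreign key constraints first, then drop the dependent column, and finally
drop child tables before their parents. A minimal sketch of that pattern, using
the helpers this commit adds in trove/db/sqlalchemy/utils.py (the table and
column names here are only illustrative):

    from sqlalchemy.schema import MetaData

    from trove.db.sqlalchemy.migrate_repo.schema import drop_tables
    from trove.db.sqlalchemy.migrate_repo.schema import Table
    from trove.db.sqlalchemy import utils as db_utils


    def downgrade(migrate_engine):
        meta = MetaData()
        meta.bind = migrate_engine
        child = Table('child_table', meta, autoload=True)    # illustrative
        parent = Table('parent_table', meta, autoload=True)  # illustrative

        # MySQL will not drop a column or table that is still referenced by
        # a foreign key, so find and drop the constraint(s) first.
        constraint_names = db_utils.get_foreign_key_constraint_names(
            engine=migrate_engine,
            table='child_table',
            columns=['parent_id'],
            ref_table='parent_table',
            ref_columns=['id'])
        db_utils.drop_foreign_key_constraints(
            constraint_names=constraint_names,
            columns=[child.c.parent_id],
            ref_columns=[parent.c.id])

        # Only now is it safe to drop the column and the referenced table.
        child.drop_column('parent_id')
        drop_tables([parent])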

View File

@@ -209,6 +209,7 @@ if __name__ == "__main__":
from trove.tests.api.mgmt import instances_actions as mgmt_actions # noqa
from trove.tests.api.mgmt import storage # noqa
from trove.tests.api.mgmt import malformed_json # noqa
from trove.tests.db import migrations # noqa
except Exception as e:
print("Run tests failed: %s" % e)
traceback.print_exc()

View File

@@ -22,6 +22,7 @@ from trove.db.sqlalchemy.migrate_repo.schema import create_tables
from trove.db.sqlalchemy.migrate_repo.schema import drop_tables
from trove.db.sqlalchemy.migrate_repo.schema import String
from trove.db.sqlalchemy.migrate_repo.schema import Table
from trove.db.sqlalchemy import utils as db_utils
meta = MetaData()
@@ -65,9 +66,19 @@ def upgrade(migrate_engine):
def downgrade(migrate_engine):
meta.bind = migrate_engine
drop_tables([datastores, datastore_versions])
instances = Table('instances', meta, autoload=True)
constraint_names = db_utils.get_foreign_key_constraint_names(
engine=migrate_engine,
table='instances',
columns=['datastore_version_id'],
ref_table='datastore_versions',
ref_columns=['id'])
db_utils.drop_foreign_key_constraints(
constraint_names=constraint_names,
columns=[instances.c.datastore_version_id],
ref_columns=[datastore_versions.c.id])
instances.drop_column('datastore_version_id')
service_type = Column('service_type', String(36))
instances.create_column(service_type)
instances.update().values({'service_type': 'mysql'}).execute()
drop_tables([datastore_versions, datastores])

View File

@@ -14,17 +14,19 @@
# under the License.
from sqlalchemy import ForeignKey
from sqlalchemy.exc import OperationalError
from sqlalchemy.schema import Column
from sqlalchemy.schema import MetaData
from trove.db.sqlalchemy.migrate_repo.schema import create_tables
from trove.db.sqlalchemy.migrate_repo.schema import drop_tables
from trove.db.sqlalchemy.migrate_repo.schema import DateTime
from trove.db.sqlalchemy.migrate_repo.schema import Boolean
from trove.db.sqlalchemy.migrate_repo.schema import String
from trove.db.sqlalchemy.migrate_repo.schema import Table
from trove.db.sqlalchemy import utils as db_utils
from trove.openstack.common import log as logging
logger = logging.getLogger('trove.db.sqlalchemy.migrate_repo.schema')
meta = MetaData()
@@ -55,22 +57,25 @@ configuration_parameters = Table(
def upgrade(migrate_engine):
meta.bind = migrate_engine
# since the downgrade is a no-op, an upgrade after a downgrade will
# cause an exception because the tables already exist
# we will catch that case and log an info message
try:
create_tables([configurations])
create_tables([configuration_parameters])
instances = Table('instances', meta, autoload=True)
instances.create_column(Column('configuration_id', String(36),
ForeignKey("configurations.id")))
except OperationalError as e:
logger.info(e)
create_tables([configurations])
create_tables([configuration_parameters])
instances = Table('instances', meta, autoload=True)
instances.create_column(Column('configuration_id', String(36),
ForeignKey("configurations.id")))
def downgrade(migrate_engine):
meta.bind = migrate_engine
# Not dropping the tables for concern if rollback needed would cause
# consumers to recreate configurations.
instances = Table('instances', meta, autoload=True)
constraint_names = db_utils.get_foreign_key_constraint_names(
engine=migrate_engine,
table='instances',
columns=['configuration_id'],
ref_table='configurations',
ref_columns=['id'])
db_utils.drop_foreign_key_constraints(
constraint_names=constraint_names,
columns=[instances.c.configuration_id],
ref_columns=[configurations.c.id])
instances.drop_column('configuration_id')
drop_tables([configuration_parameters, configurations])

View File

@@ -58,4 +58,4 @@ def upgrade(migrate_engine):
def downgrade(migrate_engine):
meta.bind = migrate_engine
drop_tables([capabilities, capability_overrides])
drop_tables([capability_overrides, capabilities])

View File

@@ -18,6 +18,7 @@ from sqlalchemy.schema import MetaData
from trove.db.sqlalchemy.migrate_repo.schema import String
from trove.db.sqlalchemy.migrate_repo.schema import Table
from trove.db.sqlalchemy import utils as db_utils
def upgrade(migrate_engine):
@@ -34,4 +35,15 @@ def downgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
backups = Table('backups', meta, autoload=True)
datastore_versions = Table('datastore_versions', meta, autoload=True)
constraint_names = db_utils.get_foreign_key_constraint_names(
engine=migrate_engine,
table='backups',
columns=['datastore_version_id'],
ref_table='datastore_versions',
ref_columns=['id'])
db_utils.drop_foreign_key_constraints(
constraint_names=constraint_names,
columns=[backups.c.datastore_version_id],
ref_columns=[datastore_versions.c.id])
backups.drop_column('datastore_version_id')

View File

@@ -18,6 +18,8 @@ from sqlalchemy.schema import ForeignKey
from trove.db.sqlalchemy.migrate_repo.schema import String
from trove.db.sqlalchemy.migrate_repo.schema import Table
from trove.db.sqlalchemy import utils as db_utils
COLUMN_NAME = 'slave_of_id'
@@ -35,4 +37,14 @@ def downgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
instances = Table('instances', meta, autoload=True)
constraint_names = db_utils.get_foreign_key_constraint_names(
engine=migrate_engine,
table='instances',
columns=[COLUMN_NAME],
ref_table='instances',
ref_columns=['id'])
db_utils.drop_foreign_key_constraints(
constraint_names=constraint_names,
columns=[instances.c.slave_of_id],
ref_columns=[instances.c.id])
instances.drop_column(COLUMN_NAME)

View File

@@ -14,19 +14,21 @@
# under the License.
from sqlalchemy import ForeignKey
from sqlalchemy.exc import OperationalError
from sqlalchemy.schema import Column
from sqlalchemy.schema import Index
from sqlalchemy.schema import MetaData
from trove.db.sqlalchemy.migrate_repo.schema import Boolean
from trove.db.sqlalchemy.migrate_repo.schema import create_tables
from trove.db.sqlalchemy.migrate_repo.schema import drop_tables
from trove.db.sqlalchemy.migrate_repo.schema import DateTime
from trove.db.sqlalchemy.migrate_repo.schema import Integer
from trove.db.sqlalchemy.migrate_repo.schema import String
from trove.db.sqlalchemy.migrate_repo.schema import Table
from trove.db.sqlalchemy import utils as db_utils
from trove.openstack.common import log as logging
logger = logging.getLogger('trove.db.sqlalchemy.migrate_repo.schema')
meta = MetaData()
@@ -53,25 +55,44 @@ def upgrade(migrate_engine):
Table('datastores', meta, autoload=True)
Table('datastore_versions', meta, autoload=True)
instances = Table('instances', meta, autoload=True)
# since the downgrade is a no-op, an upgrade after a downgrade will
# cause an exception because the tables already exist
# we will catch that case and log an info message
try:
create_tables([clusters])
instances.create_column(Column('cluster_id', String(36),
ForeignKey("clusters.id")))
instances.create_column(Column('shard_id', String(36)))
instances.create_column(Column('type', String(64)))
cluster_id_idx = Index("instances_cluster_id", instances.c.cluster_id)
cluster_id_idx.create()
except OperationalError as e:
logger.info(e)
create_tables([clusters])
instances.create_column(Column('cluster_id', String(36),
ForeignKey("clusters.id")))
instances.create_column(Column('shard_id', String(36)))
instances.create_column(Column('type', String(64)))
cluster_id_idx = Index("instances_cluster_id", instances.c.cluster_id)
cluster_id_idx.create()
def downgrade(migrate_engine):
meta.bind = migrate_engine
# not dropping the table on a rollback because the cluster
# assets will still exist
datastore_versions = Table('datastore_versions', meta, autoload=True)
constraint_names = db_utils.get_foreign_key_constraint_names(
engine=migrate_engine,
table='clusters',
columns=['datastore_version_id'],
ref_table='datastore_versions',
ref_columns=['id'])
db_utils.drop_foreign_key_constraints(
constraint_names=constraint_names,
columns=[clusters.c.datastore_version_id],
ref_columns=[datastore_versions.c.id])
instances = Table('instances', meta, autoload=True)
constraint_names = db_utils.get_foreign_key_constraint_names(
engine=migrate_engine,
table='instances',
columns=['cluster_id'],
ref_table='clusters',
ref_columns=['id'])
db_utils.drop_foreign_key_constraints(
constraint_names=constraint_names,
columns=[instances.c.cluster_id],
ref_columns=[clusters.c.id])
instances.drop_column('cluster_id')
instances.drop_column('shard_id')
instances.drop_column('type')
drop_tables([clusters])

View File

@@ -0,0 +1,54 @@
# Copyright 2014 Tesora Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from migrate.changeset.constraint import ForeignKeyConstraint
from sqlalchemy.engine import reflection
def get_foreign_key_constraint_names(engine, table, columns,
ref_table, ref_columns):
"""Retrieve the names of foreign key constraints that match
the given criteria.
:param engine: The sqlalchemy engine to be used.
:param table: Name of the child table.
:param columns: List of the foreign key columns.
:param ref_table: Name of the parent table.
:param ref_columns: List of the referenced columns.
:return: List of foreign key constraint names.
"""
constraint_names = []
inspector = reflection.Inspector.from_engine(engine)
fks = inspector.get_foreign_keys(table)
for fk in fks:
if (fk['referred_table'] == ref_table
and fk['constrained_columns'] == columns
and fk['referred_columns'] == ref_columns):
constraint_names.append(fk['name'])
return constraint_names
def drop_foreign_key_constraints(constraint_names, columns,
ref_columns):
"""Drop the foreign key constraints that match the given
criteria.
:param constraint_names: List of foreign key constraint names
:param columns: List of the foreign key columns.
:param ref_columns: List of the referenced columns.
"""
for constraint_name in constraint_names:
fkey_constraint = ForeignKeyConstraint(columns=columns,
refcolumns=ref_columns,
name=constraint_name)
fkey_constraint.drop()
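
For reference, SQLAlchemy's inspector (used by get_foreign_key_constraint_names
above) describes each reflected foreign key as a dict with 'name',
'constrained_columns', 'referred_table' and 'referred_columns' keys, and the
helper simply collects the names of the entries that match. A rough usage
sketch (the connection URI and the constraint name shown are illustrative):

    from sqlalchemy import create_engine

    from trove.db.sqlalchemy import utils as db_utils

    engine = create_engine('mysql+mysqldb://user:pass@localhost/trove')

    # A reflected foreign key looks roughly like:
    # {'name': 'instances_ibfk_1',
    #  'constrained_columns': ['datastore_version_id'],
    #  'referred_table': 'datastore_versions',
    #  'referred_columns': ['id']}
    names = db_utils.get_foreign_key_constraint_names(
        engine=engine,
        table='instances',
        columns=['datastore_version_id'],
        ref_table='datastore_versions',
        ref_columns=['id'])
    # e.g. ['instances_ibfk_1'] on MySQL, or [] if no matching constraint exists

drop_foreign_key_constraints then wraps each returned name in a migrate
ForeignKeyConstraint and calls drop() on it, which is why callers also pass in
the Column objects on both sides of the relationship.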

View File

View File

@@ -0,0 +1,210 @@
# Copyright 2014 Tesora Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests database migration scripts for mysql.
To run the tests, you'll need to set up a db user named 'openstack_citest'
with password 'openstack_citest' on localhost. This user needs db
admin rights (i.e. create/drop database).
"""
import glob
import os
from migrate.versioning import repository
import migrate.versioning.api as migration_api
from proboscis import after_class
from proboscis import before_class
from proboscis import test
from proboscis import SkipTest
from proboscis.asserts import assert_equal
from proboscis.asserts import assert_true
import sqlalchemy
import sqlalchemy.exc
import trove.db.sqlalchemy.migrate_repo
from trove.openstack.common.gettextutils import _
from trove.openstack.common import log as logging
from trove.openstack.common import processutils
from trove.tests.util import event_simulator
GROUP = "dbaas.db.migrations"
LOG = logging.getLogger(__name__)
@test(groups=[GROUP])
class ProjectTestCase(object):
"""Test migration scripts integrity."""
@test
def test_all_migrations_have_downgrade(self):
topdir = os.path.normpath(os.path.join(os.path.dirname(__file__),
os.pardir, os.pardir, os.pardir))
py_glob = os.path.join(topdir, "trove", "db", "sqlalchemy",
"migrate_repo", "versions", "*.py")
missing_downgrade = []
for path in glob.iglob(py_glob):
has_upgrade = False
has_downgrade = False
with open(path, "r") as f:
for line in f:
if 'def upgrade(' in line:
has_upgrade = True
if 'def downgrade(' in line:
has_downgrade = True
if has_upgrade and not has_downgrade:
fname = os.path.basename(path)
missing_downgrade.append(fname)
helpful_msg = (_("The following migration scripts are missing a "
"downgrade implementation:\n\t%s") %
'\n\t'.join(sorted(missing_downgrade)))
assert_true(not missing_downgrade, helpful_msg)
@test(depends_on_classes=[ProjectTestCase],
groups=[GROUP])
class TestTroveMigrations(object):
"""Test sqlalchemy-migrate migrations."""
USER = "openstack_citest"
PASSWD = "openstack_citest"
DATABASE = "openstack_citest"
@before_class
def setUp(self):
event_simulator.allowable_empty_sleeps = 1
@after_class
def tearDown(self):
event_simulator.allowable_empty_sleeps = 0
def __init__(self):
self.MIGRATE_FILE = trove.db.sqlalchemy.migrate_repo.__file__
self.REPOSITORY = repository.Repository(
os.path.abspath(os.path.dirname(self.MIGRATE_FILE)))
self.INIT_VERSION = 0
def _get_connect_string(self, backend, database=None):
"""Get database connection string."""
args = {'backend': backend,
'user': self.USER,
'passwd': self.PASSWD}
template = "%(backend)s://%(user)s:%(passwd)s@localhost"
if database is not None:
args['database'] = database
template += "/%(database)s"
return template % args
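# e.g. _get_connect_string("mysql+mysqldb", database="openstack_citest") returns
# "mysql+mysqldb://openstack_citest:openstack_citest@localhost/openstack_citest"
# (illustrative; user and password come from the class attributes above)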
def _is_backend_avail(self, backend):
"""Check database backend availability."""
connect_uri = self._get_connect_string(backend)
engine = sqlalchemy.create_engine(connect_uri)
try:
connection = engine.connect()
except Exception:
# any error here means the database backend is not available
return False
else:
connection.close()
return True
finally:
if engine is not None:
engine.dispose()
def _execute_cmd(self, cmd=None):
"""Shell out and run the given command."""
out, err = processutils.trycmd(cmd, shell=True)
assert_equal('', err,
"Failed to run: '%(cmd)s' "
"Output: '%(stdout)s' "
"Error: '%(stderr)s'" %
{'cmd': cmd, 'stdout': out, 'stderr': err})
def _reset_mysql(self):
"""Reset the MySQL test database
Drop the MySQL test database if it already exists and create
a new one.
"""
sql = ("drop database if exists %(database)s; "
"create database %(database)s;" % {'database': self.DATABASE})
cmd = ("mysql -u \"%(user)s\" -p%(password)s -h %(host)s "
"-e \"%(sql)s\"" % {'user': self.USER, 'password': self.PASSWD,
'host': 'localhost', 'sql': sql})
self._execute_cmd(cmd)
@test
def test_mysql_migration(self):
db_backend = "mysql+mysqldb"
# Gracefully skip this test if the developer does not have
# MySQL running. MySQL should always be available on
# the infrastructure.
if not self._is_backend_avail(db_backend):
raise SkipTest("MySQL is not available.")
self._reset_mysql()
connect_string = self._get_connect_string(db_backend, self.DATABASE)
engine = sqlalchemy.create_engine(connect_string)
self._walk_versions(engine)
engine.dispose()
def _walk_versions(self, engine=None):
"""Walk through and test the migration scripts
Determine latest version script from the repo, then
upgrade from 1 through to the latest, then downgrade from
the latest back to 1, with no data in the databases. This
just checks that the schema itself upgrades and downgrades
successfully.
"""
# Place the database under version control
migration_api.version_control(engine, self.REPOSITORY,
self.INIT_VERSION)
assert_equal(self.INIT_VERSION,
migration_api.db_version(engine, self.REPOSITORY))
LOG.debug('Latest version is %s' % self.REPOSITORY.latest)
versions = range(self.INIT_VERSION + 1, self.REPOSITORY.latest + 1)
# Snake walk from version 1 to the latest, testing the upgrade paths.
# upgrade -> downgrade -> upgrade
for version in versions:
self._migrate_up(engine, version)
self._migrate_down(engine, version - 1)
self._migrate_up(engine, version)
# Now snake walk back down to version 1 from the latest, testing the
# downgrade paths.
# downgrade -> upgrade -> downgrade
for version in reversed(versions):
self._migrate_down(engine, version - 1)
self._migrate_up(engine, version)
self._migrate_down(engine, version - 1)
def _migrate_down(self, engine, version):
"""Migrate down to an old version of database."""
migration_api.downgrade(engine, self.REPOSITORY, version)
assert_equal(version,
migration_api.db_version(engine, self.REPOSITORY))
def _migrate_up(self, engine, version):
"""Migrate up to a new version of database."""
migration_api.upgrade(engine, self.REPOSITORY, version)
assert_equal(version,
migration_api.db_version(engine, self.REPOSITORY))

View File

View File

@@ -0,0 +1,110 @@
# Copyright 2014 Tesora Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from mock import call
from mock import Mock
from mock import patch
from sqlalchemy.engine import reflection
from sqlalchemy.schema import Column
import testtools
from trove.db.sqlalchemy.migrate_repo.schema import String
from trove.db.sqlalchemy import utils as db_utils
class TestDbMigrationUtils(testtools.TestCase):
def setUp(self):
super(TestDbMigrationUtils, self).setUp()
def tearDown(self):
super(TestDbMigrationUtils, self).tearDown()
@patch.object(reflection.Inspector, 'from_engine')
def test_get_foreign_key_constraint_names_single_match(self,
mock_inspector):
mock_engine = Mock()
(mock_inspector.return_value.
get_foreign_keys.return_value) = [{'constrained_columns': ['col1'],
'referred_table': 'ref_table1',
'referred_columns': ['ref_col1'],
'name': 'constraint1'},
{'constrained_columns': ['col2'],
'referred_table': 'ref_table2',
'referred_columns': ['ref_col2'],
'name': 'constraint2'}]
ret_val = db_utils.get_foreign_key_constraint_names(mock_engine,
'table1',
['col1'],
'ref_table1',
['ref_col1'])
self.assertEqual(['constraint1'], ret_val)
@patch.object(reflection.Inspector, 'from_engine')
def test_get_foreign_key_constraint_names_multi_match(self,
mock_inspector):
mock_engine = Mock()
(mock_inspector.return_value.
get_foreign_keys.return_value) = [
{'constrained_columns': ['col1'],
'referred_table': 'ref_table1',
'referred_columns': ['ref_col1'],
'name': 'constraint1'},
{'constrained_columns': ['col2', 'col3'],
'referred_table': 'ref_table1',
'referred_columns': ['ref_col2', 'ref_col3'],
'name': 'constraint2'},
{'constrained_columns': ['col2', 'col3'],
'referred_table': 'ref_table1',
'referred_columns': ['ref_col2', 'ref_col3'],
'name': 'constraint3'},
{'constrained_columns': ['col4'],
'referred_table': 'ref_table2',
'referred_columns': ['ref_col4'],
'name': 'constraint4'}]
ret_val = db_utils.get_foreign_key_constraint_names(
mock_engine, 'table1', ['col2', 'col3'],
'ref_table1', ['ref_col2', 'ref_col3'])
self.assertEqual(['constraint2', 'constraint3'], ret_val)
@patch.object(reflection.Inspector, 'from_engine')
def test_get_foreign_key_constraint_names_no_match(self, mock_inspector):
mock_engine = Mock()
(mock_inspector.return_value.
get_foreign_keys.return_value) = []
ret_val = db_utils.get_foreign_key_constraint_names(mock_engine,
'table1',
['col1'],
'ref_table1',
['ref_col1'])
self.assertEqual([], ret_val)
@patch('trove.db.sqlalchemy.utils.ForeignKeyConstraint')
def test_drop_foreign_key_constraints(self, mock_constraint):
test_columns = [Column('col1', String(5)),
Column('col2', String(5))]
test_refcolumns = [Column('ref_col1', String(5)),
Column('ref_col2', String(5))]
test_constraint_names = ['constraint1', 'constraint2']
db_utils.drop_foreign_key_constraints(test_constraint_names,
test_columns,
test_refcolumns)
expected = [call(columns=test_columns,
refcolumns=test_refcolumns,
name='constraint1'),
call(columns=test_columns,
refcolumns=test_refcolumns,
name='constraint2')]
self.assertEqual(mock_constraint.call_args_list, expected)