Merge "Fix tests to work with mysql+postgres concurrently"

This commit is contained in:
Jenkins 2013-10-29 20:09:21 +00:00 committed by Gerrit Code Review
commit b328978a07
6 changed files with 398 additions and 190 deletions

View File

@ -1,7 +1,7 @@
[DEFAULT] [DEFAULT]
test_command=OS_STDOUT_CAPTURE=${OS_STDOUT_CAPTURE:-1} \ test_command=OS_STDOUT_CAPTURE=${OS_STDOUT_CAPTURE:-1} \
OS_STDERR_CAPTURE=${OS_STDERR_CAPTURE:-1} \ OS_STDERR_CAPTURE=${OS_STDERR_CAPTURE:-1} \
OS_TEST_TIMEOUT=${OS_TEST_TIMEOUT:-60} \ OS_TEST_TIMEOUT=${OS_TEST_TIMEOUT:-160} \
${PYTHON:-python} -m subunit.run discover -t ./ ./nova/tests $LISTOPT $IDOPTION ${PYTHON:-python} -m subunit.run discover -t ./ ./nova/tests $LISTOPT $IDOPTION
test_id_option=--load-list $IDFILE test_id_option=--load-list $IDFILE

View File

@ -289,6 +289,8 @@ def create_shadow_table(migrate_engine, table_name=None, table=None,
column_instance is instance of Column. These params column_instance is instance of Column. These params
are required only for columns that have unsupported are required only for columns that have unsupported
types by sqlite. For example BigInteger. types by sqlite. For example BigInteger.
:returns: The created shadow_table object.
""" """
meta = MetaData(bind=migrate_engine) meta = MetaData(bind=migrate_engine)
@ -316,6 +318,7 @@ def create_shadow_table(migrate_engine, table_name=None, table=None,
mysql_engine='InnoDB') mysql_engine='InnoDB')
try: try:
shadow_table.create() shadow_table.create()
return shadow_table
except (OperationalError, ProgrammingError): except (OperationalError, ProgrammingError):
LOG.info(repr(shadow_table)) LOG.info(repr(shadow_table))
LOG.exception(_('Exception while creating table.')) LOG.exception(_('Exception while creating table.'))

View File

@ -42,7 +42,7 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
"""Class for testing utils that are used in db migrations.""" """Class for testing utils that are used in db migrations."""
def test_utils_drop_unique_constraint(self): def test_utils_drop_unique_constraint(self):
table_name = "__test_tmp_table__" table_name = "test_utils_drop_unique_constraint"
uc_name = 'uniq_foo' uc_name = 'uniq_foo'
values = [ values = [
{'id': 1, 'a': 3, 'foo': 10}, {'id': 1, 'a': 3, 'foo': 10},
@ -84,8 +84,12 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
test_table.drop() test_table.drop()
def test_util_drop_unique_constraint_with_not_supported_sqlite_type(self): def test_util_drop_unique_constraint_with_not_supported_sqlite_type(self):
if 'sqlite' in self.engines:
engine = self.engines['sqlite']
meta = MetaData(bind=engine)
table_name = "__test_tmp_table__" table_name = ("test_util_drop_unique_constraint_with_not_supported"
"_sqlite_type")
uc_name = 'uniq_foo' uc_name = 'uniq_foo'
values = [ values = [
{'id': 1, 'a': 3, 'foo': 10}, {'id': 1, 'a': 3, 'foo': 10},
@ -93,9 +97,6 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
{'id': 3, 'a': 1, 'foo': 30} {'id': 3, 'a': 1, 'foo': 30}
] ]
engine = self.engines['sqlite']
meta = MetaData(bind=engine)
test_table = Table(table_name, meta, test_table = Table(table_name, meta,
Column('id', Integer, primary_key=True, Column('id', Integer, primary_key=True,
nullable=False), nullable=False),
@ -117,7 +118,8 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
# instance of sqlalchemy.Column. # instance of sqlalchemy.Column.
self.assertRaises(exception.NovaException, self.assertRaises(exception.NovaException,
utils.drop_unique_constraint, utils.drop_unique_constraint,
engine, table_name, uc_name, 'foo', foo=Integer()) engine, table_name, uc_name, 'foo',
foo=Integer())
foo = Column('foo', CustomType, default=0) foo = Column('foo', CustomType, default=0)
utils.drop_unique_constraint(engine, table_name, uc_name, 'foo', utils.drop_unique_constraint(engine, table_name, uc_name, 'foo',
@ -168,7 +170,7 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
return test_table, values return test_table, values
def test_drop_old_duplicate_entries_from_table(self): def test_drop_old_duplicate_entries_from_table(self):
table_name = "__test_tmp_table__" table_name = "test_drop_old_duplicate_entries_from_table"
for key, engine in self.engines.items(): for key, engine in self.engines.items():
meta = MetaData() meta = MetaData()
@ -195,9 +197,10 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
self.assertEqual(len(real_ids), len(expected_ids)) self.assertEqual(len(real_ids), len(expected_ids))
for id_ in expected_ids: for id_ in expected_ids:
self.assertIn(id_, real_ids) self.assertIn(id_, real_ids)
test_table.drop()
def test_drop_old_duplicate_entries_from_table_soft_delete(self): def test_drop_old_duplicate_entries_from_table_soft_delete(self):
table_name = "__test_tmp_table__" table_name = "test_drop_old_duplicate_entries_from_table_soft_delete"
for key, engine in self.engines.items(): for key, engine in self.engines.items():
meta = MetaData() meta = MetaData()
@ -237,9 +240,10 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
len(values) - len(row_ids)) len(values) - len(row_ids))
for value in soft_deleted_values: for value in soft_deleted_values:
self.assertIn(value['id'], deleted_rows_ids) self.assertIn(value['id'], deleted_rows_ids)
table.drop()
def test_check_shadow_table(self): def test_check_shadow_table(self):
table_name = 'abc' table_name = 'test_check_shadow_table'
for key, engine in self.engines.items(): for key, engine in self.engines.items():
meta = MetaData() meta = MetaData()
meta.bind = engine meta.bind = engine
@ -274,8 +278,11 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
self.assertRaises(exception.NovaException, self.assertRaises(exception.NovaException,
utils.check_shadow_table, engine, table_name) utils.check_shadow_table, engine, table_name)
table.drop()
shadow_table.drop()
def test_check_shadow_table_different_types(self): def test_check_shadow_table_different_types(self):
table_name = 'abc' table_name = 'test_check_shadow_table_different_types'
for key, engine in self.engines.items(): for key, engine in self.engines.items():
meta = MetaData() meta = MetaData()
meta.bind = engine meta.bind = engine
@ -292,8 +299,11 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
self.assertRaises(exception.NovaException, self.assertRaises(exception.NovaException,
utils.check_shadow_table, engine, table_name) utils.check_shadow_table, engine, table_name)
table.drop()
shadow_table.drop()
def test_check_shadow_table_with_unsupported_type(self): def test_check_shadow_table_with_unsupported_type(self):
table_name = 'abc' table_name = 'test_check_shadow_table_with_unsupported_type'
engine = self.engines['sqlite'] engine = self.engines['sqlite']
meta = MetaData(bind=engine) meta = MetaData(bind=engine)
@ -309,9 +319,10 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
Column('c', CustomType)) Column('c', CustomType))
shadow_table.create() shadow_table.create()
self.assertTrue(utils.check_shadow_table(engine, table_name)) self.assertTrue(utils.check_shadow_table(engine, table_name))
shadow_table.drop()
def test_create_shadow_table_by_table_instance(self): def test_create_shadow_table_by_table_instance(self):
table_name = 'abc' table_name = 'test_create_shadow_table_by_table_instance'
for key, engine in self.engines.items(): for key, engine in self.engines.items():
meta = MetaData() meta = MetaData()
meta.bind = engine meta.bind = engine
@ -320,11 +331,13 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
Column('a', Integer), Column('a', Integer),
Column('b', String(256))) Column('b', String(256)))
table.create() table.create()
utils.create_shadow_table(engine, table=table) shadow_table = utils.create_shadow_table(engine, table=table)
self.assertTrue(utils.check_shadow_table(engine, table_name)) self.assertTrue(utils.check_shadow_table(engine, table_name))
table.drop()
shadow_table.drop()
def test_create_shadow_table_by_name(self): def test_create_shadow_table_by_name(self):
table_name = 'abc' table_name = 'test_create_shadow_table_by_name'
for key, engine in self.engines.items(): for key, engine in self.engines.items():
meta = MetaData() meta = MetaData()
meta.bind = engine meta.bind = engine
@ -334,11 +347,15 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
Column('a', Integer), Column('a', Integer),
Column('b', String(256))) Column('b', String(256)))
table.create() table.create()
utils.create_shadow_table(engine, table_name=table_name) shadow_table = utils.create_shadow_table(engine,
table_name=table_name)
self.assertTrue(utils.check_shadow_table(engine, table_name)) self.assertTrue(utils.check_shadow_table(engine, table_name))
table.drop()
shadow_table.drop()
def test_create_shadow_table_not_supported_type(self): def test_create_shadow_table_not_supported_type(self):
table_name = 'abc' if 'sqlite' in self.engines:
table_name = 'test_create_shadow_table_not_supported_type'
engine = self.engines['sqlite'] engine = self.engines['sqlite']
meta = MetaData() meta = MetaData()
meta.bind = engine meta.bind = engine
@ -350,9 +367,13 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
utils.create_shadow_table, utils.create_shadow_table,
engine, table_name=table_name) engine, table_name=table_name)
utils.create_shadow_table(engine, table_name=table_name, shadow_table = utils.create_shadow_table(engine,
a=Column('a', CustomType())) table_name=table_name,
a=Column('a', CustomType())
)
self.assertTrue(utils.check_shadow_table(engine, table_name)) self.assertTrue(utils.check_shadow_table(engine, table_name))
table.drop()
shadow_table.drop()
def test_create_shadow_both_table_and_table_name_are_none(self): def test_create_shadow_both_table_and_table_name_are_none(self):
for key, engine in self.engines.items(): for key, engine in self.engines.items():
@ -362,7 +383,8 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
utils.create_shadow_table, engine) utils.create_shadow_table, engine)
def test_create_shadow_both_table_and_table_name_are_specified(self): def test_create_shadow_both_table_and_table_name_are_specified(self):
table_name = 'abc' table_name = ('test_create_shadow_both_table_and_table_name_are_'
'specified')
for key, engine in self.engines.items(): for key, engine in self.engines.items():
meta = MetaData() meta = MetaData()
meta.bind = engine meta.bind = engine
@ -373,9 +395,10 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
self.assertRaises(exception.NovaException, self.assertRaises(exception.NovaException,
utils.create_shadow_table, utils.create_shadow_table,
engine, table=table, table_name=table_name) engine, table=table, table_name=table_name)
table.drop()
def test_create_duplicate_shadow_table(self): def test_create_duplicate_shadow_table(self):
table_name = 'abc' table_name = 'test_create_duplicate_shadow_table'
for key, engine in self.engines.items(): for key, engine in self.engines.items():
meta = MetaData() meta = MetaData()
meta.bind = engine meta.bind = engine
@ -383,13 +406,16 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
Column('id', Integer, primary_key=True), Column('id', Integer, primary_key=True),
Column('a', Integer)) Column('a', Integer))
table.create() table.create()
utils.create_shadow_table(engine, table_name=table_name) shadow_table = utils.create_shadow_table(engine,
table_name=table_name)
self.assertRaises(exception.ShadowTableExists, self.assertRaises(exception.ShadowTableExists,
utils.create_shadow_table, utils.create_shadow_table,
engine, table_name=table_name) engine, table_name=table_name)
table.drop()
shadow_table.drop()
def test_change_deleted_column_type_doesnt_drop_index(self): def test_change_deleted_column_type_doesnt_drop_index(self):
table_name = 'abc' table_name = 'test_change_deleted_column_type_doesnt_drop_index'
for key, engine in self.engines.items(): for key, engine in self.engines.items():
meta = MetaData(bind=engine) meta = MetaData(bind=engine)
@ -420,9 +446,10 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
self.assertIn(name, indexes) self.assertIn(name, indexes)
self.assertEqual(set(index['column_names']), self.assertEqual(set(index['column_names']),
set(indexes[name])) set(indexes[name]))
table.drop()
def test_change_deleted_column_type_to_id_type_integer(self): def test_change_deleted_column_type_to_id_type_integer(self):
table_name = 'abc' table_name = 'test_change_deleted_column_type_to_id_type_integer'
for key, engine in self.engines.items(): for key, engine in self.engines.items():
meta = MetaData() meta = MetaData()
meta.bind = engine meta.bind = engine
@ -434,9 +461,10 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
table = utils.get_table(engine, table_name) table = utils.get_table(engine, table_name)
self.assertIsInstance(table.c.deleted.type, Integer) self.assertIsInstance(table.c.deleted.type, Integer)
table.drop()
def test_change_deleted_column_type_to_id_type_string(self): def test_change_deleted_column_type_to_id_type_string(self):
table_name = 'abc' table_name = 'test_change_deleted_column_type_to_id_type_string'
for key, engine in self.engines.items(): for key, engine in self.engines.items():
meta = MetaData() meta = MetaData()
meta.bind = engine meta.bind = engine
@ -448,9 +476,11 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
table = utils.get_table(engine, table_name) table = utils.get_table(engine, table_name)
self.assertIsInstance(table.c.deleted.type, String) self.assertIsInstance(table.c.deleted.type, String)
table.drop()
def test_change_deleted_column_type_to_id_type_custom(self): def test_change_deleted_column_type_to_id_type_custom(self):
table_name = 'abc' if 'sqlite' in self.engines:
table_name = 'test_change_deleted_column_type_to_id_type_custom'
engine = self.engines['sqlite'] engine = self.engines['sqlite']
meta = MetaData() meta = MetaData()
meta.bind = engine meta.bind = engine
@ -473,9 +503,10 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
# but sqlalchemy will set it to NullType. # but sqlalchemy will set it to NullType.
self.assertIsInstance(table.c.foo.type, NullType) self.assertIsInstance(table.c.foo.type, NullType)
self.assertIsInstance(table.c.deleted.type, Integer) self.assertIsInstance(table.c.deleted.type, Integer)
table.drop()
def test_change_deleted_column_type_to_boolean(self): def test_change_deleted_column_type_to_boolean(self):
table_name = 'abc' table_name = 'test_change_deleted_column_type_to_boolean'
for key, engine in self.engines.items(): for key, engine in self.engines.items():
meta = MetaData() meta = MetaData()
meta.bind = engine meta.bind = engine
@ -489,9 +520,12 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
table = utils.get_table(engine, table_name) table = utils.get_table(engine, table_name)
expected_type = Boolean if key != "mysql" else mysql.TINYINT expected_type = Boolean if key != "mysql" else mysql.TINYINT
self.assertIsInstance(table.c.deleted.type, expected_type) self.assertIsInstance(table.c.deleted.type, expected_type)
table.drop()
def test_change_deleted_column_type_to_boolean_type_custom(self): def test_change_deleted_column_type_to_boolean_type_custom(self):
table_name = 'abc' if 'sqlite' in self.engines:
table_name = \
'test_change_deleted_column_type_to_boolean_type_custom'
engine = self.engines['sqlite'] engine = self.engines['sqlite']
meta = MetaData() meta = MetaData()
meta.bind = engine meta.bind = engine
@ -514,20 +548,25 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
# but sqlalchemy will set it to NullType. # but sqlalchemy will set it to NullType.
self.assertIsInstance(table.c.foo.type, NullType) self.assertIsInstance(table.c.foo.type, NullType)
self.assertIsInstance(table.c.deleted.type, Boolean) self.assertIsInstance(table.c.deleted.type, Boolean)
table.drop()
def test_drop_unique_constraint_in_sqlite_fk_recreate(self): def test_drop_unique_constraint_in_sqlite_fk_recreate(self):
if 'sqlite' in self.engines:
engine = self.engines['sqlite'] engine = self.engines['sqlite']
meta = MetaData() meta = MetaData()
meta.bind = engine meta.bind = engine
parent_table = Table('table0', meta, parent_table_name = ('test_drop_unique_constraint_in_sqlite_fk_'
'recreate_parent_table')
parent_table = Table(parent_table_name, meta,
Column('id', Integer, primary_key=True), Column('id', Integer, primary_key=True),
Column('foo', Integer)) Column('foo', Integer))
parent_table.create() parent_table.create()
table_name = 'table1' table_name = 'test_drop_unique_constraint_in_sqlite_fk_recreate'
table = Table(table_name, meta, table = Table(table_name, meta,
Column('id', Integer, primary_key=True), Column('id', Integer, primary_key=True),
Column('baz', Integer), Column('baz', Integer),
Column('bar', Integer, ForeignKey("table0.id")), Column('bar', Integer,
ForeignKey(parent_table_name + ".id")),
UniqueConstraint('baz', name='constr1')) UniqueConstraint('baz', name='constr1'))
table.create() table.create()
utils.drop_unique_constraint(engine, table_name, 'constr1', 'baz') utils.drop_unique_constraint(engine, table_name, 'constr1', 'baz')
@ -536,6 +575,8 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
f_keys = insp.get_foreign_keys(table_name) f_keys = insp.get_foreign_keys(table_name)
self.assertEqual(len(f_keys), 1) self.assertEqual(len(f_keys), 1)
f_key = f_keys[0] f_key = f_keys[0]
self.assertEqual(f_key['referred_table'], 'table0') self.assertEqual(f_key['referred_table'], parent_table_name)
self.assertEqual(f_key['referred_columns'], ['id']) self.assertEqual(f_key['referred_columns'], ['id'])
self.assertEqual(f_key['constrained_columns'], ['bar']) self.assertEqual(f_key['constrained_columns'], ['bar'])
table.drop()
parent_table.drop()

View File

@ -1,9 +1,26 @@
[DEFAULT] [unit_tests]
# Set up any number of migration data stores you want, one # Set up any number of databases to test concurrently.
# The "name" used in the test is the config variable key. # The "name" used in the test is the config variable key.
#sqlite=sqlite:///test_migrations.db
# A few tests rely on one sqlite database with 'sqlite' as the key.
sqlite=sqlite:// sqlite=sqlite://
#mysql=mysql://root:@localhost/test_migrations #sqlitefile=sqlite:///test_migrations_utils.db
#postgresql=postgresql://user:pass@localhost/test_migrations #mysql=mysql+mysqldb://user:pass@localhost/test_migrations_utils
#postgresql=postgresql+psycopg2://user:pass@localhost/test_migrations_utils
[migration_dbs]
# Migration DB details are listed separately as they can't be connected to
# concurrently. These databases can't be the same as above
# Note, sqlite:// is in-memory and unique each time it is spawned.
# However, file-based sqlite databases are not unique.
sqlite=sqlite://
#sqlitefile=sqlite:///test_migrations.db
#mysql=mysql+mysqldb://user:pass@localhost/test_migrations
#postgresql=postgresql+psycopg2://user:pass@localhost/test_migrations
[walk_style] [walk_style]
snake_walk=yes snake_walk=yes
downgrade=yes

View File

@ -38,6 +38,8 @@ sudo -u postgres psql
postgres=# create user openstack_citest with createdb login password postgres=# create user openstack_citest with createdb login password
'openstack_citest'; 'openstack_citest';
postgres=# create database openstack_citest with owner openstack_citest; postgres=# create database openstack_citest with owner openstack_citest;
postgres=# create database openstack_baremetal_citest with owner
openstack_citest;
""" """
@ -156,7 +158,9 @@ class CommonTestsMixIn(object):
""" """
def test_walk_versions(self): def test_walk_versions(self):
for key, engine in self.engines.items(): for key, engine in self.engines.items():
self._walk_versions(engine, self.snake_walk) # We start each walk with a completely blank slate.
self._reset_database(key)
self._walk_versions(engine, self.snake_walk, self.downgrade)
def test_mysql_opportunistically(self): def test_mysql_opportunistically(self):
self._test_mysql_opportunistically() self._test_mysql_opportunistically()
@ -184,7 +188,18 @@ class CommonTestsMixIn(object):
class BaseMigrationTestCase(test.NoDBTestCase): class BaseMigrationTestCase(test.NoDBTestCase):
"""Base class fort testing migrations and migration utils.""" """Base class for testing migrations and migration utils. This sets up
and configures the databases to run tests against.
"""
# NOTE(jhesketh): It is expected that tests clean up after themselves.
# This is necessary for concurrency to allow multiple tests to work on
# one database.
# The full migration walk tests however do call the old _reset_databases()
# to throw away whatever was there so they need to operate on their own
# database that we know isn't accessed concurrently.
# Hence, BaseWalkMigrationTestCase overwrites the engine list.
USER = None USER = None
PASSWD = None PASSWD = None
DATABASE = None DATABASE = None
@ -204,13 +219,16 @@ class BaseMigrationTestCase(test.NoDBTestCase):
self.INIT_VERSION = 0 self.INIT_VERSION = 0
self.snake_walk = False self.snake_walk = False
self.downgrade = False
self.test_databases = {} self.test_databases = {}
self.migration = None self.migration = None
self.migration_api = None self.migration_api = None
def setUp(self): def setUp(self):
super(BaseMigrationTestCase, self).setUp() super(BaseMigrationTestCase, self).setUp()
self._load_config()
def _load_config(self):
# Load test databases from the config file. Only do this # Load test databases from the config file. Only do this
# once. No need to re-run this on each test... # once. No need to re-run this on each test...
LOG.debug('config_path is %s' % self.CONFIG_FILE_PATH) LOG.debug('config_path is %s' % self.CONFIG_FILE_PATH)
@ -218,10 +236,12 @@ class BaseMigrationTestCase(test.NoDBTestCase):
cp = ConfigParser.RawConfigParser() cp = ConfigParser.RawConfigParser()
try: try:
cp.read(self.CONFIG_FILE_PATH) cp.read(self.CONFIG_FILE_PATH)
defaults = cp.defaults() config = cp.options('unit_tests')
for key, value in defaults.items(): for key in config:
self.test_databases[key] = value self.test_databases[key] = cp.get('unit_tests', key)
self.snake_walk = cp.getboolean('walk_style', 'snake_walk') self.snake_walk = cp.getboolean('walk_style', 'snake_walk')
self.downgrade = cp.getboolean('walk_style', 'downgrade')
except ConfigParser.ParsingError as e: except ConfigParser.ParsingError as e:
self.fail("Failed to read test_migrations.conf config " self.fail("Failed to read test_migrations.conf config "
"file. Got error: %s" % e) "file. Got error: %s" % e)
@ -233,15 +253,9 @@ class BaseMigrationTestCase(test.NoDBTestCase):
for key, value in self.test_databases.items(): for key, value in self.test_databases.items():
self.engines[key] = sqlalchemy.create_engine(value) self.engines[key] = sqlalchemy.create_engine(value)
# We start each test case with a completely blank slate. # NOTE(jhesketh): We only need to make sure the databases are created
self._reset_databases() # not necessarily clean of tables.
self._create_databases()
def tearDown(self):
# We destroy the test data store between each test case,
# and recreate it, which ensures that we have no side-effects
# from the tests
self._reset_databases()
super(BaseMigrationTestCase, self).tearDown()
def execute_cmd(self, cmd=None): def execute_cmd(self, cmd=None):
status, output = commands.getstatusoutput(cmd) status, output = commands.getstatusoutput(cmd)
@ -273,12 +287,22 @@ class BaseMigrationTestCase(test.NoDBTestCase):
os.unsetenv('PGPASSWORD') os.unsetenv('PGPASSWORD')
os.unsetenv('PGUSER') os.unsetenv('PGUSER')
def _reset_databases(self): @utils.synchronized('mysql', external=True)
for key, engine in self.engines.items(): def _reset_mysql(self, conn_pieces):
conn_string = self.test_databases[key] # We can execute the MySQL client to destroy and re-create
conn_pieces = urlparse.urlparse(conn_string) # the MYSQL database, which is easier and less error-prone
engine.dispose() # than using SQLAlchemy to do this via MetaData...trust me.
if conn_string.startswith('sqlite'): (user, password, database, host) = \
get_mysql_connection_info(conn_pieces)
sql = ("drop database if exists %(database)s; "
"create database %(database)s;" % {'database': database})
cmd = ("mysql -u \"%(user)s\" %(password)s -h %(host)s "
"-e \"%(sql)s\"" % {'user': user, 'password': password,
'host': host, 'sql': sql})
self.execute_cmd(cmd)
@utils.synchronized('sqlite', external=True)
def _reset_sqlite(self, conn_pieces):
# We can just delete the SQLite database, which is # We can just delete the SQLite database, which is
# the easiest and cleanest solution # the easiest and cleanest solution
db_path = conn_pieces.path.strip('/') db_path = conn_pieces.path.strip('/')
@ -286,22 +310,101 @@ class BaseMigrationTestCase(test.NoDBTestCase):
os.unlink(db_path) os.unlink(db_path)
# No need to recreate the SQLite DB. SQLite will # No need to recreate the SQLite DB. SQLite will
# create it for us if it's not there... # create it for us if it's not there...
elif conn_string.startswith('mysql'):
# We can execute the MySQL client to destroy and re-create def _create_databases(self):
# the MYSQL database, which is easier and less error-prone """Create all configured databases as needed."""
# than using SQLAlchemy to do this via MetaData...trust me. for key, engine in self.engines.items():
self._create_database(key)
def _create_database(self, key):
"""Create database if it doesn't exist."""
conn_string = self.test_databases[key]
conn_pieces = urlparse.urlparse(conn_string)
if conn_string.startswith('mysql'):
(user, password, database, host) = \ (user, password, database, host) = \
get_mysql_connection_info(conn_pieces) get_mysql_connection_info(conn_pieces)
sql = ("drop database if exists %(database)s; " sql = "create database if not exists %s;" % database
"create database %(database)s;"
% {'database': database})
cmd = ("mysql -u \"%(user)s\" %(password)s -h %(host)s " cmd = ("mysql -u \"%(user)s\" %(password)s -h %(host)s "
"-e \"%(sql)s\"" % {'user': user, "-e \"%(sql)s\"" % {'user': user, 'password': password,
'password': password, 'host': host, 'sql': sql}) 'host': host, 'sql': sql})
self.execute_cmd(cmd) self.execute_cmd(cmd)
elif conn_string.startswith('postgresql'):
(user, password, database, host) = \
get_pgsql_connection_info(conn_pieces)
os.environ['PGPASSWORD'] = password
os.environ['PGUSER'] = user
sqlcmd = ("psql -w -U %(user)s -h %(host)s -c"
" '%(sql)s' -d template1")
sql = ("create database if not exists %s;") % database
createtable = sqlcmd % {'user': user, 'host': host, 'sql': sql}
status, output = commands.getstatusoutput(createtable)
if status != 0 and status != 256:
# 0 means databases is created
# 256 means it already exists (which is fine)
# otherwise raise an error
self.fail("Failed to run: %s\n%s" % (createtable, output))
os.unsetenv('PGPASSWORD')
os.unsetenv('PGUSER')
def _reset_databases(self):
"""Reset all configured databases."""
for key, engine in self.engines.items():
self._reset_database(key)
def _reset_database(self, key):
"""Reset specific database."""
engine = self.engines[key]
conn_string = self.test_databases[key]
conn_pieces = urlparse.urlparse(conn_string)
engine.dispose()
if conn_string.startswith('sqlite'):
self._reset_sqlite(conn_pieces)
elif conn_string.startswith('mysql'):
self._reset_mysql(conn_pieces)
elif conn_string.startswith('postgresql'): elif conn_string.startswith('postgresql'):
self._reset_pg(conn_pieces) self._reset_pg(conn_pieces)
class BaseWalkMigrationTestCase(BaseMigrationTestCase):
"""BaseWalkMigrationTestCase loads in an alternative set of databases for
testing against. This is necessary as the default databases can run tests
concurrently without interfering with each other. It is expected that
databases listed under [migration_dbs] in the configuration are only being
accessed by one test at a time. Currently only test_walk_versions accesses
the databases (and is the only method that calls _reset_database() which
is clearly problematic for concurrency).
"""
def _load_config(self):
# Load test databases from the config file. Only do this
# once. No need to re-run this on each test...
LOG.debug('config_path is %s' % self.CONFIG_FILE_PATH)
if os.path.exists(self.CONFIG_FILE_PATH):
cp = ConfigParser.RawConfigParser()
try:
cp.read(self.CONFIG_FILE_PATH)
config = cp.options('migration_dbs')
for key in config:
self.test_databases[key] = cp.get('migration_dbs', key)
self.snake_walk = cp.getboolean('walk_style', 'snake_walk')
self.downgrade = cp.getboolean('walk_style', 'downgrade')
except ConfigParser.ParsingError as e:
self.fail("Failed to read test_migrations.conf config "
"file. Got error: %s" % e)
else:
self.fail("Failed to find test_migrations.conf config "
"file.")
self.engines = {}
for key, value in self.test_databases.items():
self.engines[key] = sqlalchemy.create_engine(value)
self._create_databases()
def _test_mysql_opportunistically(self): def _test_mysql_opportunistically(self):
# Test that table creation on mysql only builds InnoDB tables # Test that table creation on mysql only builds InnoDB tables
if not _have_mysql(self.USER, self.PASSWD, self.DATABASE): if not _have_mysql(self.USER, self.PASSWD, self.DATABASE):
@ -317,8 +420,8 @@ class BaseMigrationTestCase(test.NoDBTestCase):
self.test_databases[database] = connect_string self.test_databases[database] = connect_string
# build a fully populated mysql database with all the tables # build a fully populated mysql database with all the tables
self._reset_databases() self._reset_database(database)
self._walk_versions(engine, False, False) self._walk_versions(engine, self.snake_walk, self.downgrade)
connection = engine.connect() connection = engine.connect()
# sanity check # sanity check
@ -338,6 +441,9 @@ class BaseMigrationTestCase(test.NoDBTestCase):
self.assertEqual(count, 0, "%d non InnoDB tables created" % count) self.assertEqual(count, 0, "%d non InnoDB tables created" % count)
connection.close() connection.close()
del(self.engines[database])
del(self.test_databases[database])
def _test_postgresql_opportunistically(self): def _test_postgresql_opportunistically(self):
# Test postgresql database migration walk # Test postgresql database migration walk
if not _have_postgresql(self.USER, self.PASSWD, self.DATABASE): if not _have_postgresql(self.USER, self.PASSWD, self.DATABASE):
@ -353,8 +459,10 @@ class BaseMigrationTestCase(test.NoDBTestCase):
self.test_databases[database] = connect_string self.test_databases[database] = connect_string
# build a fully populated postgresql database with all the tables # build a fully populated postgresql database with all the tables
self._reset_databases() self._reset_database(database)
self._walk_versions(engine, False, False) self._walk_versions(engine, self.snake_walk, self.downgrade)
del(self.engines[database])
del(self.test_databases[database])
def _walk_versions(self, engine=None, snake_walk=False, downgrade=True): def _walk_versions(self, engine=None, snake_walk=False, downgrade=True):
# Determine latest version script from the repo, then # Determine latest version script from the repo, then
@ -447,7 +555,7 @@ class BaseMigrationTestCase(test.NoDBTestCase):
raise raise
class TestNovaMigrations(BaseMigrationTestCase, CommonTestsMixIn): class TestNovaMigrations(BaseWalkMigrationTestCase, CommonTestsMixIn):
"""Test sqlalchemy-migrate migrations.""" """Test sqlalchemy-migrate migrations."""
USER = "openstack_citest" USER = "openstack_citest"
PASSWD = "openstack_citest" PASSWD = "openstack_citest"
@ -2607,6 +2715,30 @@ class TestNovaMigrations(BaseMigrationTestCase, CommonTestsMixIn):
for index in indexes: for index in indexes:
self.assertNotIn(index, current_indexes) self.assertNotIn(index, current_indexes)
# Check indexes are gone
if engine.name == 'mysql' or engine.name == 'postgresql':
data = {
# table_name: ((idx_1, (c1, c2,)), (idx2, (c1, c2,)), ...)
'quota_usages': (
('ix_quota_usages_user_id_deleted',
('user_id', 'deleted')),
),
'reservations': (
('ix_reservations_user_id_deleted',
('user_id', 'deleted')),
)
}
meta = sqlalchemy.MetaData()
meta.bind = engine
for table_name, indexes in data.iteritems():
table = sqlalchemy.Table(table_name, meta, autoload=True)
current_indexes = [(i.name, tuple(i.columns.keys()))
for i in table.indexes]
for index in indexes:
self.assertNotIn(index, current_indexes)
def _check_204(self, engine, data): def _check_204(self, engine, data):
if engine.name != 'sqlite': if engine.name != 'sqlite':
return return
@ -3049,18 +3181,18 @@ class TestNovaMigrations(BaseMigrationTestCase, CommonTestsMixIn):
"local_gb_used": 1, "deleted": 0, "local_gb_used": 1, "deleted": 0,
"hypervisor_type": "fake_type", "hypervisor_type": "fake_type",
"hypervisor_version": 1, "hypervisor_version": 1,
"service_id": 1, "id": 1}, "service_id": 1, "id": 10001},
{"vcpus": 1, "cpu_info": "info", {"vcpus": 1, "cpu_info": "info",
"memory_mb": 1, "local_gb": 1, "memory_mb": 1, "local_gb": 1,
"vcpus_used": 1, "memory_mb_used": 1, "vcpus_used": 1, "memory_mb_used": 1,
"local_gb_used": 1, "deleted": 2, "local_gb_used": 1, "deleted": 2,
"hypervisor_type": "fake_type", "hypervisor_type": "fake_type",
"hypervisor_version": 1, "hypervisor_version": 1,
"service_id": 1, "id": 2}], "service_id": 1, "id": 10002}],
"compute_node_stats": [{"id": 10, "compute_node_id": 1, "compute_node_stats": [{"id": 10, "compute_node_id": 10001,
"key": "fake-1", "key": "fake-1",
"deleted": 0}, "deleted": 0},
{"id": 20, "compute_node_id": 2, {"id": 20, "compute_node_id": 10002,
"key": "fake-2", "key": "fake-2",
"deleted": 0}]} "deleted": 0}]}
return ret return ret
@ -3209,7 +3341,7 @@ class TestNovaMigrations(BaseMigrationTestCase, CommonTestsMixIn):
self.assertEqual(quota['resource'], 'injected_file_content_bytes') self.assertEqual(quota['resource'], 'injected_file_content_bytes')
class TestBaremetalMigrations(BaseMigrationTestCase, CommonTestsMixIn): class TestBaremetalMigrations(BaseWalkMigrationTestCase, CommonTestsMixIn):
"""Test sqlalchemy-migrate migrations.""" """Test sqlalchemy-migrate migrations."""
USER = "openstack_citest" USER = "openstack_citest"
PASSWD = "openstack_citest" PASSWD = "openstack_citest"

View File

@ -1,9 +1,24 @@
[DEFAULT] [unit_tests]
# Set up any number of migration data stores you want, one # Set up any number of databases to test concurrently.
# The "name" used in the test is the config variable key. # The "name" used in the test is the config variable key.
#sqlite=sqlite:///test_migrations.db
sqlite=sqlite:// sqlite=sqlite://
#mysql=mysql://root:@localhost/test_migrations #sqlitefile=sqlite:///test_baremetal_migrations_utils.db
#postgresql=postgresql://user:pass@localhost/test_migrations #mysql=mysql+mysqldb://user:pass@localhost/test_baremetal_migrations_utils
#postgresql=postgresql+psycopg2://user:pass@localhost/test_migrations_utils
[migration_dbs]
# Migration DB details are listed separately as they can't be connected to
# concurrently. These databases can't be the same as above
# Note, sqlite:// is in-memory and unique each time it is spawned.
# However, file-based sqlite databases are not unique.
sqlite=sqlite://
#sqlitefile=sqlite:///test_baremetal_migrations.db
#mysql=mysql+mysqldb://user:pass@localhost/test_baremetal_migrations
#postgresql=postgresql+psycopg2://user:pass@localhost/test_baremetal_migrations
[walk_style] [walk_style]
snake_walk=yes snake_walk=yes
downgrade=yes