Merge "Fix tests to work with mysql+postgres concurrently"
commit b328978a07
@@ -1,7 +1,7 @@
 [DEFAULT]
 test_command=OS_STDOUT_CAPTURE=${OS_STDOUT_CAPTURE:-1} \
              OS_STDERR_CAPTURE=${OS_STDERR_CAPTURE:-1} \
-             OS_TEST_TIMEOUT=${OS_TEST_TIMEOUT:-60} \
+             OS_TEST_TIMEOUT=${OS_TEST_TIMEOUT:-160} \
              ${PYTHON:-python} -m subunit.run discover -t ./ ./nova/tests $LISTOPT $IDOPTION
 
 test_id_option=--load-list $IDFILE
@@ -289,6 +289,8 @@ def create_shadow_table(migrate_engine, table_name=None, table=None,
                             column_instance is instance of Column. These params
                             are required only for columns that have unsupported
                             types by sqlite. For example BigInteger.
+
+    :returns: The created shadow_table object.
     """
     meta = MetaData(bind=migrate_engine)
 
@@ -316,6 +318,7 @@ def create_shadow_table(migrate_engine, table_name=None, table=None,
                          mysql_engine='InnoDB')
     try:
         shadow_table.create()
+        return shadow_table
     except (OperationalError, ProgrammingError):
         LOG.info(repr(shadow_table))
         LOG.exception(_('Exception while creating table.'))
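create_shadow_table() now hands the created table back to the caller, which is what lets the rewritten tests below drop their shadow tables when they finish. A minimal sketch of the intended call pattern (engine setup and table name hypothetical):

    shadow_table = utils.create_shadow_table(engine, table_name='foo')
    try:
        self.assertTrue(utils.check_shadow_table(engine, 'foo'))
    finally:
        # Leave the shared database clean for whichever test runs next.
        shadow_table.drop()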
@@ -42,7 +42,7 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
     """Class for testing utils that are used in db migrations."""
 
     def test_utils_drop_unique_constraint(self):
-        table_name = "__test_tmp_table__"
+        table_name = "test_utils_drop_unique_constraint"
         uc_name = 'uniq_foo'
         values = [
             {'id': 1, 'a': 3, 'foo': 10},
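The renames matter because each backend's database is now shared by concurrently running tests: two tests issuing CREATE TABLE for the same shared name race, and the loser errors out. A small illustration of that failure under the SQLAlchemy API this tree uses (in-memory engine, purely hypothetical):

    import sqlalchemy

    engine = sqlalchemy.create_engine('sqlite://')
    sqlalchemy.Table('__test_tmp_table__', sqlalchemy.MetaData(bind=engine),
                     sqlalchemy.Column('id', sqlalchemy.Integer)).create()
    # A second test doing the same thing against the same database fails:
    sqlalchemy.Table('__test_tmp_table__', sqlalchemy.MetaData(bind=engine),
                     sqlalchemy.Column('id', sqlalchemy.Integer)).create()
    # => OperationalError: table __test_tmp_table__ already exists

Naming the table after the test makes every test's DDL disjoint.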
@@ -84,47 +84,49 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
        test_table.drop()
 
    def test_util_drop_unique_constraint_with_not_supported_sqlite_type(self):
-        table_name = "__test_tmp_table__"
-        uc_name = 'uniq_foo'
-        values = [
-            {'id': 1, 'a': 3, 'foo': 10},
-            {'id': 2, 'a': 2, 'foo': 20},
-            {'id': 3, 'a': 1, 'foo': 30}
-        ]
-
-        engine = self.engines['sqlite']
-        meta = MetaData(bind=engine)
-
-        test_table = Table(table_name, meta,
-                           Column('id', Integer, primary_key=True,
-                                  nullable=False),
-                           Column('a', Integer),
-                           Column('foo', CustomType, default=0),
-                           UniqueConstraint('a', name='uniq_a'),
-                           UniqueConstraint('foo', name=uc_name))
-        test_table.create()
-
-        engine.execute(test_table.insert(), values)
-        warnings.simplefilter("ignore", SAWarning)
-        # NOTE(boris-42): Missing info about column `foo` that has
-        # unsupported type CustomType.
-        self.assertRaises(exception.NovaException,
-                          utils.drop_unique_constraint,
-                          engine, table_name, uc_name, 'foo')
-
-        # NOTE(boris-42): Wrong type of foo instance. it should be
-        # instance of sqlalchemy.Column.
-        self.assertRaises(exception.NovaException,
-                          utils.drop_unique_constraint,
-                          engine, table_name, uc_name, 'foo', foo=Integer())
-
-        foo = Column('foo', CustomType, default=0)
-        utils.drop_unique_constraint(engine, table_name, uc_name, 'foo',
-                                     foo=foo)
-
-        s = test_table.select().order_by(test_table.c.id)
-        rows = engine.execute(s).fetchall()
+        if 'sqlite' in self.engines:
+            engine = self.engines['sqlite']
+            meta = MetaData(bind=engine)
+
+            table_name = ("test_util_drop_unique_constraint_with_not_supported"
+                          "_sqlite_type")
+            uc_name = 'uniq_foo'
+            values = [
+                {'id': 1, 'a': 3, 'foo': 10},
+                {'id': 2, 'a': 2, 'foo': 20},
+                {'id': 3, 'a': 1, 'foo': 30}
+            ]
+
+            test_table = Table(table_name, meta,
+                               Column('id', Integer, primary_key=True,
+                                      nullable=False),
+                               Column('a', Integer),
+                               Column('foo', CustomType, default=0),
+                               UniqueConstraint('a', name='uniq_a'),
+                               UniqueConstraint('foo', name=uc_name))
+            test_table.create()
+
+            engine.execute(test_table.insert(), values)
+            warnings.simplefilter("ignore", SAWarning)
+            # NOTE(boris-42): Missing info about column `foo` that has
+            # unsupported type CustomType.
+            self.assertRaises(exception.NovaException,
+                              utils.drop_unique_constraint,
+                              engine, table_name, uc_name, 'foo')
+
+            # NOTE(boris-42): Wrong type of foo instance. it should be
+            # instance of sqlalchemy.Column.
+            self.assertRaises(exception.NovaException,
+                              utils.drop_unique_constraint,
+                              engine, table_name, uc_name, 'foo',
+                              foo=Integer())
+
+            foo = Column('foo', CustomType, default=0)
+            utils.drop_unique_constraint(engine, table_name, uc_name, 'foo',
+                                         foo=foo)
+
+            s = test_table.select().order_by(test_table.c.id)
+            rows = engine.execute(s).fetchall()
 
        for i in xrange(0, len(values)):
            v = values[i]
@@ -168,7 +170,7 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
         return test_table, values
 
     def test_drop_old_duplicate_entries_from_table(self):
-        table_name = "__test_tmp_table__"
+        table_name = "test_drop_old_duplicate_entries_from_table"
 
         for key, engine in self.engines.items():
             meta = MetaData()
@@ -195,9 +197,10 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
             self.assertEqual(len(real_ids), len(expected_ids))
             for id_ in expected_ids:
                 self.assertIn(id_, real_ids)
+            test_table.drop()
 
     def test_drop_old_duplicate_entries_from_table_soft_delete(self):
-        table_name = "__test_tmp_table__"
+        table_name = "test_drop_old_duplicate_entries_from_table_soft_delete"
 
         for key, engine in self.engines.items():
             meta = MetaData()
@@ -237,9 +240,10 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
                              len(values) - len(row_ids))
             for value in soft_deleted_values:
                 self.assertIn(value['id'], deleted_rows_ids)
+            table.drop()
 
     def test_check_shadow_table(self):
-        table_name = 'abc'
+        table_name = 'test_check_shadow_table'
         for key, engine in self.engines.items():
             meta = MetaData()
             meta.bind = engine
@@ -274,8 +278,11 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
             self.assertRaises(exception.NovaException,
                               utils.check_shadow_table, engine, table_name)
+
+            table.drop()
+            shadow_table.drop()
 
     def test_check_shadow_table_different_types(self):
-        table_name = 'abc'
+        table_name = 'test_check_shadow_table_different_types'
         for key, engine in self.engines.items():
             meta = MetaData()
             meta.bind = engine
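These trailing drop() calls are the cleanup contract the reworked base class relies on: tests tidy up their own tables so a database can be shared. One caveat worth noting is that a failed assertion skips a trailing drop(). A hypothetical variant using addCleanup would survive failures; the patch itself keeps the simpler explicit drops:

    def test_check_shadow_table(self):
        meta = MetaData(bind=self.engines['sqlite'])
        table = Table('test_check_shadow_table', meta,
                      Column('id', Integer, primary_key=True))
        table.create()
        # Runs even when a later assertion fails, unlike a trailing drop().
        self.addCleanup(table.drop)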
@@ -292,8 +299,11 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
             self.assertRaises(exception.NovaException,
                               utils.check_shadow_table, engine, table_name)
+
+            table.drop()
+            shadow_table.drop()
 
     def test_check_shadow_table_with_unsupported_type(self):
-        table_name = 'abc'
+        table_name = 'test_check_shadow_table_with_unsupported_type'
         engine = self.engines['sqlite']
         meta = MetaData(bind=engine)
 
@@ -309,9 +319,10 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
                              Column('c', CustomType))
         shadow_table.create()
         self.assertTrue(utils.check_shadow_table(engine, table_name))
+        shadow_table.drop()
 
     def test_create_shadow_table_by_table_instance(self):
-        table_name = 'abc'
+        table_name = 'test_create_shadow_table_by_table_instance'
         for key, engine in self.engines.items():
             meta = MetaData()
             meta.bind = engine
@@ -320,11 +331,13 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
                           Column('a', Integer),
                           Column('b', String(256)))
             table.create()
-            utils.create_shadow_table(engine, table=table)
+            shadow_table = utils.create_shadow_table(engine, table=table)
             self.assertTrue(utils.check_shadow_table(engine, table_name))
+            table.drop()
+            shadow_table.drop()
 
     def test_create_shadow_table_by_name(self):
-        table_name = 'abc'
+        table_name = 'test_create_shadow_table_by_name'
         for key, engine in self.engines.items():
             meta = MetaData()
             meta.bind = engine
@@ -334,25 +347,33 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
                           Column('a', Integer),
                           Column('b', String(256)))
             table.create()
-            utils.create_shadow_table(engine, table_name=table_name)
+            shadow_table = utils.create_shadow_table(engine,
+                                                     table_name=table_name)
             self.assertTrue(utils.check_shadow_table(engine, table_name))
+            table.drop()
+            shadow_table.drop()
 
     def test_create_shadow_table_not_supported_type(self):
-        table_name = 'abc'
-        engine = self.engines['sqlite']
-        meta = MetaData()
-        meta.bind = engine
-        table = Table(table_name, meta,
-                      Column('id', Integer, primary_key=True),
-                      Column('a', CustomType))
-        table.create()
-        self.assertRaises(exception.NovaException,
-                          utils.create_shadow_table,
-                          engine, table_name=table_name)
-
-        utils.create_shadow_table(engine, table_name=table_name,
-                                  a=Column('a', CustomType()))
-        self.assertTrue(utils.check_shadow_table(engine, table_name))
+        if 'sqlite' in self.engines:
+            table_name = 'test_create_shadow_table_not_supported_type'
+            engine = self.engines['sqlite']
+            meta = MetaData()
+            meta.bind = engine
+            table = Table(table_name, meta,
+                          Column('id', Integer, primary_key=True),
+                          Column('a', CustomType))
+            table.create()
+            self.assertRaises(exception.NovaException,
+                              utils.create_shadow_table,
+                              engine, table_name=table_name)
+
+            shadow_table = utils.create_shadow_table(engine,
+                                                     table_name=table_name,
+                                                     a=Column('a', CustomType())
+                                                     )
+            self.assertTrue(utils.check_shadow_table(engine, table_name))
+            table.drop()
+            shadow_table.drop()
 
     def test_create_shadow_both_table_and_table_name_are_none(self):
         for key, engine in self.engines.items():
@@ -362,7 +383,8 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
                           utils.create_shadow_table, engine)
 
     def test_create_shadow_both_table_and_table_name_are_specified(self):
-        table_name = 'abc'
+        table_name = ('test_create_shadow_both_table_and_table_name_are_'
+                      'specified')
         for key, engine in self.engines.items():
             meta = MetaData()
             meta.bind = engine
@@ -373,9 +395,10 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
             self.assertRaises(exception.NovaException,
                               utils.create_shadow_table,
                               engine, table=table, table_name=table_name)
+            table.drop()
 
     def test_create_duplicate_shadow_table(self):
-        table_name = 'abc'
+        table_name = 'test_create_duplicate_shadow_table'
         for key, engine in self.engines.items():
             meta = MetaData()
             meta.bind = engine
@@ -383,13 +406,16 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
                           Column('id', Integer, primary_key=True),
                           Column('a', Integer))
             table.create()
-            utils.create_shadow_table(engine, table_name=table_name)
+            shadow_table = utils.create_shadow_table(engine,
+                                                     table_name=table_name)
             self.assertRaises(exception.ShadowTableExists,
                               utils.create_shadow_table,
                               engine, table_name=table_name)
+            table.drop()
+            shadow_table.drop()
 
     def test_change_deleted_column_type_doesnt_drop_index(self):
-        table_name = 'abc'
+        table_name = 'test_change_deleted_column_type_doesnt_drop_index'
         for key, engine in self.engines.items():
             meta = MetaData(bind=engine)
 
@@ -420,9 +446,10 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
                 self.assertIn(name, indexes)
                 self.assertEqual(set(index['column_names']),
                                  set(indexes[name]))
+            table.drop()
 
     def test_change_deleted_column_type_to_id_type_integer(self):
-        table_name = 'abc'
+        table_name = 'test_change_deleted_column_type_to_id_type_integer'
         for key, engine in self.engines.items():
             meta = MetaData()
             meta.bind = engine
@@ -434,9 +461,10 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
 
             table = utils.get_table(engine, table_name)
             self.assertIsInstance(table.c.deleted.type, Integer)
+            table.drop()
 
     def test_change_deleted_column_type_to_id_type_string(self):
-        table_name = 'abc'
+        table_name = 'test_change_deleted_column_type_to_id_type_string'
         for key, engine in self.engines.items():
             meta = MetaData()
             meta.bind = engine
@@ -448,34 +476,37 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
 
             table = utils.get_table(engine, table_name)
             self.assertIsInstance(table.c.deleted.type, String)
+            table.drop()
 
     def test_change_deleted_column_type_to_id_type_custom(self):
-        table_name = 'abc'
-        engine = self.engines['sqlite']
-        meta = MetaData()
-        meta.bind = engine
-        table = Table(table_name, meta,
-                      Column('id', Integer, primary_key=True),
-                      Column('foo', CustomType),
-                      Column('deleted', Boolean))
-        table.create()
+        if 'sqlite' in self.engines:
+            table_name = 'test_change_deleted_column_type_to_id_type_custom'
+            engine = self.engines['sqlite']
+            meta = MetaData()
+            meta.bind = engine
+            table = Table(table_name, meta,
+                          Column('id', Integer, primary_key=True),
+                          Column('foo', CustomType),
+                          Column('deleted', Boolean))
+            table.create()
 
         self.assertRaises(exception.NovaException,
                           utils.change_deleted_column_type_to_id_type,
                           engine, table_name)
 
         fooColumn = Column('foo', CustomType())
         utils.change_deleted_column_type_to_id_type(engine, table_name,
                                                     foo=fooColumn)
 
         table = utils.get_table(engine, table_name)
         # NOTE(boris-42): There is no way to check has foo type CustomType.
         # but sqlalchemy will set it to NullType.
         self.assertIsInstance(table.c.foo.type, NullType)
         self.assertIsInstance(table.c.deleted.type, Integer)
+        table.drop()
 
     def test_change_deleted_column_type_to_boolean(self):
-        table_name = 'abc'
+        table_name = 'test_change_deleted_column_type_to_boolean'
         for key, engine in self.engines.items():
             meta = MetaData()
             meta.bind = engine
@@ -489,53 +520,63 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
             table = utils.get_table(engine, table_name)
             expected_type = Boolean if key != "mysql" else mysql.TINYINT
             self.assertIsInstance(table.c.deleted.type, expected_type)
+            table.drop()
 
     def test_change_deleted_column_type_to_boolean_type_custom(self):
-        table_name = 'abc'
-        engine = self.engines['sqlite']
-        meta = MetaData()
-        meta.bind = engine
-        table = Table(table_name, meta,
-                      Column('id', Integer, primary_key=True),
-                      Column('foo', CustomType),
-                      Column('deleted', Integer))
-        table.create()
+        if 'sqlite' in self.engines:
+            table_name = \
+                'test_change_deleted_column_type_to_boolean_type_custom'
+            engine = self.engines['sqlite']
+            meta = MetaData()
+            meta.bind = engine
+            table = Table(table_name, meta,
+                          Column('id', Integer, primary_key=True),
+                          Column('foo', CustomType),
+                          Column('deleted', Integer))
+            table.create()
 
         self.assertRaises(exception.NovaException,
                           utils.change_deleted_column_type_to_boolean,
                           engine, table_name)
 
         fooColumn = Column('foo', CustomType())
         utils.change_deleted_column_type_to_boolean(engine, table_name,
                                                     foo=fooColumn)
 
         table = utils.get_table(engine, table_name)
         # NOTE(boris-42): There is no way to check has foo type CustomType.
         # but sqlalchemy will set it to NullType.
         self.assertIsInstance(table.c.foo.type, NullType)
         self.assertIsInstance(table.c.deleted.type, Boolean)
+        table.drop()
 
     def test_drop_unique_constraint_in_sqlite_fk_recreate(self):
-        engine = self.engines['sqlite']
-        meta = MetaData()
-        meta.bind = engine
-        parent_table = Table('table0', meta,
-                             Column('id', Integer, primary_key=True),
-                             Column('foo', Integer))
-        parent_table.create()
-        table_name = 'table1'
-        table = Table(table_name, meta,
-                      Column('id', Integer, primary_key=True),
-                      Column('baz', Integer),
-                      Column('bar', Integer, ForeignKey("table0.id")),
-                      UniqueConstraint('baz', name='constr1'))
-        table.create()
-        utils.drop_unique_constraint(engine, table_name, 'constr1', 'baz')
+        if 'sqlite' in self.engines:
+            engine = self.engines['sqlite']
+            meta = MetaData()
+            meta.bind = engine
+            parent_table_name = ('test_drop_unique_constraint_in_sqlite_fk_'
+                                 'recreate_parent_table')
+            parent_table = Table(parent_table_name, meta,
+                                 Column('id', Integer, primary_key=True),
+                                 Column('foo', Integer))
+            parent_table.create()
+            table_name = 'test_drop_unique_constraint_in_sqlite_fk_recreate'
+            table = Table(table_name, meta,
+                          Column('id', Integer, primary_key=True),
+                          Column('baz', Integer),
+                          Column('bar', Integer,
+                                 ForeignKey(parent_table_name + ".id")),
+                          UniqueConstraint('baz', name='constr1'))
+            table.create()
+            utils.drop_unique_constraint(engine, table_name, 'constr1', 'baz')
 
         insp = reflection.Inspector.from_engine(engine)
         f_keys = insp.get_foreign_keys(table_name)
         self.assertEqual(len(f_keys), 1)
         f_key = f_keys[0]
-        self.assertEqual(f_key['referred_table'], 'table0')
+        self.assertEqual(f_key['referred_table'], parent_table_name)
         self.assertEqual(f_key['referred_columns'], ['id'])
         self.assertEqual(f_key['constrained_columns'], ['bar'])
+        table.drop()
+        parent_table.drop()
@@ -1,9 +1,26 @@
-[DEFAULT]
-# Set up any number of migration data stores you want, one
+[unit_tests]
+# Set up any number of databases to test concurrently.
 # The "name" used in the test is the config variable key.
-#sqlite=sqlite:///test_migrations.db
+
+# A few tests rely on one sqlite database with 'sqlite' as the key.
+
 sqlite=sqlite://
-#mysql=mysql://root:@localhost/test_migrations
-#postgresql=postgresql://user:pass@localhost/test_migrations
+#sqlitefile=sqlite:///test_migrations_utils.db
+#mysql=mysql+mysqldb://user:pass@localhost/test_migrations_utils
+#postgresql=postgresql+psycopg2://user:pass@localhost/test_migrations_utils
+
+[migration_dbs]
+# Migration DB details are listed separately as they can't be connected to
+# concurrently. These databases can't be the same as above
+
+# Note, sqlite:// is in-memory and unique each time it is spawned.
+# However file sqlite's are not unique.
+
+sqlite=sqlite://
+#sqlitefile=sqlite:///test_migrations.db
+#mysql=mysql+mysqldb://user:pass@localhost/test_migrations
+#postgresql=postgresql+psycopg2://user:pass@localhost/test_migrations
+
 [walk_style]
 snake_walk=yes
+downgrade=yes
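A sketch of how the two-section layout above is consumed (file path hypothetical): [unit_tests] engines may be opened by many tests at once, while [migration_dbs] entries are reserved for the serial migration walks.

    import ConfigParser

    cp = ConfigParser.RawConfigParser()
    cp.read('test_migrations.conf')
    unit_dbs = dict((key, cp.get('unit_tests', key))
                    for key in cp.options('unit_tests'))
    walk_dbs = dict((key, cp.get('migration_dbs', key))
                    for key in cp.options('migration_dbs'))
    downgrade = cp.getboolean('walk_style', 'downgrade')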
@@ -38,6 +38,8 @@ sudo -u postgres psql
 postgres=# create user openstack_citest with createdb login password
            'openstack_citest';
 postgres=# create database openstack_citest with owner openstack_citest;
+postgres=# create database openstack_baremetal_citest with owner
+           openstack_citest;
 
 """
 
@@ -156,7 +158,9 @@ class CommonTestsMixIn(object):
     """
     def test_walk_versions(self):
        for key, engine in self.engines.items():
-            self._walk_versions(engine, self.snake_walk)
+            # We start each walk with a completely blank slate.
+            self._reset_database(key)
+            self._walk_versions(engine, self.snake_walk, self.downgrade)
 
     def test_mysql_opportunistically(self):
         self._test_mysql_opportunistically()
@@ -184,7 +188,18 @@
 
 
 class BaseMigrationTestCase(test.NoDBTestCase):
-    """Base class fort testing migrations and migration utils."""
+    """Base class for testing migrations and migration utils. This sets up
+    and configures the databases to run tests against.
+    """
+
+    # NOTE(jhesketh): It is expected that tests clean up after themselves.
+    # This is necessary for concurrency to allow multiple tests to work on
+    # one database.
+    # The full migration walk tests however do call the old _reset_databases()
+    # to throw away whatever was there so they need to operate on their own
+    # database that we know isn't accessed concurrently.
+    # Hence, BaseWalkMigrationTestCase overwrites the engine list.
+
     USER = None
     PASSWD = None
     DATABASE = None
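A condensed sketch of the split that NOTE describes (bodies elided, names from this patch):

    class BaseMigrationTestCase(test.NoDBTestCase):
        # Reads [unit_tests]: engines that concurrent tests may share,
        # so every test must drop whatever tables it creates.
        pass

    class BaseWalkMigrationTestCase(BaseMigrationTestCase):
        # Overrides _load_config() to read [migration_dbs]: engines that
        # get reset wholesale and therefore must never be shared.
        pass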
@@ -204,13 +219,16 @@ class BaseMigrationTestCase(test.NoDBTestCase):
         self.INIT_VERSION = 0
 
         self.snake_walk = False
+        self.downgrade = False
         self.test_databases = {}
         self.migration = None
         self.migration_api = None
 
     def setUp(self):
         super(BaseMigrationTestCase, self).setUp()
+        self._load_config()
+
+    def _load_config(self):
         # Load test databases from the config file. Only do this
         # once. No need to re-run this on each test...
         LOG.debug('config_path is %s' % self.CONFIG_FILE_PATH)
@@ -218,10 +236,12 @@ class BaseMigrationTestCase(test.NoDBTestCase):
             cp = ConfigParser.RawConfigParser()
             try:
                 cp.read(self.CONFIG_FILE_PATH)
-                defaults = cp.defaults()
-                for key, value in defaults.items():
-                    self.test_databases[key] = value
+                config = cp.options('unit_tests')
+                for key in config:
+                    self.test_databases[key] = cp.get('unit_tests', key)
                 self.snake_walk = cp.getboolean('walk_style', 'snake_walk')
+                self.downgrade = cp.getboolean('walk_style', 'downgrade')
+
             except ConfigParser.ParsingError as e:
                 self.fail("Failed to read test_migrations.conf config "
                           "file. Got error: %s" % e)
@@ -233,15 +253,9 @@ class BaseMigrationTestCase(test.NoDBTestCase):
         for key, value in self.test_databases.items():
             self.engines[key] = sqlalchemy.create_engine(value)
 
-        # We start each test case with a completely blank slate.
-        self._reset_databases()
-
-    def tearDown(self):
-        # We destroy the test data store between each test case,
-        # and recreate it, which ensures that we have no side-effects
-        # from the tests
-        self._reset_databases()
-        super(BaseMigrationTestCase, self).tearDown()
+        # NOTE(jhesketh): We only need to make sure the databases are created
+        # not necessarily clean of tables.
+        self._create_databases()
 
     def execute_cmd(self, cmd=None):
         status, output = commands.getstatusoutput(cmd)
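With tearDown() gone, nothing wipes the [unit_tests] databases between tests any more; setUp() merely guarantees they exist. The resulting lifecycle, sketched as comments (method names from this patch):

    # [unit_tests] engines, every test:
    #   setUp() -> _load_config(); _create_databases()   # create, don't clean
    #   test_*  -> create uniquely named tables, drop them when done
    #
    # [migration_dbs] engines, walk tests only (BaseWalkMigrationTestCase):
    #   test_walk_versions -> _reset_database(key)       # blank slate
    #                         _walk_versions(engine, snake_walk, downgrade)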
@@ -273,34 +287,123 @@ class BaseMigrationTestCase(test.NoDBTestCase):
         os.unsetenv('PGPASSWORD')
         os.unsetenv('PGUSER')
 
-    def _reset_databases(self):
+    @utils.synchronized('mysql', external=True)
+    def _reset_mysql(self, conn_pieces):
+        # We can execute the MySQL client to destroy and re-create
+        # the MYSQL database, which is easier and less error-prone
+        # than using SQLAlchemy to do this via MetaData...trust me.
+        (user, password, database, host) = \
+            get_mysql_connection_info(conn_pieces)
+        sql = ("drop database if exists %(database)s; "
+               "create database %(database)s;" % {'database': database})
+        cmd = ("mysql -u \"%(user)s\" %(password)s -h %(host)s "
+               "-e \"%(sql)s\"" % {'user': user, 'password': password,
+                                   'host': host, 'sql': sql})
+        self.execute_cmd(cmd)
+
+    @utils.synchronized('sqlite', external=True)
+    def _reset_sqlite(self, conn_pieces):
+        # We can just delete the SQLite database, which is
+        # the easiest and cleanest solution
+        db_path = conn_pieces.path.strip('/')
+        if os.path.exists(db_path):
+            os.unlink(db_path)
+        # No need to recreate the SQLite DB. SQLite will
+        # create it for us if it's not there...
+
+    def _create_databases(self):
+        """Create all configured databases as needed."""
         for key, engine in self.engines.items():
-            conn_string = self.test_databases[key]
-            conn_pieces = urlparse.urlparse(conn_string)
-            engine.dispose()
-            if conn_string.startswith('sqlite'):
-                # We can just delete the SQLite database, which is
-                # the easiest and cleanest solution
-                db_path = conn_pieces.path.strip('/')
-                if os.path.exists(db_path):
-                    os.unlink(db_path)
-                # No need to recreate the SQLite DB. SQLite will
-                # create it for us if it's not there...
-            elif conn_string.startswith('mysql'):
-                # We can execute the MySQL client to destroy and re-create
-                # the MYSQL database, which is easier and less error-prone
-                # than using SQLAlchemy to do this via MetaData...trust me.
-                (user, password, database, host) = \
-                    get_mysql_connection_info(conn_pieces)
-                sql = ("drop database if exists %(database)s; "
-                       "create database %(database)s;"
-                       % {'database': database})
-                cmd = ("mysql -u \"%(user)s\" %(password)s -h %(host)s "
-                       "-e \"%(sql)s\"" % {'user': user,
-                       'password': password, 'host': host, 'sql': sql})
-                self.execute_cmd(cmd)
-            elif conn_string.startswith('postgresql'):
-                self._reset_pg(conn_pieces)
+            self._create_database(key)
+
+    def _create_database(self, key):
+        """Create database if it doesn't exist."""
+        conn_string = self.test_databases[key]
+        conn_pieces = urlparse.urlparse(conn_string)
+
+        if conn_string.startswith('mysql'):
+            (user, password, database, host) = \
+                get_mysql_connection_info(conn_pieces)
+            sql = "create database if not exists %s;" % database
+            cmd = ("mysql -u \"%(user)s\" %(password)s -h %(host)s "
+                   "-e \"%(sql)s\"" % {'user': user, 'password': password,
+                                       'host': host, 'sql': sql})
+            self.execute_cmd(cmd)
+        elif conn_string.startswith('postgresql'):
+            (user, password, database, host) = \
+                get_pgsql_connection_info(conn_pieces)
+            os.environ['PGPASSWORD'] = password
+            os.environ['PGUSER'] = user
+
+            sqlcmd = ("psql -w -U %(user)s -h %(host)s -c"
+                      " '%(sql)s' -d template1")
+
+            sql = ("create database if not exists %s;") % database
+            createtable = sqlcmd % {'user': user, 'host': host, 'sql': sql}
+            status, output = commands.getstatusoutput(createtable)
+            if status != 0 and status != 256:
+                # 0 means the database was created
+                # 256 means it already exists (which is fine)
+                # otherwise raise an error
+                self.fail("Failed to run: %s\n%s" % (createtable, output))
+
+            os.unsetenv('PGPASSWORD')
+            os.unsetenv('PGUSER')
+
+    def _reset_databases(self):
+        """Reset all configured databases."""
+        for key, engine in self.engines.items():
+            self._reset_database(key)
+
+    def _reset_database(self, key):
+        """Reset specific database."""
+        engine = self.engines[key]
+        conn_string = self.test_databases[key]
+        conn_pieces = urlparse.urlparse(conn_string)
+        engine.dispose()
+        if conn_string.startswith('sqlite'):
+            self._reset_sqlite(conn_pieces)
+        elif conn_string.startswith('mysql'):
+            self._reset_mysql(conn_pieces)
+        elif conn_string.startswith('postgresql'):
+            self._reset_pg(conn_pieces)
+
+
+class BaseWalkMigrationTestCase(BaseMigrationTestCase):
+    """BaseWalkMigrationTestCase loads in an alternative set of databases for
+    testing against. This is necessary as the default databases are shared by
+    concurrently running tests and would interfere with the migration walks.
+    It is expected that databases listed under [migration_dbs] in the
+    configuration are only accessed by one test at a time. Currently only
+    test_walk_versions accesses the databases (and is the only method that
+    calls _reset_database(), which is clearly problematic for concurrency).
+    """
+
+    def _load_config(self):
+        # Load test databases from the config file. Only do this
+        # once. No need to re-run this on each test...
+        LOG.debug('config_path is %s' % self.CONFIG_FILE_PATH)
+        if os.path.exists(self.CONFIG_FILE_PATH):
+            cp = ConfigParser.RawConfigParser()
+            try:
+                cp.read(self.CONFIG_FILE_PATH)
+                config = cp.options('migration_dbs')
+                for key in config:
+                    self.test_databases[key] = cp.get('migration_dbs', key)
+                self.snake_walk = cp.getboolean('walk_style', 'snake_walk')
+                self.downgrade = cp.getboolean('walk_style', 'downgrade')
+            except ConfigParser.ParsingError as e:
+                self.fail("Failed to read test_migrations.conf config "
+                          "file. Got error: %s" % e)
+        else:
+            self.fail("Failed to find test_migrations.conf config "
+                      "file.")
+
+        self.engines = {}
+        for key, value in self.test_databases.items():
+            self.engines[key] = sqlalchemy.create_engine(value)
+
+        self._create_databases()
+
     def _test_mysql_opportunistically(self):
         # Test that table creation on mysql only builds InnoDB tables
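The external=True locks on _reset_mysql() and _reset_sqlite() are what make resets safe under a parallel test runner: a file-backed lock serialises callers across processes, not just threads. A minimal sketch of the pattern, assuming the utils module imported by this file is nova.utils and exposes the same synchronized decorator the patch applies (function name and body hypothetical):

    from nova import utils

    @utils.synchronized('mysql', external=True)
    def reset_shared_mysql():
        # Only one test process at a time gets past this point, so
        # concurrent workers cannot drop/recreate the database mid-test.
        pass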
@@ -317,8 +420,8 @@ class BaseMigrationTestCase(test.NoDBTestCase):
         self.test_databases[database] = connect_string
 
         # build a fully populated mysql database with all the tables
-        self._reset_databases()
-        self._walk_versions(engine, False, False)
+        self._reset_database(database)
+        self._walk_versions(engine, self.snake_walk, self.downgrade)
 
         connection = engine.connect()
         # sanity check
@@ -338,6 +441,9 @@ class BaseMigrationTestCase(test.NoDBTestCase):
             self.assertEqual(count, 0, "%d non InnoDB tables created" % count)
         connection.close()
 
+        del(self.engines[database])
+        del(self.test_databases[database])
+
     def _test_postgresql_opportunistically(self):
         # Test postgresql database migration walk
         if not _have_postgresql(self.USER, self.PASSWD, self.DATABASE):
@@ -353,8 +459,10 @@ class BaseMigrationTestCase(test.NoDBTestCase):
         self.test_databases[database] = connect_string
 
         # build a fully populated postgresql database with all the tables
-        self._reset_databases()
-        self._walk_versions(engine, False, False)
+        self._reset_database(database)
+        self._walk_versions(engine, self.snake_walk, self.downgrade)
+        del(self.engines[database])
+        del(self.test_databases[database])
 
     def _walk_versions(self, engine=None, snake_walk=False, downgrade=True):
         # Determine latest version script from the repo, then
@@ -447,7 +555,7 @@ class BaseMigrationTestCase(test.NoDBTestCase):
                 raise
 
 
-class TestNovaMigrations(BaseMigrationTestCase, CommonTestsMixIn):
+class TestNovaMigrations(BaseWalkMigrationTestCase, CommonTestsMixIn):
     """Test sqlalchemy-migrate migrations."""
     USER = "openstack_citest"
     PASSWD = "openstack_citest"
@@ -2607,6 +2715,30 @@ class TestNovaMigrations(BaseMigrationTestCase, CommonTestsMixIn):
             for index in indexes:
                 self.assertNotIn(index, current_indexes)
+
+        # Check indexes are gone
+        if engine.name == 'mysql' or engine.name == 'postgresql':
+            data = {
+                # table_name: ((idx_1, (c1, c2,)), (idx2, (c1, c2,)), ...)
+                'quota_usages': (
+                    ('ix_quota_usages_user_id_deleted',
+                     ('user_id', 'deleted')),
+                ),
+                'reservations': (
+                    ('ix_reservations_user_id_deleted',
+                     ('user_id', 'deleted')),
+                )
+            }
+
+            meta = sqlalchemy.MetaData()
+            meta.bind = engine
+
+            for table_name, indexes in data.iteritems():
+                table = sqlalchemy.Table(table_name, meta, autoload=True)
+                current_indexes = [(i.name, tuple(i.columns.keys()))
+                                   for i in table.indexes]
+                for index in indexes:
+                    self.assertNotIn(index, current_indexes)
 
     def _check_204(self, engine, data):
         if engine.name != 'sqlite':
             return
@@ -3049,18 +3181,18 @@ class TestNovaMigrations(BaseMigrationTestCase, CommonTestsMixIn):
                                  "local_gb_used": 1, "deleted": 0,
                                  "hypervisor_type": "fake_type",
                                  "hypervisor_version": 1,
-                                 "service_id": 1, "id": 1},
+                                 "service_id": 1, "id": 10001},
                                 {"vcpus": 1, "cpu_info": "info",
                                  "memory_mb": 1, "local_gb": 1,
                                  "vcpus_used": 1, "memory_mb_used": 1,
                                  "local_gb_used": 1, "deleted": 2,
                                  "hypervisor_type": "fake_type",
                                  "hypervisor_version": 1,
-                                 "service_id": 1, "id": 2}],
-               "compute_node_stats": [{"id": 10, "compute_node_id": 1,
+                                 "service_id": 1, "id": 10002}],
+               "compute_node_stats": [{"id": 10, "compute_node_id": 10001,
                                        "key": "fake-1",
                                        "deleted": 0},
-                                      {"id": 20, "compute_node_id": 2,
+                                      {"id": 20, "compute_node_id": 10002,
                                        "key": "fake-2",
                                        "deleted": 0}]}
         return ret
@@ -3209,7 +3341,7 @@ class TestNovaMigrations(BaseMigrationTestCase, CommonTestsMixIn):
         self.assertEqual(quota['resource'], 'injected_file_content_bytes')
 
 
-class TestBaremetalMigrations(BaseMigrationTestCase, CommonTestsMixIn):
+class TestBaremetalMigrations(BaseWalkMigrationTestCase, CommonTestsMixIn):
     """Test sqlalchemy-migrate migrations."""
     USER = "openstack_citest"
     PASSWD = "openstack_citest"
@@ -1,9 +1,24 @@
-[DEFAULT]
-# Set up any number of migration data stores you want, one
+[unit_tests]
+# Set up any number of databases to test concurrently.
 # The "name" used in the test is the config variable key.
-#sqlite=sqlite:///test_migrations.db
+
 sqlite=sqlite://
-#mysql=mysql://root:@localhost/test_migrations
-#postgresql=postgresql://user:pass@localhost/test_migrations
+#sqlitefile=sqlite:///test_baremetal_migrations_utils.db
+#mysql=mysql+mysqldb://user:pass@localhost/test_baremetal_migrations_utils
+#postgresql=postgresql+psycopg2://user:pass@localhost/test_migrations_utils
+
+[migration_dbs]
+# Migration DB details are listed separately as they can't be connected to
+# concurrently. These databases can't be the same as above
+
+# Note, sqlite:// is in-memory and unique each time it is spawned.
+# However file sqlite's are not unique.
+
+sqlite=sqlite://
+#sqlitefile=sqlite:///test_baremetal_migrations.db
+#mysql=mysql+mysqldb://user:pass@localhost/test_baremetal_migrations
+#postgresql=postgresql+psycopg2://user:pass@localhost/test_baremetal_migrations
+
 [walk_style]
 snake_walk=yes
+downgrade=yes