diff --git a/.testr.conf b/.testr.conf
index 4bc514d62b49..ea6f49fe83ef 100644
--- a/.testr.conf
+++ b/.testr.conf
@@ -1,7 +1,7 @@
 [DEFAULT]
 test_command=OS_STDOUT_CAPTURE=${OS_STDOUT_CAPTURE:-1} \
              OS_STDERR_CAPTURE=${OS_STDERR_CAPTURE:-1} \
-             OS_TEST_TIMEOUT=${OS_TEST_TIMEOUT:-60} \
+             OS_TEST_TIMEOUT=${OS_TEST_TIMEOUT:-160} \
              ${PYTHON:-python} -m subunit.run discover -t ./ ./nova/tests $LISTOPT $IDOPTION
 test_id_option=--load-list $IDFILE
diff --git a/nova/db/sqlalchemy/utils.py b/nova/db/sqlalchemy/utils.py
index 81e37c119d67..f99ba296e6c6 100644
--- a/nova/db/sqlalchemy/utils.py
+++ b/nova/db/sqlalchemy/utils.py
@@ -289,6 +289,8 @@ def create_shadow_table(migrate_engine, table_name=None, table=None,
                         column_instance is instance of Column. These params
                         are required only for columns that have unsupported
                         types by sqlite. For example BigInteger.
+
+    :returns: The created shadow_table object.
     """
     meta = MetaData(bind=migrate_engine)
@@ -316,6 +318,7 @@ def create_shadow_table(migrate_engine, table_name=None, table=None,
                          mysql_engine='InnoDB')
     try:
         shadow_table.create()
+        return shadow_table
     except (OperationalError, ProgrammingError):
         LOG.info(repr(shadow_table))
         LOG.exception(_('Exception while creating table.'))
diff --git a/nova/tests/db/test_migration_utils.py b/nova/tests/db/test_migration_utils.py
index 42a4fd782870..8bea915fa842 100644
--- a/nova/tests/db/test_migration_utils.py
+++ b/nova/tests/db/test_migration_utils.py
@@ -42,7 +42,7 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
     """Class for testing utils that are used in db migrations."""
 
     def test_utils_drop_unique_constraint(self):
-        table_name = "__test_tmp_table__"
+        table_name = "test_utils_drop_unique_constraint"
         uc_name = 'uniq_foo'
         values = [
             {'id': 1, 'a': 3, 'foo': 10},
@@ -84,47 +84,49 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
         test_table.drop()
 
     def test_util_drop_unique_constraint_with_not_supported_sqlite_type(self):
+        if 'sqlite' in self.engines:
+            engine = self.engines['sqlite']
+            meta = MetaData(bind=engine)
-        table_name = "__test_tmp_table__"
-        uc_name = 'uniq_foo'
-        values = [
-            {'id': 1, 'a': 3, 'foo': 10},
-            {'id': 2, 'a': 2, 'foo': 20},
-            {'id': 3, 'a': 1, 'foo': 30}
-        ]
+            table_name = ("test_util_drop_unique_constraint_with_not_supported"
+                          "_sqlite_type")
+            uc_name = 'uniq_foo'
+            values = [
+                {'id': 1, 'a': 3, 'foo': 10},
+                {'id': 2, 'a': 2, 'foo': 20},
+                {'id': 3, 'a': 1, 'foo': 30}
+            ]
-        engine = self.engines['sqlite']
-        meta = MetaData(bind=engine)
+            test_table = Table(table_name, meta,
+                               Column('id', Integer, primary_key=True,
+                                      nullable=False),
+                               Column('a', Integer),
+                               Column('foo', CustomType, default=0),
+                               UniqueConstraint('a', name='uniq_a'),
+                               UniqueConstraint('foo', name=uc_name))
+            test_table.create()
-        test_table = Table(table_name, meta,
-                           Column('id', Integer, primary_key=True,
-                                  nullable=False),
-                           Column('a', Integer),
-                           Column('foo', CustomType, default=0),
-                           UniqueConstraint('a', name='uniq_a'),
-                           UniqueConstraint('foo', name=uc_name))
-        test_table.create()
+            engine.execute(test_table.insert(), values)
+            warnings.simplefilter("ignore", SAWarning)
+            # NOTE(boris-42): Missing info about column `foo` that has
+            # unsupported type CustomType.
+            self.assertRaises(exception.NovaException,
+                              utils.drop_unique_constraint,
+                              engine, table_name, uc_name, 'foo')
-        engine.execute(test_table.insert(), values)
-        warnings.simplefilter("ignore", SAWarning)
-        # NOTE(boris-42): Missing info about column `foo` that has
-        # unsupported type CustomType.
-        self.assertRaises(exception.NovaException,
-                          utils.drop_unique_constraint,
-                          engine, table_name, uc_name, 'foo')
+            # NOTE(boris-42): Wrong type of foo instance. it should be
+            # instance of sqlalchemy.Column.
+            self.assertRaises(exception.NovaException,
+                              utils.drop_unique_constraint,
+                              engine, table_name, uc_name, 'foo',
+                              foo=Integer())
-        # NOTE(boris-42): Wrong type of foo instance. it should be
-        # instance of sqlalchemy.Column.
-        self.assertRaises(exception.NovaException,
-                          utils.drop_unique_constraint,
-                          engine, table_name, uc_name, 'foo', foo=Integer())
+            foo = Column('foo', CustomType, default=0)
+            utils.drop_unique_constraint(engine, table_name, uc_name, 'foo',
+                                         foo=foo)
-        foo = Column('foo', CustomType, default=0)
-        utils.drop_unique_constraint(engine, table_name, uc_name, 'foo',
-                                     foo=foo)
-
-        s = test_table.select().order_by(test_table.c.id)
-        rows = engine.execute(s).fetchall()
+            s = test_table.select().order_by(test_table.c.id)
+            rows = engine.execute(s).fetchall()
 
         for i in xrange(0, len(values)):
             v = values[i]
@@ -168,7 +170,7 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
         return test_table, values
 
     def test_drop_old_duplicate_entries_from_table(self):
-        table_name = "__test_tmp_table__"
+        table_name = "test_drop_old_duplicate_entries_from_table"
 
         for key, engine in self.engines.items():
             meta = MetaData()
@@ -195,9 +197,10 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
             self.assertEqual(len(real_ids), len(expected_ids))
             for id_ in expected_ids:
                 self.assertIn(id_, real_ids)
+            test_table.drop()
 
     def test_drop_old_duplicate_entries_from_table_soft_delete(self):
-        table_name = "__test_tmp_table__"
+        table_name = "test_drop_old_duplicate_entries_from_table_soft_delete"
 
         for key, engine in self.engines.items():
             meta = MetaData()
@@ -237,9 +240,10 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
                              len(values) - len(row_ids))
             for value in soft_deleted_values:
                 self.assertIn(value['id'], deleted_rows_ids)
+            table.drop()
 
     def test_check_shadow_table(self):
-        table_name = 'abc'
+        table_name = 'test_check_shadow_table'
 
         for key, engine in self.engines.items():
             meta = MetaData()
             meta.bind = engine
@@ -274,8 +278,11 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
             self.assertRaises(exception.NovaException,
                               utils.check_shadow_table, engine, table_name)
 
+            table.drop()
+            shadow_table.drop()
+
     def test_check_shadow_table_different_types(self):
-        table_name = 'abc'
+        table_name = 'test_check_shadow_table_different_types'
 
         for key, engine in self.engines.items():
             meta = MetaData()
             meta.bind = engine
@@ -292,8 +299,11 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
             self.assertRaises(exception.NovaException,
                               utils.check_shadow_table, engine, table_name)
 
+            table.drop()
+            shadow_table.drop()
+
     def test_check_shadow_table_with_unsupported_type(self):
-        table_name = 'abc'
+        table_name = 'test_check_shadow_table_with_unsupported_type'
         engine = self.engines['sqlite']
         meta = MetaData(bind=engine)
@@ -309,9 +319,10 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
                              Column('c', CustomType))
         shadow_table.create()
         self.assertTrue(utils.check_shadow_table(engine, table_name))
+        shadow_table.drop()
 
     def test_create_shadow_table_by_table_instance(self):
-        table_name = 'abc'
+        table_name = 'test_create_shadow_table_by_table_instance'
         for key, engine in self.engines.items():
             meta = MetaData()
             meta.bind = engine
@@ -320,11 +331,13 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
                           Column('a', Integer),
                           Column('b', String(256)))
             table.create()
-            utils.create_shadow_table(engine, table=table)
+            shadow_table = utils.create_shadow_table(engine, table=table)
             self.assertTrue(utils.check_shadow_table(engine, table_name))
+            table.drop()
+            shadow_table.drop()
 
     def test_create_shadow_table_by_name(self):
-        table_name = 'abc'
+        table_name = 'test_create_shadow_table_by_name'
         for key, engine in self.engines.items():
             meta = MetaData()
             meta.bind = engine
@@ -334,25 +347,33 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
                           Column('a', Integer),
                           Column('b', String(256)))
             table.create()
-            utils.create_shadow_table(engine, table_name=table_name)
+            shadow_table = utils.create_shadow_table(engine,
+                                                     table_name=table_name)
             self.assertTrue(utils.check_shadow_table(engine, table_name))
+            table.drop()
+            shadow_table.drop()
 
     def test_create_shadow_table_not_supported_type(self):
-        table_name = 'abc'
-        engine = self.engines['sqlite']
-        meta = MetaData()
-        meta.bind = engine
-        table = Table(table_name, meta,
-                      Column('id', Integer, primary_key=True),
-                      Column('a', CustomType))
-        table.create()
-        self.assertRaises(exception.NovaException,
-                          utils.create_shadow_table,
-                          engine, table_name=table_name)
+        if 'sqlite' in self.engines:
+            table_name = 'test_create_shadow_table_not_supported_type'
+            engine = self.engines['sqlite']
+            meta = MetaData()
+            meta.bind = engine
+            table = Table(table_name, meta,
+                          Column('id', Integer, primary_key=True),
+                          Column('a', CustomType))
+            table.create()
+            self.assertRaises(exception.NovaException,
+                              utils.create_shadow_table,
+                              engine, table_name=table_name)
 
-        utils.create_shadow_table(engine, table_name=table_name,
-                                  a=Column('a', CustomType()))
-        self.assertTrue(utils.check_shadow_table(engine, table_name))
+            shadow_table = utils.create_shadow_table(engine,
+                                                     table_name=table_name,
+                                                     a=Column('a', CustomType())
+                                                     )
+            self.assertTrue(utils.check_shadow_table(engine, table_name))
+            table.drop()
+            shadow_table.drop()
 
     def test_create_shadow_both_table_and_table_name_are_none(self):
         for key, engine in self.engines.items():
@@ -362,7 +383,8 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
                               utils.create_shadow_table, engine)
 
     def test_create_shadow_both_table_and_table_name_are_specified(self):
-        table_name = 'abc'
+        table_name = ('test_create_shadow_both_table_and_table_name_are_'
+                      'specified')
         for key, engine in self.engines.items():
             meta = MetaData()
             meta.bind = engine
@@ -373,9 +395,10 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
             self.assertRaises(exception.NovaException,
                               utils.create_shadow_table,
                               engine, table=table, table_name=table_name)
+            table.drop()
 
     def test_create_duplicate_shadow_table(self):
-        table_name = 'abc'
+        table_name = 'test_create_duplicate_shadow_table'
         for key, engine in self.engines.items():
             meta = MetaData()
             meta.bind = engine
@@ -383,13 +406,16 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
                           Column('id', Integer, primary_key=True),
                           Column('a', Integer))
             table.create()
-            utils.create_shadow_table(engine, table_name=table_name)
+            shadow_table = utils.create_shadow_table(engine,
+                                                     table_name=table_name)
             self.assertRaises(exception.ShadowTableExists,
                               utils.create_shadow_table,
                               engine, table_name=table_name)
+            table.drop()
+            shadow_table.drop()
 
     def test_change_deleted_column_type_doesnt_drop_index(self):
-        table_name = 'abc'
+        table_name = 'test_change_deleted_column_type_doesnt_drop_index'
         for key, engine in self.engines.items():
             meta = MetaData(bind=engine)
@@ -420,9 +446,10 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
             self.assertIn(name, indexes)
             self.assertEqual(set(index['column_names']),
                              set(indexes[name]))
+            table.drop()
 
     def test_change_deleted_column_type_to_id_type_integer(self):
-        table_name = 'abc'
+        table_name = 'test_change_deleted_column_type_to_id_type_integer'
         for key, engine in self.engines.items():
             meta = MetaData()
             meta.bind = engine
@@ -434,9 +461,10 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
 
             table = utils.get_table(engine, table_name)
             self.assertIsInstance(table.c.deleted.type, Integer)
+            table.drop()
 
     def test_change_deleted_column_type_to_id_type_string(self):
-        table_name = 'abc'
+        table_name = 'test_change_deleted_column_type_to_id_type_string'
         for key, engine in self.engines.items():
             meta = MetaData()
             meta.bind = engine
@@ -448,34 +476,37 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
 
             table = utils.get_table(engine, table_name)
             self.assertIsInstance(table.c.deleted.type, String)
+            table.drop()
 
     def test_change_deleted_column_type_to_id_type_custom(self):
-        table_name = 'abc'
-        engine = self.engines['sqlite']
-        meta = MetaData()
-        meta.bind = engine
-        table = Table(table_name, meta,
-                      Column('id', Integer, primary_key=True),
-                      Column('foo', CustomType),
-                      Column('deleted', Boolean))
-        table.create()
+        if 'sqlite' in self.engines:
+            table_name = 'test_change_deleted_column_type_to_id_type_custom'
+            engine = self.engines['sqlite']
+            meta = MetaData()
+            meta.bind = engine
+            table = Table(table_name, meta,
+                          Column('id', Integer, primary_key=True),
+                          Column('foo', CustomType),
+                          Column('deleted', Boolean))
+            table.create()
 
-        self.assertRaises(exception.NovaException,
-                          utils.change_deleted_column_type_to_id_type,
-                          engine, table_name)
+            self.assertRaises(exception.NovaException,
+                              utils.change_deleted_column_type_to_id_type,
+                              engine, table_name)
 
-        fooColumn = Column('foo', CustomType())
-        utils.change_deleted_column_type_to_id_type(engine, table_name,
-                                                    foo=fooColumn)
+            fooColumn = Column('foo', CustomType())
+            utils.change_deleted_column_type_to_id_type(engine, table_name,
+                                                        foo=fooColumn)
 
-        table = utils.get_table(engine, table_name)
-        # NOTE(boris-42): There is no way to check has foo type CustomType.
-        # but sqlalchemy will set it to NullType.
+            table = utils.get_table(engine, table_name)
+            # NOTE(boris-42): There is no way to check whether foo's type is
+            # CustomType, but sqlalchemy will set it to NullType.
+            self.assertIsInstance(table.c.foo.type, NullType)
+            self.assertIsInstance(table.c.deleted.type, Integer)
+            table.drop()
 
     def test_change_deleted_column_type_to_boolean(self):
-        table_name = 'abc'
+        table_name = 'test_change_deleted_column_type_to_boolean'
        for key, engine in self.engines.items():
             meta = MetaData()
             meta.bind = engine
@@ -489,53 +520,63 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
 
             table = utils.get_table(engine, table_name)
             expected_type = Boolean if key != "mysql" else mysql.TINYINT
             self.assertIsInstance(table.c.deleted.type, expected_type)
+            table.drop()
 
     def test_change_deleted_column_type_to_boolean_type_custom(self):
-        table_name = 'abc'
-        engine = self.engines['sqlite']
-        meta = MetaData()
-        meta.bind = engine
-        table = Table(table_name, meta,
-                      Column('id', Integer, primary_key=True),
-                      Column('foo', CustomType),
-                      Column('deleted', Integer))
-        table.create()
+        if 'sqlite' in self.engines:
+            table_name = \
+                'test_change_deleted_column_type_to_boolean_type_custom'
+            engine = self.engines['sqlite']
+            meta = MetaData()
+            meta.bind = engine
+            table = Table(table_name, meta,
+                          Column('id', Integer, primary_key=True),
+                          Column('foo', CustomType),
+                          Column('deleted', Integer))
+            table.create()
 
-        self.assertRaises(exception.NovaException,
-                          utils.change_deleted_column_type_to_boolean,
-                          engine, table_name)
+            self.assertRaises(exception.NovaException,
+                              utils.change_deleted_column_type_to_boolean,
+                              engine, table_name)
 
-        fooColumn = Column('foo', CustomType())
-        utils.change_deleted_column_type_to_boolean(engine, table_name,
-                                                    foo=fooColumn)
+            fooColumn = Column('foo', CustomType())
+            utils.change_deleted_column_type_to_boolean(engine, table_name,
+                                                        foo=fooColumn)
 
-        table = utils.get_table(engine, table_name)
-        # NOTE(boris-42): There is no way to check has foo type CustomType.
-        # but sqlalchemy will set it to NullType.
+            table = utils.get_table(engine, table_name)
+            # NOTE(boris-42): There is no way to check whether foo's type is
+            # CustomType, but sqlalchemy will set it to NullType.
+            self.assertIsInstance(table.c.foo.type, NullType)
+            self.assertIsInstance(table.c.deleted.type, Boolean)
+            table.drop()
 
     def test_drop_unique_constraint_in_sqlite_fk_recreate(self):
-        engine = self.engines['sqlite']
-        meta = MetaData()
-        meta.bind = engine
-        parent_table = Table('table0', meta,
-                             Column('id', Integer, primary_key=True),
-                             Column('foo', Integer))
-        parent_table.create()
-        table_name = 'table1'
-        table = Table(table_name, meta,
-                      Column('id', Integer, primary_key=True),
-                      Column('baz', Integer),
-                      Column('bar', Integer, ForeignKey("table0.id")),
-                      UniqueConstraint('baz', name='constr1'))
-        table.create()
-        utils.drop_unique_constraint(engine, table_name, 'constr1', 'baz')
+        if 'sqlite' in self.engines:
+            engine = self.engines['sqlite']
+            meta = MetaData()
+            meta.bind = engine
+            parent_table_name = ('test_drop_unique_constraint_in_sqlite_fk_'
+                                 'recreate_parent_table')
+            parent_table = Table(parent_table_name, meta,
+                                 Column('id', Integer, primary_key=True),
+                                 Column('foo', Integer))
+            parent_table.create()
+            table_name = 'test_drop_unique_constraint_in_sqlite_fk_recreate'
+            table = Table(table_name, meta,
+                          Column('id', Integer, primary_key=True),
+                          Column('baz', Integer),
+                          Column('bar', Integer,
+                                 ForeignKey(parent_table_name + ".id")),
+                          UniqueConstraint('baz', name='constr1'))
+            table.create()
+            utils.drop_unique_constraint(engine, table_name, 'constr1', 'baz')
 
-        insp = reflection.Inspector.from_engine(engine)
-        f_keys = insp.get_foreign_keys(table_name)
-        self.assertEqual(len(f_keys), 1)
-        f_key = f_keys[0]
-        self.assertEqual(f_key['referred_table'], 'table0')
-        self.assertEqual(f_key['referred_columns'], ['id'])
-        self.assertEqual(f_key['constrained_columns'], ['bar'])
+            insp = reflection.Inspector.from_engine(engine)
+            f_keys = insp.get_foreign_keys(table_name)
+            self.assertEqual(len(f_keys), 1)
+            f_key = f_keys[0]
+            self.assertEqual(f_key['referred_table'], parent_table_name)
+            self.assertEqual(f_key['referred_columns'], ['id'])
+            self.assertEqual(f_key['constrained_columns'], ['bar'])
+            table.drop()
+            parent_table.drop()
diff --git a/nova/tests/db/test_migrations.conf b/nova/tests/db/test_migrations.conf
index 774f1499406e..310b7055c419 100644
--- a/nova/tests/db/test_migrations.conf
+++ b/nova/tests/db/test_migrations.conf
@@ -1,9 +1,26 @@
-[DEFAULT]
-# Set up any number of migration data stores you want, one
+[unit_tests]
+# Set up any number of databases to test concurrently.
 # The "name" used in the test is the config variable key.
-#sqlite=sqlite:///test_migrations.db
+
+# A few tests rely on one sqlite database with 'sqlite' as the key.
+
 sqlite=sqlite://
-#mysql=mysql://root:@localhost/test_migrations
-#postgresql=postgresql://user:pass@localhost/test_migrations
+#sqlitefile=sqlite:///test_migrations_utils.db
+#mysql=mysql+mysqldb://user:pass@localhost/test_migrations_utils
+#postgresql=postgresql+psycopg2://user:pass@localhost/test_migrations_utils
+
+[migration_dbs]
+# Migration DB details are listed separately as they can't be connected to
+# concurrently. These databases can't be the same as the ones above.
+
+# Note: sqlite:// is in-memory and unique each time it is spawned;
+# file-based sqlite databases are not.
+
+sqlite=sqlite://
+#sqlitefile=sqlite:///test_migrations.db
+#mysql=mysql+mysqldb://user:pass@localhost/test_migrations
+#postgresql=postgresql+psycopg2://user:pass@localhost/test_migrations
+
 [walk_style]
 snake_walk=yes
+downgrade=yes
diff --git a/nova/tests/db/test_migrations.py b/nova/tests/db/test_migrations.py
index 781d57b8f1ac..ff212ba8a395 100644
--- a/nova/tests/db/test_migrations.py
+++ b/nova/tests/db/test_migrations.py
@@ -38,6 +38,8 @@ sudo -u postgres psql
 postgres=# create user openstack_citest with createdb login password
       'openstack_citest';
 postgres=# create database openstack_citest with owner openstack_citest;
+postgres=# create database openstack_baremetal_citest with owner
+      openstack_citest;
 """
@@ -156,7 +158,9 @@ class CommonTestsMixIn(object):
     """
     def test_walk_versions(self):
         for key, engine in self.engines.items():
-            self._walk_versions(engine, self.snake_walk)
+            # We start each walk with a completely blank slate.
+            self._reset_database(key)
+            self._walk_versions(engine, self.snake_walk, self.downgrade)
 
     def test_mysql_opportunistically(self):
         self._test_mysql_opportunistically()
@@ -184,7 +188,18 @@ class CommonTestsMixIn(object):
 
 
 class BaseMigrationTestCase(test.NoDBTestCase):
-    """Base class fort testing migrations and migration utils."""
+    """Base class for testing migrations and migration utils. This sets up
+    and configures the databases to run tests against.
+    """
+
+    # NOTE(jhesketh): It is expected that tests clean up after themselves.
+    # This is necessary for concurrency to allow multiple tests to work on
+    # one database.
+    # The full migration walk tests, however, do call the old
+    # _reset_databases() to throw away whatever was there, so they need to
+    # operate on their own database that we know isn't accessed concurrently.
+    # Hence, BaseWalkMigrationTestCase overwrites the engine list.
+
     USER = None
     PASSWD = None
     DATABASE = None
@@ -204,13 +219,16 @@ class BaseMigrationTestCase(test.NoDBTestCase):
         self.INIT_VERSION = 0
         self.snake_walk = False
+        self.downgrade = False
         self.test_databases = {}
         self.migration = None
         self.migration_api = None
 
     def setUp(self):
         super(BaseMigrationTestCase, self).setUp()
+        self._load_config()
 
+    def _load_config(self):
         # Load test databases from the config file. Only do this
         # once. No need to re-run this on each test...
         LOG.debug('config_path is %s' % self.CONFIG_FILE_PATH)
@@ -218,10 +236,12 @@ class BaseMigrationTestCase(test.NoDBTestCase):
             cp = ConfigParser.RawConfigParser()
             try:
                 cp.read(self.CONFIG_FILE_PATH)
-                defaults = cp.defaults()
-                for key, value in defaults.items():
-                    self.test_databases[key] = value
+                config = cp.options('unit_tests')
+                for key in config:
+                    self.test_databases[key] = cp.get('unit_tests', key)
                 self.snake_walk = cp.getboolean('walk_style', 'snake_walk')
+                self.downgrade = cp.getboolean('walk_style', 'downgrade')
+
             except ConfigParser.ParsingError as e:
                 self.fail("Failed to read test_migrations.conf config "
                           "file. Got error: %s" % e)
@@ -233,15 +253,9 @@ class BaseMigrationTestCase(test.NoDBTestCase):
         for key, value in self.test_databases.items():
             self.engines[key] = sqlalchemy.create_engine(value)
 
-        # We start each test case with a completely blank slate.
-        self._reset_databases()
-
-    def tearDown(self):
-        # We destroy the test data store between each test case,
-        # and recreate it, which ensures that we have no side-effects
-        # from the tests
-        self._reset_databases()
-        super(BaseMigrationTestCase, self).tearDown()
+        # NOTE(jhesketh): We only need to make sure the databases are created,
+        # not necessarily clean of tables.
+        self._create_databases()
 
     def execute_cmd(self, cmd=None):
         status, output = commands.getstatusoutput(cmd)
@@ -273,34 +287,123 @@ class BaseMigrationTestCase(test.NoDBTestCase):
         os.unsetenv('PGPASSWORD')
         os.unsetenv('PGUSER')
 
-    def _reset_databases(self):
+    @utils.synchronized('mysql', external=True)
+    def _reset_mysql(self, conn_pieces):
+        # We can execute the MySQL client to destroy and re-create
+        # the MYSQL database, which is easier and less error-prone
+        # than using SQLAlchemy to do this via MetaData...trust me.
+        (user, password, database, host) = \
+            get_mysql_connection_info(conn_pieces)
+        sql = ("drop database if exists %(database)s; "
+               "create database %(database)s;" % {'database': database})
+        cmd = ("mysql -u \"%(user)s\" %(password)s -h %(host)s "
+               "-e \"%(sql)s\"" % {'user': user, 'password': password,
                                    'host': host, 'sql': sql})
+        self.execute_cmd(cmd)
+
+    @utils.synchronized('sqlite', external=True)
+    def _reset_sqlite(self, conn_pieces):
+        # We can just delete the SQLite database, which is
+        # the easiest and cleanest solution
+        db_path = conn_pieces.path.strip('/')
+        if os.path.exists(db_path):
+            os.unlink(db_path)
+        # No need to recreate the SQLite DB. SQLite will
+        # create it for us if it's not there...
+
+    def _create_databases(self):
+        """Create all configured databases as needed."""
         for key, engine in self.engines.items():
-            conn_string = self.test_databases[key]
-            conn_pieces = urlparse.urlparse(conn_string)
-            engine.dispose()
-            if conn_string.startswith('sqlite'):
-                # We can just delete the SQLite database, which is
-                # the easiest and cleanest solution
-                db_path = conn_pieces.path.strip('/')
-                if os.path.exists(db_path):
-                    os.unlink(db_path)
-                # No need to recreate the SQLite DB. SQLite will
-                # create it for us if it's not there...
-            elif conn_string.startswith('mysql'):
-                # We can execute the MySQL client to destroy and re-create
-                # the MYSQL database, which is easier and less error-prone
-                # than using SQLAlchemy to do this via MetaData...trust me.
-                (user, password, database, host) = \
-                    get_mysql_connection_info(conn_pieces)
-                sql = ("drop database if exists %(database)s; "
-                       "create database %(database)s;"
-                       % {'database': database})
-                cmd = ("mysql -u \"%(user)s\" %(password)s -h %(host)s "
-                       "-e \"%(sql)s\"" % {'user': user,
-                       'password': password, 'host': host, 'sql': sql})
-                self.execute_cmd(cmd)
-            elif conn_string.startswith('postgresql'):
-                self._reset_pg(conn_pieces)
+            self._create_database(key)
+
+    def _create_database(self, key):
+        """Create database if it doesn't exist."""
+        conn_string = self.test_databases[key]
+        conn_pieces = urlparse.urlparse(conn_string)
+
+        if conn_string.startswith('mysql'):
+            (user, password, database, host) = \
+                get_mysql_connection_info(conn_pieces)
+            sql = "create database if not exists %s;" % database
+            cmd = ("mysql -u \"%(user)s\" %(password)s -h %(host)s "
+                   "-e \"%(sql)s\"" % {'user': user, 'password': password,
+                                       'host': host, 'sql': sql})
+            self.execute_cmd(cmd)
+        elif conn_string.startswith('postgresql'):
+            (user, password, database, host) = \
+                get_pgsql_connection_info(conn_pieces)
+            os.environ['PGPASSWORD'] = password
+            os.environ['PGUSER'] = user
+
+            sqlcmd = ("psql -w -U %(user)s -h %(host)s -c"
+                      " '%(sql)s' -d template1")
+
+            sql = ("create database if not exists %s;") % database
+            createtable = sqlcmd % {'user': user, 'host': host, 'sql': sql}
+            status, output = commands.getstatusoutput(createtable)
+            if status != 0 and status != 256:
+                # 0 means the database was created
+                # 256 means it already exists (which is fine)
+                # otherwise raise an error
+                self.fail("Failed to run: %s\n%s" % (createtable, output))
+
+            os.unsetenv('PGPASSWORD')
+            os.unsetenv('PGUSER')
+
+    def _reset_databases(self):
+        """Reset all configured databases."""
+        for key, engine in self.engines.items():
+            self._reset_database(key)
+
+    def _reset_database(self, key):
+        """Reset specific database."""
+        engine = self.engines[key]
+        conn_string = self.test_databases[key]
+        conn_pieces = urlparse.urlparse(conn_string)
+        engine.dispose()
+        if conn_string.startswith('sqlite'):
+            self._reset_sqlite(conn_pieces)
+        elif conn_string.startswith('mysql'):
+            self._reset_mysql(conn_pieces)
+        elif conn_string.startswith('postgresql'):
+            self._reset_pg(conn_pieces)
+
+
+class BaseWalkMigrationTestCase(BaseMigrationTestCase):
+    """BaseWalkMigrationTestCase loads in an alternative set of databases for
+    testing against. This is necessary as the default databases are shared
+    between concurrently running tests, while the migration walks need
+    exclusive access. It is expected that the databases listed under
+    [migration_dbs] in the configuration are only accessed by one test at a
+    time. Currently only test_walk_versions accesses the databases (and is
+    the only method that calls _reset_database(), which is clearly
+    problematic for concurrency).
+    """
+
+    def _load_config(self):
+        # Load test databases from the config file. Only do this
+        # once. No need to re-run this on each test...
+        LOG.debug('config_path is %s' % self.CONFIG_FILE_PATH)
+        if os.path.exists(self.CONFIG_FILE_PATH):
+            cp = ConfigParser.RawConfigParser()
+            try:
+                cp.read(self.CONFIG_FILE_PATH)
+                config = cp.options('migration_dbs')
+                for key in config:
+                    self.test_databases[key] = cp.get('migration_dbs', key)
+                self.snake_walk = cp.getboolean('walk_style', 'snake_walk')
+                self.downgrade = cp.getboolean('walk_style', 'downgrade')
+            except ConfigParser.ParsingError as e:
+                self.fail("Failed to read test_migrations.conf config "
+                          "file. Got error: %s" % e)
+        else:
+            self.fail("Failed to find test_migrations.conf config "
+                      "file.")
+
+        self.engines = {}
+        for key, value in self.test_databases.items():
+            self.engines[key] = sqlalchemy.create_engine(value)
+
+        self._create_databases()
 
     def _test_mysql_opportunistically(self):
         # Test that table creation on mysql only builds InnoDB tables
@@ -317,8 +420,8 @@ class BaseMigrationTestCase(test.NoDBTestCase):
         self.test_databases[database] = connect_string
 
         # build a fully populated mysql database with all the tables
-        self._reset_databases()
-        self._walk_versions(engine, False, False)
+        self._reset_database(database)
+        self._walk_versions(engine, self.snake_walk, self.downgrade)
 
         connection = engine.connect()
         # sanity check
@@ -338,6 +441,9 @@ class BaseMigrationTestCase(test.NoDBTestCase):
         self.assertEqual(count, 0, "%d non InnoDB tables created" % count)
         connection.close()
 
+        del(self.engines[database])
+        del(self.test_databases[database])
+
     def _test_postgresql_opportunistically(self):
         # Test postgresql database migration walk
         if not _have_postgresql(self.USER, self.PASSWD, self.DATABASE):
@@ -353,8 +459,10 @@ class BaseMigrationTestCase(test.NoDBTestCase):
         self.test_databases[database] = connect_string
 
         # build a fully populated postgresql database with all the tables
-        self._reset_databases()
-        self._walk_versions(engine, False, False)
+        self._reset_database(database)
+        self._walk_versions(engine, self.snake_walk, self.downgrade)
+        del(self.engines[database])
+        del(self.test_databases[database])
 
     def _walk_versions(self, engine=None, snake_walk=False, downgrade=True):
         # Determine latest version script from the repo, then
@@ -447,7 +555,7 @@ class BaseMigrationTestCase(test.NoDBTestCase):
                 raise
 
 
-class TestNovaMigrations(BaseMigrationTestCase, CommonTestsMixIn):
+class TestNovaMigrations(BaseWalkMigrationTestCase, CommonTestsMixIn):
     """Test sqlalchemy-migrate migrations."""
     USER = "openstack_citest"
     PASSWD = "openstack_citest"
@@ -2607,6 +2715,30 @@ class TestNovaMigrations(BaseMigrationTestCase, CommonTestsMixIn):
             for index in indexes:
                 self.assertNotIn(index, current_indexes)
 
+        # Check indexes are gone
+        if engine.name == 'mysql' or engine.name == 'postgresql':
+            data = {
+                # table_name: ((idx_1, (c1, c2,)), (idx2, (c1, c2,)), ...)
+                'quota_usages': (
+                    ('ix_quota_usages_user_id_deleted',
+                     ('user_id', 'deleted')),
+                ),
+                'reservations': (
+                    ('ix_reservations_user_id_deleted',
+                     ('user_id', 'deleted')),
+                )
+            }
+
+            meta = sqlalchemy.MetaData()
+            meta.bind = engine
+
+            for table_name, indexes in data.iteritems():
+                table = sqlalchemy.Table(table_name, meta, autoload=True)
+                current_indexes = [(i.name, tuple(i.columns.keys()))
+                                   for i in table.indexes]
+                for index in indexes:
+                    self.assertNotIn(index, current_indexes)
+
     def _check_204(self, engine, data):
         if engine.name != 'sqlite':
             return
@@ -3049,18 +3181,18 @@ class TestNovaMigrations(BaseMigrationTestCase, CommonTestsMixIn):
                      "local_gb_used": 1,
                      "deleted": 0, "hypervisor_type": "fake_type",
                      "hypervisor_version": 1,
-                     "service_id": 1, "id": 1},
+                     "service_id": 1, "id": 10001},
                     {"vcpus": 1, "cpu_info": "info", "memory_mb": 1,
                      "local_gb": 1,
                      "vcpus_used": 1, "memory_mb_used": 1,
                      "local_gb_used": 1,
                      "deleted": 2, "hypervisor_type": "fake_type",
                      "hypervisor_version": 1,
-                     "service_id": 1, "id": 2}],
-               "compute_node_stats": [{"id": 10, "compute_node_id": 1,
+                     "service_id": 1, "id": 10002}],
+               "compute_node_stats": [{"id": 10, "compute_node_id": 10001,
                                        "key": "fake-1", "deleted": 0},
-                                      {"id": 20, "compute_node_id": 2,
+                                      {"id": 20, "compute_node_id": 10002,
                                        "key": "fake-2", "deleted": 0}]}
         return ret
@@ -3209,7 +3341,7 @@ class TestNovaMigrations(BaseMigrationTestCase, CommonTestsMixIn):
             self.assertEqual(quota['resource'], 'injected_file_content_bytes')
 
 
-class TestBaremetalMigrations(BaseMigrationTestCase, CommonTestsMixIn):
+class TestBaremetalMigrations(BaseWalkMigrationTestCase, CommonTestsMixIn):
     """Test sqlalchemy-migrate migrations."""
     USER = "openstack_citest"
     PASSWD = "openstack_citest"
diff --git a/nova/tests/virt/baremetal/test_baremetal_migrations.conf b/nova/tests/virt/baremetal/test_baremetal_migrations.conf
index 774f1499406e..92681b41b33b 100644
--- a/nova/tests/virt/baremetal/test_baremetal_migrations.conf
+++ b/nova/tests/virt/baremetal/test_baremetal_migrations.conf
@@ -1,9 +1,24 @@
-[DEFAULT]
-# Set up any number of migration data stores you want, one
+[unit_tests]
+# Set up any number of databases to test concurrently.
 # The "name" used in the test is the config variable key.
-#sqlite=sqlite:///test_migrations.db
+
 sqlite=sqlite://
-#mysql=mysql://root:@localhost/test_migrations
-#postgresql=postgresql://user:pass@localhost/test_migrations
+#sqlitefile=sqlite:///test_baremetal_migrations_utils.db
+#mysql=mysql+mysqldb://user:pass@localhost/test_baremetal_migrations_utils
+#postgresql=postgresql+psycopg2://user:pass@localhost/test_baremetal_migrations_utils
+
+[migration_dbs]
+# Migration DB details are listed separately as they can't be connected to
+# concurrently. These databases can't be the same as the ones above.
+
+# Note: sqlite:// is in-memory and unique each time it is spawned;
+# file-based sqlite databases are not.
+
+sqlite=sqlite://
+#sqlitefile=sqlite:///test_baremetal_migrations.db
+#mysql=mysql+mysqldb://user:pass@localhost/test_baremetal_migrations
+#postgresql=postgresql+psycopg2://user:pass@localhost/test_baremetal_migrations
+
 [walk_style]
 snake_walk=yes
+downgrade=yes
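
The cleanup convention this change relies on — per-test table names plus explicit drops, so many tests can safely share one database — can be illustrated with a minimal sketch. This is a hypothetical test, not part of the patch; it assumes the same engines dict that BaseMigrationTestCase sets up above:

    from sqlalchemy import Column, Integer, MetaData, Table


    class TestExample(test_migrations.BaseMigrationTestCase):
        def test_something(self):
            # Name the table after the test so concurrently running tests
            # never collide on the shared databases.
            table_name = 'test_something'
            for key, engine in self.engines.items():
                meta = MetaData(bind=engine)
                table = Table(table_name, meta,
                              Column('id', Integer, primary_key=True))
                table.create()
                try:
                    pass  # ... exercise the migration utility under test ...
                finally:
                    # Clean up after ourselves: the database is shared, but
                    # the table is ours to drop.
                    table.drop()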
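
Likewise, how the new [unit_tests] and [migration_dbs] sections are consumed can be shown with a standalone sketch that mirrors the _load_config() methods above (Python 2, as in the patch; the config path is an assumption):

    import ConfigParser

    cp = ConfigParser.RawConfigParser()
    cp.read('nova/tests/db/test_migrations.conf')

    # Databases that are safe for concurrent access by the utility tests.
    unit_dbs = dict((key, cp.get('unit_tests', key))
                    for key in cp.options('unit_tests'))

    # Databases reserved for the migration walks, one test at a time.
    walk_dbs = dict((key, cp.get('migration_dbs', key))
                    for key in cp.options('migration_dbs'))

    snake_walk = cp.getboolean('walk_style', 'snake_walk')
    downgrade = cp.getboolean('walk_style', 'downgrade')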