diff --git a/nova/db/main/legacy_migrations/versions/402_train.py b/nova/db/main/legacy_migrations/versions/402_train.py
index 6ce9bdde17bd..58d207f40a18 100644
--- a/nova/db/main/legacy_migrations/versions/402_train.py
+++ b/nova/db/main/legacy_migrations/versions/402_train.py
@@ -1574,19 +1574,25 @@ def upgrade(migrate_engine):
 
     if migrate_engine.name == 'mysql':
         # In Folsom we explicitly converted migrate_version to UTF8.
-        migrate_engine.execute(
-            'ALTER TABLE migrate_version CONVERT TO CHARACTER SET utf8')
-        # Set default DB charset to UTF8.
-        migrate_engine.execute(
-            'ALTER DATABASE `%s` DEFAULT CHARACTER SET utf8' %
-            migrate_engine.url.database)
+        with migrate_engine.connect() as conn:
+            conn.exec_driver_sql(
+                'ALTER TABLE migrate_version CONVERT TO CHARACTER SET utf8'
+            )
+            # Set default DB charset to UTF8.
+            conn.exec_driver_sql(
+                'ALTER DATABASE `%s` DEFAULT CHARACTER SET utf8' % (
+                    migrate_engine.url.database,
+                )
+            )
 
-        # NOTE(cdent): The resource_providers table is defined as latin1 to be
-        # more efficient. Now we need the name column to be UTF8. We modify it
-        # here otherwise the declarative handling in sqlalchemy gets confused.
-        migrate_engine.execute(
-            'ALTER TABLE resource_providers MODIFY name '
-            'VARCHAR(200) CHARACTER SET utf8')
+            # NOTE(cdent): The resource_providers table is defined as latin1 to
+            # be more efficient. Now we need the name column to be UTF8. We
+            # modify it here otherwise the declarative handling in sqlalchemy
+            # gets confused.
+            conn.exec_driver_sql(
+                'ALTER TABLE resource_providers MODIFY name '
+                'VARCHAR(200) CHARACTER SET utf8'
+            )
 
     _create_shadow_tables(migrate_engine)
 
@@ -1596,9 +1602,10 @@ def upgrade(migrate_engine):
     # also
 
     if migrate_engine.name == 'mysql':
-        # Use binary collation for extra specs table
-        migrate_engine.execute(
-            'ALTER TABLE instance_type_extra_specs '
-            'CONVERT TO CHARACTER SET utf8 '
-            'COLLATE utf8_bin'
-        )
+        with migrate_engine.connect() as conn:
+            # Use binary collation for extra specs table
+            conn.exec_driver_sql(
+                'ALTER TABLE instance_type_extra_specs '
+                'CONVERT TO CHARACTER SET utf8 '
+                'COLLATE utf8_bin'
+            )
diff --git a/nova/test.py b/nova/test.py
index b91c6b304280..a6449c01f039 100644
--- a/nova/test.py
+++ b/nova/test.py
@@ -388,7 +388,7 @@ class TestCase(base.BaseTestCase):
         engine = db_api.get_engine()
         dialect = engine.url.get_dialect()
         if dialect == sqlite.dialect:
-            engine.connect().execute("PRAGMA foreign_keys = ON")
+            engine.connect().exec_driver_sql("PRAGMA foreign_keys = ON")
 
     def start_service(self, name, host=None, cell_name=None, **kwargs):
         # Disallow starting multiple scheduler services
diff --git a/nova/tests/fixtures/nova.py b/nova/tests/fixtures/nova.py
index d8da016e6fd4..6d8cb65e68a0 100644
--- a/nova/tests/fixtures/nova.py
+++ b/nova/tests/fixtures/nova.py
@@ -889,12 +889,6 @@ class WarningsFixture(fixtures.Fixture):
             message=r'The Connection.connect\(\) method is considered .*',
             category=sqla_exc.SADeprecationWarning)
 
-        warnings.filterwarnings(
-            'ignore',
-            module='nova',
-            message=r'Passing a string to Connection.execute\(\) is .*',
-            category=sqla_exc.SADeprecationWarning)
-
         warnings.filterwarnings(
             'ignore',
             module='nova',
diff --git a/nova/tests/unit/db/main/test_api.py b/nova/tests/unit/db/main/test_api.py
index a3e218438311..7d262fd08907 100644
--- a/nova/tests/unit/db/main/test_api.py
+++ b/nova/tests/unit/db/main/test_api.py
@@ -5688,7 +5688,9 @@ class ArchiveTestCase(test.TestCase, ModelsObjectComparatorMixin):
         metadata.reflect()
         for table in metadata.tables:
             if table.startswith("shadow_") and table not in exceptions:
-                rows = self.conn.execute("select * from %s" % table).fetchall()
+                rows = self.conn.exec_driver_sql(
+                    "SELECT * FROM %s" % table
+                ).fetchall()
                 self.assertEqual(rows, [], "Table %s not empty" % table)
 
     def test_shadow_tables(self):
diff --git a/nova/tests/unit/db/main/test_migrations.py b/nova/tests/unit/db/main/test_migrations.py
index cb253dfc5b65..f5ce3697b367 100644
--- a/nova/tests/unit/db/main/test_migrations.py
+++ b/nova/tests/unit/db/main/test_migrations.py
@@ -37,6 +37,8 @@ from oslo_db.sqlalchemy import utils as oslodbutils
 from oslo_log.fixture import logging_error as log_fixture
 from oslo_log import log as logging
 from oslotest import base
+import sqlalchemy as sa
+import sqlalchemy.exc
 
 from nova.db.main import models
 from nova.db import migration
@@ -137,21 +139,29 @@ class TestModelsSyncMySQL(
     def test_innodb_tables(self):
         self.db_sync(self.get_engine())
 
-        total = self.engine.execute(
-            "SELECT count(*) "
-            "FROM information_schema.TABLES "
-            "WHERE TABLE_SCHEMA = '%(database)s'" %
-            {'database': self.engine.url.database})
+        with self.engine.connect() as conn:
+            total = conn.execute(
+                sa.text(
+                    "SELECT count(*) "
+                    "FROM information_schema.TABLES "
+                    "WHERE TABLE_SCHEMA = :database"
+                ),
+                {'database': self.engine.url.database},
+            )
         self.assertGreater(total.scalar(), 0, "No tables found. Wrong schema?")
 
-        noninnodb = self.engine.execute(
-            "SELECT count(*) "
-            "FROM information_schema.TABLES "
-            "WHERE TABLE_SCHEMA='%(database)s' "
-            "AND ENGINE != 'InnoDB' "
-            "AND TABLE_NAME != 'migrate_version'" %
-            {'database': self.engine.url.database})
-        count = noninnodb.scalar()
+        with self.engine.connect() as conn:
+            noninnodb = conn.execute(
+                sa.text(
+                    "SELECT count(*) "
+                    "FROM information_schema.TABLES "
+                    "WHERE TABLE_SCHEMA = :database "
+                    "AND ENGINE != 'InnoDB' "
+                    "AND TABLE_NAME != 'migrate_version'"
+                ),
+                {'database': self.engine.url.database},
+            )
+            count = noninnodb.scalar()
         self.assertEqual(count, 0, "%d non InnoDB tables created" % count)
 
 
diff --git a/nova/tests/unit/test_fixtures.py b/nova/tests/unit/test_fixtures.py
index 69ce3cc48d71..22b278771a3a 100644
--- a/nova/tests/unit/test_fixtures.py
+++ b/nova/tests/unit/test_fixtures.py
@@ -28,7 +28,7 @@ from oslo_utils.fixture import uuidsentinel as uuids
 from oslo_utils import timeutils
 from oslo_utils import uuidutils
 from oslotest import output
-import sqlalchemy
+import sqlalchemy as sa
 import testtools
 
 from nova.compute import rpcapi as compute_rpcapi
@@ -125,17 +125,19 @@ class TestDatabaseFixture(testtools.TestCase):
         self.useFixture(db_fixture)
         engine = main_db_api.get_engine()
         conn = engine.connect()
-        result = conn.execute("select * from instance_types")
+        result = conn.execute(sa.text("SELECT * FROM instance_types"))
         rows = result.fetchall()
         self.assertEqual(0, len(rows), "Rows %s" % rows)
 
         # insert a 6th instance type, column 5 below is an int id
         # which has a constraint on it, so if new standard instance
         # types are added you have to bump it.
-        conn.execute("insert into instance_types VALUES "
-                     "(NULL, NULL, NULL, 't1.test', 6, 4096, 2, 0, NULL, '87'"
-                     ", 1.0, 40, 0, 0, 1, 0)")
-        result = conn.execute("select * from instance_types")
+        conn.execute(sa.text(
+            "INSERT INTO instance_types VALUES "
+            "(NULL, NULL, NULL, 't1.test', 6, 4096, 2, 0, NULL, '87'"
+            ", 1.0, 40, 0, 0, 1, 0)"
+        ))
+        result = conn.execute(sa.text("SELECT * FROM instance_types"))
         rows = result.fetchall()
         self.assertEqual(1, len(rows), "Rows %s" % rows)
 
@@ -145,7 +147,7 @@ class TestDatabaseFixture(testtools.TestCase):
         db_fixture.reset()
         engine = main_db_api.get_engine()
         conn = engine.connect()
-        result = conn.execute("select * from instance_types")
+        result = conn.execute(sa.text("SELECT * FROM instance_types"))
         rows = result.fetchall()
         self.assertEqual(0, len(rows), "Rows %s" % rows)
 
@@ -156,14 +158,19 @@ class TestDatabaseFixture(testtools.TestCase):
         self.useFixture(db_fixture)
         engine = api_db_api.get_engine()
         conn = engine.connect()
-        result = conn.execute("select * from cell_mappings")
+        result = conn.execute(sa.text("SELECT * FROM cell_mappings"))
         rows = result.fetchall()
         self.assertEqual(0, len(rows), "Rows %s" % rows)
 
         uuid = uuidutils.generate_uuid()
-        conn.execute("insert into cell_mappings (uuid, name) VALUES "
-                     "('%s', 'fake-cell')" % (uuid,))
-        result = conn.execute("select * from cell_mappings")
+        conn.execute(
+            sa.text(
+                "INSERT INTO cell_mappings (uuid, name) VALUES (:uuid, :name)"
+            ),
+            uuid=uuid,
+            name='fake-cell',
+        )
+        result = conn.execute(sa.text("SELECT * FROM cell_mappings"))
         rows = result.fetchall()
         self.assertEqual(1, len(rows), "Rows %s" % rows)
 
@@ -173,7 +180,7 @@ class TestDatabaseFixture(testtools.TestCase):
         db_fixture.reset()
         engine = api_db_api.get_engine()
         conn = engine.connect()
-        result = conn.execute("select * from cell_mappings")
+        result = conn.execute(sa.text("SELECT * FROM cell_mappings"))
         rows = result.fetchall()
         self.assertEqual(0, len(rows), "Rows %s" % rows)
 
@@ -202,9 +209,14 @@ class TestDatabaseFixture(testtools.TestCase):
         engine = api_db_api.get_engine()
         conn = engine.connect()
         uuid = uuidutils.generate_uuid()
-        conn.execute("insert into cell_mappings (uuid, name) VALUES "
-                     "('%s', 'fake-cell')" % (uuid,))
-        result = conn.execute("select * from cell_mappings")
+        conn.execute(
+            sa.text(
+                "INSERT INTO cell_mappings (uuid, name) VALUES (:uuid, :name)"
+            ),
+            uuid=uuid,
+            name='fake-cell',
+        )
+        result = conn.execute(sa.text("SELECT * FROM cell_mappings"))
         rows = result.fetchall()
         self.assertEqual(1, len(rows), "Rows %s" % rows)
 
@@ -227,13 +239,13 @@ class TestDefaultFlavorsFixture(testtools.TestCase):
         engine = api_db_api.get_engine()
         conn = engine.connect()
 
-        result = conn.execute("select * from flavors")
+        result = conn.execute(sa.text("SELECT * FROM flavors"))
         rows = result.fetchall()
         self.assertEqual(0, len(rows), "Rows %s" % rows)
 
         self.useFixture(fixtures.DefaultFlavorsFixture())
 
-        result = conn.execute("select * from flavors")
+        result = conn.execute(sa.text("SELECT * FROM flavors"))
         rows = result.fetchall()
         self.assertEqual(6, len(rows), "Rows %s" % rows)
 
@@ -323,7 +335,7 @@ class TestSynchronousThreadPoolExecutorFixture(testtools.TestCase):
 
 class TestBannedDBSchemaOperations(testtools.TestCase):
     def test_column(self):
-        column = sqlalchemy.Column()
+        column = sa.Column()
         with fixtures.BannedDBSchemaOperations(['Column']):
             self.assertRaises(exception.DBNotAllowed,
                               column.drop)
@@ -331,7 +343,7 @@ class TestBannedDBSchemaOperations(testtools.TestCase):
                               column.alter)
 
     def test_table(self):
-        table = sqlalchemy.Table()
+        table = sa.Table()
         with fixtures.BannedDBSchemaOperations(['Table']):
             self.assertRaises(exception.DBNotAllowed,
                               table.drop)
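
Note (not part of the patch): the replacements above follow two SQLAlchemy 1.4+ patterns. Raw driver-level SQL strings (the MySQL ALTER statements and the SQLite PRAGMA) move to Connection.exec_driver_sql(), while the test queries move to Connection.execute() with sqlalchemy.text() and bound parameters instead of plain strings. A minimal, self-contained sketch of both patterns against an in-memory SQLite engine follows; the table and values here are illustrative only, not taken from the patch.

import sqlalchemy as sa

engine = sa.create_engine('sqlite://')

with engine.connect() as conn:
    # exec_driver_sql() passes the string straight to the DBAPI driver,
    # mirroring the ALTER TABLE / PRAGMA replacements above.
    conn.exec_driver_sql('CREATE TABLE cell_mappings (uuid TEXT, name TEXT)')

    # sa.text() with bound parameters replaces plain-string
    # Connection.execute() calls, mirroring the test changes above.
    conn.execute(
        sa.text(
            'INSERT INTO cell_mappings (uuid, name) VALUES (:uuid, :name)'
        ),
        {'uuid': 'fake-uuid', 'name': 'fake-cell'},
    )

    result = conn.execute(sa.text('SELECT * FROM cell_mappings'))
    assert len(result.fetchall()) == 1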