Merge "Add db migrations test framework with data"

commit bb7917f476
Author: Jenkins
Committed: Gerrit Code Review, 2016-03-28 11:54:47 +00:00
2 changed files with 283 additions and 2 deletions

File: test_migrations.conf (new file)

@@ -0,0 +1,14 @@
[migration_dbs]
# Migration DB details are listed separately because these databases can't
# be connected to concurrently; they also must not be shared with any other
# test connections.
# Note: sqlite:// is an in-memory database and is unique each time it is
# spawned; file-based SQLite databases, however, are not unique.
#sqlite=sqlite://
#sqlitefile=sqlite:///test_migrations.db
#mysql=mysql+mysqldb://user:pass@localhost/test_migrations
#postgresql=postgresql+psycopg2://user:pass@localhost/test_migrations
[walk_style]
snake_walk=yes
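
To run the walk against a real backend, a CI job would uncomment one of the
entries above and point it at a reachable server, for example (hypothetical
host and credentials, using the pymysql driver that the test classes below
expect):

[migration_dbs]
mysql=mysql+pymysql://openstack_citest:openstack_citest@localhost/test_migrations

[walk_style]
snake_walk=yes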

File: test_migrations.py

@@ -12,21 +12,29 @@
# License for the specific language governing permissions and limitations
# under the License.

import abc
from contextlib import contextmanager
import os
import subprocess

from alembic import script as alembic_script
from oslo_config import cfg
from oslo_config import fixture as config_fixture
from oslo_db.sqlalchemy import session
from oslo_db.sqlalchemy import test_base
from oslo_db.sqlalchemy import test_migrations
from oslo_db.sqlalchemy import utils as oslo_utils
import six
from six.moves import configparser
from six.moves.urllib import parse
import sqlalchemy
from sqlalchemy import event
import sqlalchemy.types as types

import neutron.db.migration as migration_help
from neutron.db.migration.alembic_migrations import external
from neutron.db.migration import cli as migration
from neutron.db.migration.models import head as head_models
from neutron.tests import base as base_tests
from neutron.tests.common import base

cfg.CONF.import_opt('core_plugin', 'neutron.common.config')
@@ -326,10 +334,10 @@ class TestSanityCheck(test_base.DbTestCase):
                          script.check_sanity, conn)


class TestWalkDowngrade(test_base.DbTestCase):

    def setUp(self):
        super(TestWalkDowngrade, self).setUp()
        self.alembic_config = migration.get_neutron_config()
        self.alembic_config.neutron_config = cfg.CONF
@@ -345,3 +353,262 @@ class TestWalkMigrations(test_base.DbTestCase):
        if failed_revisions:
            self.fail('Migrations %s have downgrade' % failed_revisions)


def _is_backend_avail(backend,
                      user="openstack_citest",
                      passwd="openstack_citest",
                      database="openstack_citest"):
    # is_backend_avail will soon be deprecated in oslo_db, which is why it
    # is reimplemented here.
    try:
        connect_uri = oslo_utils.get_connect_string(backend, user=user,
                                                    passwd=passwd,
                                                    database=database)
        engine = session.create_engine(connect_uri)
        connection = engine.connect()
    except Exception:
        # Intentionally catch everything: connecting may fail even before
        # any backend-specific code is loaded.
        return False
    else:
        connection.close()
        engine.dispose()
        return True
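
# For example, _is_backend_avail('postgresql') merely attempts one connection
# with the default openstack_citest credentials and returns False on any
# failure, so the test classes below skip cleanly when a backend is not
# running.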


@six.add_metaclass(abc.ABCMeta)
class _TestWalkMigrations(base_tests.BaseTestCase):
    '''Framework for testing schema migrations against different backends.

    Currently the MySQL (pymysql) and PostgreSQL backends are supported;
    each backend's command-line client is invoked to recreate the test
    database. The _migrate_up and _migrate_down methods perform the actual
    upgrade and downgrade steps, optionally with data checks.
    '''

    DEFAULT_CONFIG_FILE = os.path.join(os.path.dirname(__file__),
                                       'test_migrations.conf')

    CONFIG_FILE_PATH = os.environ.get('NEUTRON_TEST_MIGRATIONS_CONF',
                                      DEFAULT_CONFIG_FILE)

    def setUp(self):
        if not _is_backend_avail(self.BACKEND):
            self.skipTest("%s not available" % self.BACKEND)

        super(_TestWalkMigrations, self).setUp()

        self.snake_walk = False
        self.test_databases = {}

        if os.path.exists(self.CONFIG_FILE_PATH):
            cp = configparser.RawConfigParser()
            try:
                cp.read(self.CONFIG_FILE_PATH)
                options = cp.options('migration_dbs')
                for key in options:
                    self.test_databases[key] = cp.get('migration_dbs', key)
                self.snake_walk = cp.getboolean('walk_style', 'snake_walk')
            except configparser.ParsingError as e:
                self.fail("Failed to read test_migrations.conf config "
                          "file. Got error: %s" % e)
        else:
            self.fail("Failed to find test_migrations.conf config "
                      "file.")

        self.engines = {}
        for key, value in self.test_databases.items():
            self.engines[key] = sqlalchemy.create_engine(value)

        # We start each test case with a completely blank slate.
        self._reset_databases()

    def assertColumnInTable(self, engine, table_name, column):
        table = oslo_utils.get_table(engine, table_name)
        self.assertIn(column, table.columns)

    def assertColumnNotInTables(self, engine, table_name, column):
        table = oslo_utils.get_table(engine, table_name)
        self.assertNotIn(column, table.columns)

    def execute_cmd(self, cmd=None):
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT, shell=True)
        output = proc.communicate()[0]
        self.assertEqual(0, proc.returncode, 'Command failed with '
                         'output:\n%s' % output)

    @abc.abstractproperty
    def BACKEND(self):
        pass

    @abc.abstractmethod
    def _database_recreate(self, user, password, database, host):
        pass

    def _reset_databases(self):
        for key, engine in self.engines.items():
            conn_string = self.test_databases[key]
            conn_pieces = parse.urlparse(conn_string)
            engine.dispose()
            user, password, database, host = (
                oslo_utils.get_db_connection_info(conn_pieces))
            self._database_recreate(user, password, database, host)

    def _get_alembic_config(self, uri):
        db_config = migration.get_neutron_config()
        self.script_dir = alembic_script.ScriptDirectory.from_config(db_config)
        db_config.neutron_config = cfg.CONF
        db_config.neutron_config.set_override('connection',
                                              six.text_type(uri),
                                              group='database')
        return db_config

    def _revisions(self, downgrade=False):
        """Yield (destination, current) revision pairs.

        :param downgrade: whether to walk the revisions in downgrade order.
        :type downgrade: Bool
        :return: generator of tuples; each tuple contains the destination
            revision and the revision to migrate from.
        """
        revisions = list(self.script_dir.walk_revisions("base", "heads"))
        if not downgrade:
            revisions = list(reversed(revisions))

        for rev in revisions:
            if downgrade:
                # Destination, current
                yield rev.down_revision, rev.revision
            else:
                # Destination, current
                yield rev.revision, rev.down_revision

    def _walk_versions(self, config, engine, downgrade=True,
                       snake_walk=False):
        """Test the ability to upgrade and downgrade through all revisions.

        :param downgrade: whether to also walk all migrations back down
            after the upgrade pass.
        :type downgrade: Bool
        :param snake_walk: when enabled, every upgrade step is followed by
            a downgrade to the previous revision and a second upgrade (and
            the reverse while downgrading), verifying that each script is
            reversible in isolation.
        :type snake_walk: Bool
        """
        revisions = self._revisions()
        for dest, curr in revisions:
            self._migrate_up(config, engine, dest, curr, with_data=True)

            # Skip the snake step for the very first revision: there is no
            # previous revision to downgrade back to.
            if snake_walk and curr is not None:
                # NOTE(I159): Pass reversed arguments into `_migrate_down`
                # method because we have been upgraded to a destination
                # revision and now we are going to downgrade back.
                self._migrate_down(config, engine, curr, dest,
                                   with_data=True)
                self._migrate_up(config, engine, dest, curr, with_data=True)

        if downgrade:
            revisions = self._revisions(downgrade)
            for dest, curr in revisions:
                self._migrate_down(config, engine, dest, curr,
                                   with_data=True)
                if snake_walk:
                    self._migrate_up(config, engine, curr,
                                     dest, with_data=True)
                    self._migrate_down(config, engine, dest,
                                       curr, with_data=True)

    def _migrate_down(self, config, engine, dest, curr, with_data=False):
        # A destination of None means reverting past the first migration,
        # for which there is no Alembic revision to downgrade to, so drop
        # all tables instead.
        if dest:
            migration.do_alembic_command(config, 'downgrade', dest)
        else:
            meta = sqlalchemy.MetaData(bind=engine)
            # Reflect the current schema first; otherwise the MetaData is
            # empty and drop_all() has nothing to drop.
            meta.reflect()
            meta.drop_all()

        if with_data:
            post_downgrade = getattr(
                self, "_post_downgrade_%s" % curr, None)
            if post_downgrade:
                post_downgrade(engine)

    def _migrate_up(self, config, engine, dest, curr, with_data=False):
        if with_data:
            data = None
            pre_upgrade = getattr(
                self, "_pre_upgrade_%s" % dest, None)
            if pre_upgrade:
                data = pre_upgrade(engine)

        migration.do_alembic_command(config, 'upgrade', dest)

        if with_data:
            check = getattr(self, "_check_%s" % dest, None)
            if check and data:
                check(engine, data)
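
    # The data hooks are resolved by revision id: for a hypothetical
    # revision 1234abcd5678, a subclass may define
    # _pre_upgrade_1234abcd5678(engine) to seed data and return it,
    # _check_1234abcd5678(engine, data) to assert the data survived the
    # upgrade, and _post_downgrade_1234abcd5678(engine) to verify the
    # rollback; _migrate_up and _migrate_down call them automatically.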


class TestWalkMigrationsMysql(_TestWalkMigrations):

    BACKEND = 'mysql+pymysql'

    def _database_recreate(self, user, password, database, host):
        # We can execute the MySQL client to destroy and re-create
        # the MySQL database, which is easier and less error-prone
        # than using SQLAlchemy to do this via MetaData...trust me.
        sql = ("drop database if exists %(database)s; create "
               "database %(database)s;") % {'database': database}
        cmd = ("mysql -u \"%(user)s\" -p%(password)s -h %(host)s "
               "-e \"%(sql)s\"") % {'user': user, 'password': password,
                                    'host': host, 'sql': sql}
        self.execute_cmd(cmd)
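
        # With the default credentials and a hypothetical localhost
        # server, the rendered command looks like:
        #   mysql -u "openstack_citest" -popenstack_citest -h localhost
        #     -e "drop database if exists openstack_citest;
        #         create database openstack_citest;"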

    def test_mysql_opportunistically(self):
        connect_string = oslo_utils.get_connect_string(
            self.BACKEND, "openstack_citest", user="openstack_citest",
            passwd="openstack_citest")
        engine = session.create_engine(connect_string)
        config = self._get_alembic_config(connect_string)
        self.engines["mysqlcitest"] = engine
        self.test_databases["mysqlcitest"] = connect_string

        # build a fully populated MySQL database with all the tables
        self._reset_databases()
        self._walk_versions(config, engine, downgrade=False,
                            snake_walk=False)


class TestWalkMigrationsPsql(_TestWalkMigrations):

    BACKEND = 'postgresql'

    def _database_recreate(self, user, password, database, host):
        os.environ['PGPASSWORD'] = password
        os.environ['PGUSER'] = user
        # NOTE(boris-42): We have to create and drop the database via
        # template1: PostgreSQL refuses to drop the database we are
        # currently connected to, so a separate maintenance database is
        # needed for such operations.
        sqlcmd = ("psql -w -U %(user)s -h %(host)s -c"
                  " '%(sql)s' -d template1")

        sql = "drop database if exists %(database)s;" % {'database': database}
        dropdb = sqlcmd % {'user': user, 'host': host, 'sql': sql}
        self.execute_cmd(dropdb)

        sql = "create database %(database)s;" % {'database': database}
        createdb = sqlcmd % {'user': user, 'host': host, 'sql': sql}
        self.execute_cmd(createdb)

    def test_postgresql_opportunistically(self):
        # Add the engine and URL to the instance dicts so _reset_databases
        # works with them; they are removed automatically in tearDown, so
        # there is no need to clean them up here.
        connect_string = oslo_utils.get_connect_string(self.BACKEND,
                                                       "openstack_citest",
                                                       "openstack_citest",
                                                       "openstack_citest")
        engine = session.create_engine(connect_string)
        config = self._get_alembic_config(connect_string)
        self.engines["postgresqlcitest"] = engine
        self.test_databases["postgresqlcitest"] = connect_string

        # build a fully populated PostgreSQL database with all the tables
        self._reset_databases()
        self._walk_versions(config, engine, downgrade=False,
                            snake_walk=False)
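
For illustration, a subclass wiring data checks into the walk might look
like the following sketch. It is not part of this change: the revision id
1234abcd5678 is a placeholder, and it assumes a routers table with id and
name columns.

    class TestWalkMigrationsMysqlWithData(TestWalkMigrationsMysql):
        '''Minimal sketch of per-revision data checks.'''

        def _pre_upgrade_1234abcd5678(self, engine):
            # Seed a row before upgrading to revision 1234abcd5678 and
            # return it so _migrate_up passes it to the _check_* hook.
            routers = oslo_utils.get_table(engine, 'routers')
            engine.execute(routers.insert().values(id='r1',
                                                   name='test-router'))
            return {'id': 'r1'}

        def _check_1234abcd5678(self, engine, data):
            # Runs after the upgrade: the seeded row must have survived.
            routers = oslo_utils.get_table(engine, 'routers')
            rows = engine.execute(routers.select().where(
                routers.c.id == data['id'])).fetchall()
            self.assertEqual(1, len(rows))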