alembic with initial migration and tests

All sqlalchemy-migrate migrations and the code associated
with them were removed, including tests.

Added all necessary alembic code to expose alembic-specific
features such as revision and stamp.

Added an initial alembic migration that is fully in sync with the
models, including unique constraints, indexes and column parameters.

Refactored test_versions to use alembic as the migration API.

Refactored BaseTestCase and the Database fixture to use alembic as the
migration API.

Change-Id: I8a91704d21ccea1b8135b9d724df5856ac21108c
Dima Shulyak 2014-01-16 12:53:53 +02:00
parent 5983ad8523
commit 50b3a02ee4
33 changed files with 499 additions and 1072 deletions

View File

@ -24,10 +24,74 @@ Run storage database migration.

 import sys

+from oslo.config import cfg
+
 from ironic.common import service
 from ironic.db import migration

+CONF = cfg.CONF
+
+
+class DBCommand(object):
+
+    def upgrade(self):
+        migration.upgrade(CONF.command.revision)
+
+    def downgrade(self):
+        migration.downgrade(CONF.command.revision)
+
+    def revision(self):
+        migration.revision(CONF.command.message, CONF.command.autogenerate)
+
+    def stamp(self):
+        migration.stamp(CONF.command.revision)
+
+    def version(self):
+        print(migration.version())
+
+
+def add_command_parsers(subparsers):
+    command_object = DBCommand()
+
+    parser = subparsers.add_parser('upgrade')
+    parser.set_defaults(func=command_object.upgrade)
+    parser.add_argument('--revision', nargs='?')
+
+    parser = subparsers.add_parser('downgrade')
+    parser.set_defaults(func=command_object.downgrade)
+    parser.add_argument('--revision', nargs='?')
+
+    parser = subparsers.add_parser('stamp')
+    parser.add_argument('--revision', nargs='?')
+    parser.set_defaults(func=command_object.stamp)
+
+    parser = subparsers.add_parser('revision')
+    parser.add_argument('-m', '--message')
+    parser.add_argument('--autogenerate', action='store_true')
+    parser.set_defaults(func=command_object.revision)
+
+    parser = subparsers.add_parser('version')
+    parser.set_defaults(func=command_object.version)
+
+
+command_opt = cfg.SubCommandOpt('command',
+                                title='Command',
+                                help='Available commands',
+                                handler=add_command_parsers)
+
+CONF.register_cli_opt(command_opt)
+
+
 def main():
+    # this is hack to work with previous usage of ironic-dbsync
+    # pls change it to ironic-dbsync upgrade
+    valid_commands = set([
+        'upgrade', 'downgrade', 'revision',
+        'version', 'stamp'
+    ])
+    if not set(sys.argv) & valid_commands:
+        sys.argv.append('upgrade')
+
     service.prepare_service(sys.argv)
-    migration.db_sync()
+    CONF.command.func()
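For readers unfamiliar with the oslo.config sub-command pattern used in the new dbsync, dispatch works by registering a SubCommandOpt whose handler populates argparse sub-parsers; after CONF() parses the command line, CONF.command exposes the chosen sub-command's arguments and the bound handler. A minimal standalone sketch of that pattern follows; the function name, argument value and printed output are illustrative only and not part of this change:

from oslo.config import cfg

CONF = cfg.CONF


def upgrade():
    # CONF.command.<dest> exposes the parsed sub-command arguments.
    print(CONF.command.revision)


def add_command_parsers(subparsers):
    parser = subparsers.add_parser('upgrade')
    parser.add_argument('--revision', nargs='?')
    parser.set_defaults(func=upgrade)


CONF.register_cli_opt(cfg.SubCommandOpt('command',
                                        handler=add_command_parsers))
CONF(['upgrade', '--revision', 'head'])
CONF.command.func()   # prints 'head'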

View File

@ -28,18 +28,29 @@ CONF.import_opt('backend',
                 group='database')

 IMPL = utils.LazyPluggable(
     pivot='backend',
     config_group='database',
     sqlalchemy='ironic.db.sqlalchemy.migration')

 INIT_VERSION = 0


-def db_sync(version=None):
+def upgrade(version=None):
     """Migrate the database to `version` or the most recent version."""
-    return IMPL.db_sync(version=version)
+    return IMPL.upgrade(version)


-def db_version():
-    """Display the current database version."""
-    return IMPL.db_version()
+def downgrade(version=None):
+    return IMPL.downgrade(version)
+
+
+def version():
+    return IMPL.version()
+
+
+def stamp(version):
+    return IMPL.stamp(version)
+
+
+def revision(message, autogenerate):
+    return IMPL.revision(message, autogenerate)

View File

@ -0,0 +1,54 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = %(here)s/alembic
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s
# max length of characters to apply to the
# "slug" field
#truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
#sqlalchemy.url = driver://user:pass@localhost/dbname
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

View File

@ -0,0 +1,16 @@
Please see https://alembic.readthedocs.org/en/latest/index.html for general documentation.

To create an alembic migration use:
$ ironic-dbsync revision --message "description of revision" --autogenerate

To stamp the database with the most recent migration version, without actually running migrations:
$ ironic-dbsync stamp --revision head

An upgrade can be performed with:
$ ironic-dbsync (kept for backward compatibility, equivalent to upgrade)
$ ironic-dbsync upgrade
$ ironic-dbsync upgrade --revision head

Downgrading the database:
$ ironic-dbsync downgrade
$ ironic-dbsync downgrade --revision base
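The version sub-command added to ironic-dbsync in this change prints the revision the database is currently at:
$ ironic-dbsync version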

View File

@ -0,0 +1,54 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from logging import config as log_config
from alembic import context
from ironic.db.sqlalchemy import models
import ironic.openstack.common.db.sqlalchemy.session as sqlalchemy_session
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
log_config.fileConfig(config.config_file_name)
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
target_metadata = models.Base.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_online():
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
engine = sqlalchemy_session.get_engine()
with engine.connect() as connection:
context.configure(connection=connection,
target_metadata=target_metadata)
with context.begin_transaction():
context.run_migrations()
run_migrations_online()

View File

@ -0,0 +1,22 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision}
Create Date: ${create_date}
"""
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
def upgrade():
${upgrades if upgrades else "pass"}
def downgrade():
${downgrades if downgrades else "pass"}

View File

@ -0,0 +1,106 @@
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""initial migration
Revision ID: 2581ebaf0cb2
Revises: None
Create Date: 2014-01-17 12:14:07.754448
"""
# revision identifiers, used by Alembic.
revision = '2581ebaf0cb2'
down_revision = None
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table(
'conductors',
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('hostname', sa.String(length=255), nullable=False),
sa.Column('drivers', sa.Text(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('hostname', name='uniq_conductors0hostname'),
mysql_ENGINE='InnoDB',
mysql_DEFAULT_CHARSET='UTF8'
)
op.create_table(
'chassis',
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('uuid', sa.String(length=36), nullable=True),
sa.Column('extra', sa.Text(), nullable=True),
sa.Column('description', sa.String(length=255), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('uuid', name='uniq_chassis0uuid'),
mysql_ENGINE='InnoDB',
mysql_DEFAULT_CHARSET='UTF8'
)
op.create_table(
'nodes',
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('uuid', sa.String(length=36), nullable=True),
sa.Column('instance_uuid', sa.String(length=36), nullable=True),
sa.Column('chassis_id', sa.Integer(), nullable=True),
sa.Column('power_state', sa.String(length=15), nullable=True),
sa.Column('target_power_state', sa.String(length=15), nullable=True),
sa.Column('provision_state', sa.String(length=15), nullable=True),
sa.Column('target_provision_state', sa.String(length=15),
nullable=True),
sa.Column('last_error', sa.Text(), nullable=True),
sa.Column('properties', sa.Text(), nullable=True),
sa.Column('driver', sa.String(length=15), nullable=True),
sa.Column('driver_info', sa.Text(), nullable=True),
sa.Column('reservation', sa.String(length=255), nullable=True),
sa.Column('maintenance', sa.Boolean(), nullable=True),
sa.Column('extra', sa.Text(), nullable=True),
sa.ForeignKeyConstraint(['chassis_id'], ['chassis.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('uuid', name='uniq_nodes0uuid'),
mysql_ENGINE='InnoDB',
mysql_DEFAULT_CHARSET='UTF8'
)
op.create_index('node_instance_uuid', 'nodes', ['instance_uuid'],
unique=False)
op.create_table(
'ports',
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('uuid', sa.String(length=36), nullable=True),
sa.Column('address', sa.String(length=18), nullable=True),
sa.Column('node_id', sa.Integer(), nullable=True),
sa.Column('extra', sa.Text(), nullable=True),
sa.ForeignKeyConstraint(['node_id'], ['nodes.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('address', name='uniq_ports0address'),
sa.UniqueConstraint('uuid', name='uniq_ports0uuid'),
mysql_ENGINE='InnoDB',
mysql_DEFAULT_CHARSET='UTF8'
)
### end Alembic commands ###
def downgrade():
raise NotImplementedError(('Downgrade from initial migration is'
' unsupported.'))
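Since the commit message states that this initial migration is fully in sync with the models, one rough way to check that after running ironic-dbsync upgrade is to compare the migrated table names with the declarative metadata. This is only an illustrative sketch, not part of the change: the connection URL is a placeholder and it compares table names, not column or constraint definitions.

import sqlalchemy as sa

from ironic.db.sqlalchemy import models

# Connect to a database that ironic-dbsync upgrade has already been run
# against; the URL below is a placeholder.
engine = sa.create_engine('mysql://user:pass@localhost/ironic_test')
inspector = sa.inspect(engine)

migrated = set(inspector.get_table_names()) - set(['alembic_version'])
declared = set(models.Base.metadata.tables)
assert migrated == declared, (migrated, declared)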

View File

@ -1,22 +0,0 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# -*- encoding: utf-8 -*-
#
# Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from migrate.versioning.shell import main
if __name__ == '__main__':
main(debug='False', repository='.')

View File

@ -1,20 +0,0 @@
[db_settings]
# Used to identify which repository this database is versioned under.
# You can use the name of your project.
repository_id=ironic
# The name of the database table used to track the schema version.
# This name shouldn't already be used by your project.
# If this is changed once a database is under version control, you'll need to
# change the table name in each database too.
version_table=migrate_version
# When committing a change script, Migrate will attempt to generate the
# sql for all supported databases; normally, if one of them fails - probably
# because you don't have that database installed - it is ignored and the
# commit continues, perhaps ending successfully.
# Databases in this list MUST compile successfully during a commit, or the
# entire commit will fail. List the databases your application will actually
# be using to ensure your updates to that database work properly.
# This must be a list; example: ['postgres','sqlite']
required_dbs=[]

View File

@ -1,96 +0,0 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# -*- encoding: utf-8 -*-
#
# Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from migrate.changeset import UniqueConstraint
from sqlalchemy import Table, Column, Index, ForeignKey, MetaData
from sqlalchemy import DateTime, Integer, String, Text
from ironic.openstack.common import log as logging
LOG = logging.getLogger(__name__)
ENGINE = 'InnoDB'
CHARSET = 'utf8'
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
nodes = Table('nodes', meta,
Column('id', Integer, primary_key=True, nullable=False),
Column('uuid', String(length=36)),
Column('power_info', Text),
Column('cpu_arch', String(length=10)),
Column('cpu_num', Integer),
Column('memory', Integer),
Column('local_storage_max', Integer),
Column('task_state', String(length=255)),
Column('image_path', String(length=255), nullable=True),
Column('instance_uuid', String(length=36), nullable=True),
Column('instance_name', String(length=255), nullable=True),
Column('extra', Text),
Column('created_at', DateTime),
Column('updated_at', DateTime),
mysql_engine=ENGINE,
mysql_charset=CHARSET,
)
ifaces = Table('ifaces', meta,
Column('id', Integer, primary_key=True, nullable=False),
Column('address', String(length=18)),
Column('node_id', Integer, ForeignKey('nodes.id'),
nullable=True),
Column('extra', Text),
Column('created_at', DateTime),
Column('updated_at', DateTime),
mysql_engine=ENGINE,
mysql_charset=CHARSET,
)
tables = [nodes, ifaces]
for table in tables:
try:
table.create()
except Exception:
LOG.info(repr(table))
LOG.exception(_('Exception while creating table.'))
raise
indexes = [
Index('node_cpu_mem_disk', nodes.c.cpu_num,
nodes.c.memory, nodes.c.local_storage_max),
Index('node_instance_uuid', nodes.c.instance_uuid),
]
uniques = [
UniqueConstraint('uuid', table=nodes,
name='node_uuid_ux'),
UniqueConstraint('address', table=ifaces,
name='iface_address_ux'),
]
if migrate_engine.name == 'mysql' or migrate_engine.name == 'postgresql':
for index in indexes:
index.create(migrate_engine)
for index in uniques:
index.create(migrate_engine)
def downgrade(migrate_engine):
raise NotImplementedError('Downgrade from Folsom is unsupported.')

View File

@ -1,61 +0,0 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# -*- encoding: utf-8 -*-
#
# Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Table, Column, MetaData
from sqlalchemy import DateTime, Integer, String, Text
from ironic.openstack.common import log as logging
LOG = logging.getLogger(__name__)
ENGINE = 'InnoDB'
CHARSET = 'utf8'
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
nodes = Table('nodes', meta, autoload=True)
chassis_id = Column('chassis_id', Integer, nullable=True)
task_start = Column('task_start', DateTime, nullable=True)
properties = Column('properties', Text)
control_driver = Column('control_driver', String(15))
control_info = Column('control_info', Text)
deploy_driver = Column('deploy_driver', String(15))
deploy_info = Column('deploy_info', Text)
reservation = Column('reservation', String(255), nullable=True)
new_cols = [chassis_id, task_start, properties, reservation,
control_driver, control_info, deploy_driver, deploy_info]
cols_to_delete = ['power_info', 'cpu_arch', 'cpu_num', 'memory',
'local_storage_max', 'image_path', 'instance_name']
for col in cols_to_delete:
getattr(nodes.c, col).drop()
for col in new_cols:
nodes.create_column(col)
task_state = getattr(nodes.c, 'task_state')
task_state.alter(String(15))
def downgrade(migrate_engine):
raise NotImplementedError('Downgrade from version 002 is unsupported.')

View File

@ -1,38 +0,0 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# -*- encoding: utf-8 -*-
#
# Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Table, MetaData, Column, Integer, String
ENGINE = 'InnoDB'
CHARSET = 'utf8'
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
table = Table('chassis', meta,
Column('id', Integer, primary_key=True, nullable=False),
Column('uuid', String(length=36)),
mysql_engine=ENGINE,
mysql_charset=CHARSET,
)
table.create()
def downgrade(migrate_engine):
raise NotImplementedError('Downgrade from version 004 is unsupported.')

View File

@ -1,33 +0,0 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# -*- encoding: utf-8 -*-
#
# Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Table, MetaData
ENGINE = 'InnoDB'
CHARSET = 'utf8'
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
table = Table('ifaces', meta, autoload=True)
table.rename('ports')
def downgrade(migrate_engine):
raise NotImplementedError('Downgrade from version 003 is unsupported.')

View File

@ -1,34 +0,0 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# -*- encoding: utf-8 -*-
#
# Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Table, MetaData
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
nodes = Table('nodes', meta, autoload=True)
nodes.c.deploy_driver.drop()
nodes.c.deploy_info.drop()
nodes.c.control_driver.alter(name='driver')
nodes.c.control_info.alter(name='driver_info')
def downgrade(migrate_engine):
raise NotImplementedError('Downgrade from version 005 is unsupported.')

View File

@ -1,32 +0,0 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# -*- encoding: utf-8 -*-
#
# Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Table, Column, String, MetaData
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
ports = Table('ports', meta, autoload=True)
col = Column('uuid', String(36), unique=True)
ports.create_column(col, unique_name="port_uuid_ux")
def downgrade(migrate_engine):
raise NotImplementedError('Downgrade from version 006 is unsupported.')

View File

@ -1,31 +0,0 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# -*- encoding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Table, Column, Text, DateTime, MetaData
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
chassis = Table('chassis', meta, autoload=True)
chassis.create_column(Column('extra', Text))
chassis.create_column(Column('created_at', DateTime))
chassis.create_column(Column('updated_at', DateTime))
def downgrade(migrate_engine):
raise NotImplementedError('Downgrade from version 007 is unsupported.')

View File

@ -1,29 +0,0 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# -*- encoding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Table, Column, MetaData, String
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
chassis = Table('chassis', meta, autoload=True)
chassis.create_column(Column('description', String(255), nullable=True))
def downgrade(migrate_engine):
raise NotImplementedError('Downgrade from version 008 is unsupported.')

View File

@ -1,39 +0,0 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# -*- encoding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Table, Column, MetaData, String
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
nodes = Table('nodes', meta, autoload=True)
# Drop task_* columns
nodes.c.task_start.drop()
nodes.c.task_state.drop()
# Create new states columns
nodes.create_column(Column('power_state', String(15), nullable=True))
nodes.create_column(Column('target_power_state', String(15),
nullable=True))
nodes.create_column(Column('provision_state', String(15), nullable=True))
nodes.create_column(Column('target_provision_state', String(15),
nullable=True))
def downgrade(migrate_engine):
raise NotImplementedError('Downgrade from version 009 is unsupported.')

View File

@ -1,31 +0,0 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# -*- encoding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from migrate.changeset import ForeignKeyConstraint
from sqlalchemy import MetaData, Table
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
nodes = Table('nodes', meta, autoload=True)
chassis = Table('chassis', meta, autoload=True)
f_key = ForeignKeyConstraint([nodes.c.chassis_id], [chassis.c.id])
f_key.create()
def downgrade(migrate_engine):
raise NotImplementedError('Downgrade from version 010 is unsupported.')

View File

@ -1,30 +0,0 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# -*- encoding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from migrate.changeset import UniqueConstraint
from sqlalchemy import MetaData, Table
def upgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
t = Table('chassis', meta, autoload=True)
# NOTE: new name convention for UC
uc = UniqueConstraint('uuid', table=t, name='uniq_chassis0uuid')
uc.create()
def downgrade(migrate_engine):
raise NotImplementedError('Downgrade from version 011 is unsupported.')

View File

@ -1,56 +0,0 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# -*- encoding: utf-8 -*-
#
# Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from migrate.changeset import UniqueConstraint
from sqlalchemy import MetaData, Table, Column, Integer, String, Text, DateTime
from ironic.openstack.common import log as logging
LOG = logging.getLogger(__name__)
ENGINE = 'InnoDB'
CHARSET = 'utf8'
def upgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
conductor = Table('conductors', meta,
Column('id', Integer, primary_key=True, nullable=False),
Column('hostname', String(length=255), nullable=False),
Column('drivers', Text),
Column('created_at', DateTime),
Column('updated_at', DateTime),
mysql_engine=ENGINE,
mysql_charset=CHARSET,
)
try:
conductor.create()
except Exception:
LOG.info(repr(conductor))
LOG.exception(_('Exception while creating table.'))
raise
uc = UniqueConstraint('hostname',
table=conductor,
name='uniq_conductors0hostname')
uc.create()
def downgrade(migrate_engine):
raise NotImplementedError(_('Downgrade from version 012 is unsupported.'))

View File

@ -1,30 +0,0 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# -*- encoding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Table, Column, MetaData, Text
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
nodes = Table('nodes', meta, autoload=True)
# Create new last_error column
nodes.create_column(Column('last_error', Text, nullable=True))
def downgrade(migrate_engine):
raise NotImplementedError('Downgrade from version 013 is unsupported.')

View File

@ -1,37 +0,0 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# -*- encoding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from migrate.changeset import UniqueConstraint
from sqlalchemy import MetaData, Table
def upgrade(migrate_engine):
if migrate_engine.name == 'sqlite':
meta = MetaData(bind=migrate_engine)
ports = Table('ports', meta, autoload=True)
uniques = (
UniqueConstraint('address', table=ports, name='iface_address_ux'),
# NOTE(yuriyz): this migration can drop first UC in 'ports' table
# for sqlite backend (sqlalchemy-migrate bug), recreate it
UniqueConstraint('uuid', table=ports, name='port_uuid_ux')
)
for uc in uniques:
uc.create()
def downgrade(migrate_engine):
raise NotImplementedError('Downgrade from version 014 is unsupported.')

View File

@ -1,28 +0,0 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# -*- encoding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Table, Column, MetaData, Boolean
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
nodes = Table('nodes', meta, autoload=True)
nodes.create_column(Column('maintenance', Boolean, default=False))
def downgrade(migrate_engine):
raise NotImplementedError('Downgrade from version 015 is unsupported.')

View File

@ -18,70 +18,75 @@

 import os

-import sqlalchemy
+import alembic
+from alembic import config as alembic_config
+import alembic.migration as alembic_migration

-from ironic.common import exception
-from ironic.db import migration
 from ironic.openstack.common.db.sqlalchemy import session as db_session

-# NOTE(jkoelker) Delay importing migrate until we are patched
-from migrate import exceptions as versioning_exceptions
-from migrate.versioning import api as versioning_api
-from migrate.versioning.repository import Repository
-
-_REPOSITORY = None
-
-get_engine = db_session.get_engine
-
-
-def db_sync(version=None):
-    if version is not None:
-        try:
-            version = int(version)
-        except ValueError:
-            raise exception.IronicException(_("version should be an integer"))
-
-    current_version = db_version()
-    repository = _find_migrate_repo()
-    if version is None or version > current_version:
-        return versioning_api.upgrade(get_engine(), repository, version)
-    else:
-        return versioning_api.downgrade(get_engine(), repository,
-                                        version)
-
-
-def db_version():
-    repository = _find_migrate_repo()
-    try:
-        return versioning_api.db_version(get_engine(), repository)
-    except versioning_exceptions.DatabaseNotControlledError:
-        meta = sqlalchemy.MetaData()
-        engine = get_engine()
-        meta.reflect(bind=engine)
-        tables = meta.tables
-        if len(tables) == 0:
-            db_version_control(migration.INIT_VERSION)
-            return versioning_api.db_version(get_engine(), repository)
-        else:
-            # Some pre-Essex DB's may not be version controlled.
-            # Require them to upgrade using Essex first.
-            raise exception.IronicException(
-                _("Upgrade DB using Essex release first."))
-
-
-def db_version_control(version=None):
-    repository = _find_migrate_repo()
-    versioning_api.version_control(get_engine(), repository, version)
-    return version
-
-
-def _find_migrate_repo():
-    """Get the path for the migrate repository."""
-    global _REPOSITORY
-    path = os.path.join(os.path.abspath(os.path.dirname(__file__)),
-                        'migrate_repo')
-    assert os.path.exists(path)
-    if _REPOSITORY is None:
-        _REPOSITORY = Repository(path)
-    return _REPOSITORY
+
+def _alembic_config():
+    path = os.path.join(os.path.dirname(__file__), 'alembic.ini')
+    config = alembic_config.Config(path)
+    return config
+
+
+def version(config=None):
+    """Current database version.
+
+    :returns: Database version
+    :rtype: string
+    """
+    engine = db_session.get_engine()
+    with engine.connect() as conn:
+        context = alembic_migration.MigrationContext.configure(conn)
+        return context.get_current_revision()
+
+
+def upgrade(revision, config=None):
+    """Used for upgrading database.
+
+    :param version: Desired database version
+    :type version: string
+    """
+    revision = revision or 'head'
+    config = config or _alembic_config()
+
+    alembic.command.upgrade(config, revision or 'head')
+
+
+def downgrade(revision, config=None):
+    """Used for downgrading database.
+
+    :param version: Desired database version
+    :type version: string
+    """
+    revision = revision or 'base'
+    config = config or _alembic_config()
+    return alembic.command.downgrade(config, revision)
+
+
+def stamp(revision, config=None):
+    """Stamps database with provided revision.
+
+    Dont run any migrations.
+
+    :param revision: Should match one from repository or head - to stamp
+        database with most recent revision
+    :type revision: string
+    """
+    config = config or _alembic_config()
+    return alembic.command.stamp(config, revision=revision)
+
+
+def revision(message=None, autogenerate=False, config=None):
+    """Creates template for migration.
+
+    :param message: Text that will be used for migration title
+    :type message: string
+    :param autogenerate: If True - generates diff based on current database
+        state
+    :type autogenerate: bool
+    """
+    config = config or _alembic_config()
+    return alembic.command.revision(config, message=message,
+                                    autogenerate=autogenerate)
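The new module is normally driven through ironic.db.migration (the LazyPluggable IMPL shown earlier) or through the ironic-dbsync command, but it can also be exercised directly. A hedged usage sketch, assuming the ironic configuration with a valid database connection has already been loaded, since env.py takes its engine from the session module rather than from alembic.ini:

from ironic.db import migration

# Bring the schema to the latest alembic revision, then report it.
migration.upgrade('head')
print(migration.version())   # e.g. '2581ebaf0cb2'

# Create a new revision template; autogenerate diffs it against the models.
migration.revision(message='add example table', autogenerate=True)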

View File

@ -110,7 +110,7 @@ class Node(Base):
     __tablename__ = 'nodes'
     __table_args__ = (
-        schema.UniqueConstraint('uuid', name='node_uuid_ux'),
+        schema.UniqueConstraint('uuid', name='uniq_nodes0uuid'),
         Index('node_instance_uuid', 'instance_uuid'))
     id = Column(Integer, primary_key=True)
     uuid = Column(String(36))
@ -137,8 +137,8 @@ class Port(Base):
     __tablename__ = 'ports'
     __table_args__ = (
-        schema.UniqueConstraint('address', name='iface_address_ux'),
-        schema.UniqueConstraint('uuid', name='port_uuid_ux'))
+        schema.UniqueConstraint('address', name='uniq_ports0address'),
+        schema.UniqueConstraint('uuid', name='uniq_ports0uuid'))
     id = Column(Integer, primary_key=True)
     uuid = Column(String(36))
     address = Column(String(18))

View File

@ -36,7 +36,8 @@ import testtools
 from oslo.config import cfg

-from ironic.db import migration
+from ironic.db.sqlalchemy import migration
+from ironic.db.sqlalchemy import models
 from ironic.common import paths
 from ironic.objects import base as objects_base
@ -78,13 +79,14 @@ class Database(fixtures.Fixture):
         self.engine.dispose()
         conn = self.engine.connect()
         if sql_connection == "sqlite://":
-            if db_migrate.db_version() > db_migrate.INIT_VERSION:
-                return
-        else:
+            self.setup_sqlite(db_migrate)
+        elif sql_connection.startswith('sqlite:///'):
             testdb = paths.state_path_rel(sqlite_db)
             if os.path.exists(testdb):
                 return
-            db_migrate.db_sync()
+            self.setup_sqlite(db_migrate)
+        else:
+            db_migrate.upgrade('head')
         self.post_migrations()
         if sql_connection == "sqlite://":
             conn = self.engine.connect()
@ -94,6 +96,12 @@ class Database(fixtures.Fixture):
             cleandb = paths.state_path_rel(sqlite_clean_db)
             shutil.copyfile(testdb, cleandb)

+    def setup_sqlite(self, db_migrate):
+        if db_migrate.version():
+            return
+        models.Base.metadata.create_all(self.engine)
+        db_migrate.stamp('head')
+
     def setUp(self):
         super(Database, self).setUp()
@ -104,6 +112,7 @@ class Database(fixtures.Fixture):
         else:
             shutil.copyfile(paths.state_path_rel(self.sqlite_clean_db),
                             paths.state_path_rel(self.sqlite_db))
+        self.addCleanup(os.unlink, self.sqlite_db)

     def post_migrations(self):
         """Any addition steps that are needed outside of the migrations."""

View File

@ -1,7 +1,6 @@
 [DEFAULT]
 # Set up any number of migration data stores you want, one
 # The "name" used in the test is the config variable key.
-#sqlite=sqlite:///test_migrations.db
-sqlite=sqlite://
+# sqlite migrations not supported by alembic
 #mysql=mysql://root:@localhost/test_migrations
 #postgresql=postgresql://user:pass@localhost/test_migrations
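To run the walk-versions tests against a real backend, the commented connection strings can be enabled; the values below simply mirror the examples already in the file and need to be adjusted to local credentials:

[DEFAULT]
mysql=mysql://root:@localhost/test_migrations
postgresql=postgresql://user:pass@localhost/test_migrations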

View File

@ -41,21 +41,21 @@ postgres=# create database openstack_citest with owner openstack_citest;
 """

 import ConfigParser
+import contextlib
 import fixtures
 import os
 import subprocess
 import urlparse

-from migrate.versioning import repository
+from alembic import script
 import mock
 import sqlalchemy
 import sqlalchemy.exc

-from ironic.openstack.common.db.sqlalchemy import utils as db_utils
+from ironic.db.sqlalchemy import migration
+from ironic.openstack.common.db.sqlalchemy import session
 from ironic.openstack.common import lockutils
 from ironic.openstack.common import log as logging
-import ironic.db.sqlalchemy.migrate_repo
 from ironic.tests import base

 LOG = logging.getLogger(__name__)

@ -76,7 +76,7 @@ def _get_connect_string(backend, user, passwd, database):
     return ("%(backend)s://%(user)s:%(passwd)s@localhost/%(database)s"
             % {'backend': backend, 'user': user, 'passwd': passwd,
                'database': database})


 def _is_backend_avail(backend, user, passwd, database):

@ -122,6 +122,18 @@ def get_db_connection_info(conn_pieces):
     return (user, password, database, host)


+@contextlib.contextmanager
+def patch_with_engine(engine):
+    with mock.patch(('ironic.openstack.common.db'
+                     '.sqlalchemy.session.get_engine')) as patch_migration:
+        with mock.patch(('ironic.db.sqlalchemy.migration'
+                         '.db_session.get_engine')) as patch_env:
+            patch_migration.return_value = engine
+            patch_env.return_value = engine
+            yield
+
+
 class BaseMigrationTestCase(base.TestCase):
     """Base class fort testing of migration utils."""

@ -142,7 +154,6 @@ class BaseMigrationTestCase(base.TestCase):
         # Load test databases from the config file. Only do this
         # once. No need to re-run this on each test...
-        LOG.debug(_('config_path is %s') % self.CONFIG_FILE_PATH)
         if os.path.exists(self.CONFIG_FILE_PATH):
             cp = ConfigParser.RawConfigParser()
             try:

@ -159,7 +170,7 @@ class BaseMigrationTestCase(base.TestCase):
         self.engines = {}
         for key, value in self.test_databases.items():
-            self.engines[key] = sqlalchemy.create_engine(value)
+            self.engines[key] = session.create_engine(value)

         # We start each test case with a completely blank slate.
         self.temp_dir = self.useFixture(fixtures.TempDir())
@ -235,65 +246,49 @@
 class WalkVersionsMixin(object):
-    def _walk_versions(self, engine=None, snake_walk=False, downgrade=True):
+    def _walk_versions(self, engine=None, alembic_cfg=None, downgrade=True):
         # Determine latest version script from the repo, then
         # upgrade from 1 through to the latest, with no data
         # in the databases. This just checks that the schema itself
         # upgrades successfully.

         # Place the database under version control
-        self.migration_api.version_control(engine, self.REPOSITORY,
-                                           self.INIT_VERSION)
-        self.assertEqual(self.INIT_VERSION,
-                         self.migration_api.db_version(engine,
-                                                       self.REPOSITORY))
-
-        LOG.debug(_('latest version is %s') % self.REPOSITORY.latest)
-        versions = range(self.INIT_VERSION + 1, self.REPOSITORY.latest + 1)
-
-        for version in versions:
-            # upgrade -> downgrade -> upgrade
-            self._migrate_up(engine, version, with_data=True)
-            if snake_walk:
-                downgraded = self._migrate_down(
-                    engine, version - 1, with_data=True)
-                if downgraded:
-                    self._migrate_up(engine, version)
-
-        if downgrade:
-            # Now walk it back down to 0 from the latest, testing
-            # the downgrade paths.
-            for version in reversed(versions):
-                # downgrade -> upgrade -> downgrade
-                downgraded = self._migrate_down(engine, version - 1)
-
-                if snake_walk and downgraded:
-                    self._migrate_up(engine, version)
-                    self._migrate_down(engine, version - 1)
+        with patch_with_engine(engine):
+
+            script_directory = script.ScriptDirectory.from_config(alembic_cfg)
+
+            self.assertIsNone(self.migration_api.version(alembic_cfg))
+
+            for version in script_directory.walk_revisions():
+                self._migrate_up(engine, alembic_cfg,
+                                 version.revision, with_data=True)
+
+            if downgrade:
+                for version in reversed(script_directory.walk_revisions()):
+                    self._migrate_down(engine, alembic_cfg, version.revision)

-    def _migrate_down(self, engine, version, with_data=False):
+    def _migrate_down(self, engine, config, version, with_data=False):
         try:
-            self.migration_api.downgrade(engine, self.REPOSITORY, version)
+            self.migration_api.downgrade(version, config=config)
         except NotImplementedError:
             # NOTE(sirp): some migrations, namely release-level
             # migrations, don't support a downgrade.
             return False

-        self.assertEqual(
-            version, self.migration_api.db_version(engine, self.REPOSITORY))
+        self.assertEqual(version, self.migration_api.version(config))

         # NOTE(sirp): `version` is what we're downgrading to (i.e. the 'target'
         # version). So if we have any downgrade checks, they need to be run for
         # the previous (higher numbered) migration.
         if with_data:
             post_downgrade = getattr(
-                self, "_post_downgrade_%03d" % (version + 1), None)
+                self, "_post_downgrade_%s" % (version), None)
             if post_downgrade:
                 post_downgrade(engine)

         return True

-    def _migrate_up(self, engine, version, with_data=False):
+    def _migrate_up(self, engine, config, version, with_data=False):
         """migrate up to a new version of the db.

         We allow for data insertion and post checks at every

@ -306,16 +301,14 @@ class WalkVersionsMixin(object):
             if with_data:
                 data = None
                 pre_upgrade = getattr(
-                    self, "_pre_upgrade_%03d" % version, None)
+                    self, "_pre_upgrade_%s" % version, None)
                 if pre_upgrade:
                     data = pre_upgrade(engine)

-            self.migration_api.upgrade(engine, self.REPOSITORY, version)
-            self.assertEqual(version,
-                             self.migration_api.db_version(engine,
-                                                           self.REPOSITORY))
+            self.migration_api.upgrade(version, config=config)
+            self.assertEqual(version, self.migration_api.version(config))
             if with_data:
-                check = getattr(self, "_check_%03d" % version, None)
+                check = getattr(self, "_check_%s" % version, None)
                 if check:
                     check(engine, data)
         except Exception:
@ -329,131 +322,83 @@ class TestWalkVersions(base.TestCase, WalkVersionsMixin):
         super(TestWalkVersions, self).setUp()
         self.migration_api = mock.MagicMock()
         self.engine = mock.MagicMock()
-        self.REPOSITORY = mock.MagicMock()
-        self.INIT_VERSION = 4
+        self.config = mock.MagicMock()
+        self.versions = [mock.Mock(revision='1a1'), mock.Mock(revision='2b2')]

     def test_migrate_up(self):
-        self.migration_api.db_version.return_value = 141
+        self.migration_api.version.return_value = 'dsa123'

-        self._migrate_up(self.engine, 141)
+        self._migrate_up(self.engine, self.config, 'dsa123')

-        self.migration_api.upgrade.assert_called_with(
-            self.engine, self.REPOSITORY, 141)
-        self.migration_api.db_version.assert_called_with(
-            self.engine, self.REPOSITORY)
+        self.migration_api.upgrade.assert_called_with('dsa123',
+                                                      config=self.config)
+        self.migration_api.version.assert_called_with(self.config)

     def test_migrate_up_with_data(self):
         test_value = {"a": 1, "b": 2}
-        self.migration_api.db_version.return_value = 141
+        self.migration_api.version.return_value = '141'
         self._pre_upgrade_141 = mock.MagicMock()
         self._pre_upgrade_141.return_value = test_value
         self._check_141 = mock.MagicMock()

-        self._migrate_up(self.engine, 141, True)
+        self._migrate_up(self.engine, self.config, '141', True)

         self._pre_upgrade_141.assert_called_with(self.engine)
         self._check_141.assert_called_with(self.engine, test_value)

     def test_migrate_down(self):
-        self.migration_api.db_version.return_value = 42
+        self.migration_api.version.return_value = '42'

-        self.assertTrue(self._migrate_down(self.engine, 42))
-        self.migration_api.db_version.assert_called_with(
-            self.engine, self.REPOSITORY)
+        self.assertTrue(self._migrate_down(self.engine, self.config, '42'))
+        self.migration_api.version.assert_called_with(self.config)

     def test_migrate_down_not_implemented(self):
         self.migration_api.downgrade.side_effect = NotImplementedError
-        self.assertFalse(self._migrate_down(self.engine, 42))
+        self.assertFalse(self._migrate_down(self.engine, self.config, '42'))

     def test_migrate_down_with_data(self):
         self._post_downgrade_043 = mock.MagicMock()
-        self.migration_api.db_version.return_value = 42
+        self.migration_api.version.return_value = '043'

-        self._migrate_down(self.engine, 42, True)
+        self._migrate_down(self.engine, self.config, '043', True)

         self._post_downgrade_043.assert_called_with(self.engine)

+    @mock.patch.object(script, 'ScriptDirectory')
     @mock.patch.object(WalkVersionsMixin, '_migrate_up')
     @mock.patch.object(WalkVersionsMixin, '_migrate_down')
-    def test_walk_versions_all_default(self, _migrate_up, _migrate_down):
-        self.REPOSITORY.latest = 20
-        self.migration_api.db_version.return_value = self.INIT_VERSION
+    def test_walk_versions_all_default(self, _migrate_up, _migrate_down,
+                                       script_directory):
+        script_directory.from_config().\
+            walk_revisions.return_value = self.versions
+        self.migration_api.version.return_value = None

-        self._walk_versions()
+        self._walk_versions(self.engine, self.config)

-        self.migration_api.version_control.assert_called_with(
-            None, self.REPOSITORY, self.INIT_VERSION)
-        self.migration_api.db_version.assert_called_with(
-            None, self.REPOSITORY)
+        self.migration_api.version.assert_called_with(self.config)

-        versions = range(self.INIT_VERSION + 1, self.REPOSITORY.latest + 1)
-        upgraded = [mock.call(None, v, with_data=True) for v in versions]
+        upgraded = [mock.call(self.engine, self.config,
+                    v.revision, with_data=True) for v in self.versions]
         self.assertEqual(self._migrate_up.call_args_list, upgraded)

-        downgraded = [mock.call(None, v - 1) for v in reversed(versions)]
+        downgraded = [mock.call(self.engine, self.config, v.revision)
+                      for v in reversed(self.versions)]
         self.assertEqual(self._migrate_down.call_args_list, downgraded)

+    @mock.patch.object(script, 'ScriptDirectory')
     @mock.patch.object(WalkVersionsMixin, '_migrate_up')
     @mock.patch.object(WalkVersionsMixin, '_migrate_down')
-    def test_walk_versions_all_true(self, _migrate_up, _migrate_down):
-        self.REPOSITORY.latest = 20
-        self.migration_api.db_version.return_value = self.INIT_VERSION
-
-        self._walk_versions(self.engine, snake_walk=True, downgrade=True)
-
-        versions = range(self.INIT_VERSION + 1, self.REPOSITORY.latest + 1)
-        upgraded = []
-        for v in versions:
-            upgraded.append(mock.call(self.engine, v, with_data=True))
-            upgraded.append(mock.call(self.engine, v))
-        upgraded.extend(
-            [mock.call(self.engine, v) for v in reversed(versions)]
-        )
-        self.assertEqual(upgraded, self._migrate_up.call_args_list)
-
-        downgraded_1 = [
-            mock.call(self.engine, v - 1, with_data=True) for v in versions
-        ]
-        downgraded_2 = []
-        for v in reversed(versions):
-            downgraded_2.append(mock.call(self.engine, v - 1))
-            downgraded_2.append(mock.call(self.engine, v - 1))
-        downgraded = downgraded_1 + downgraded_2
-        self.assertEqual(self._migrate_down.call_args_list, downgraded)
-
-    @mock.patch.object(WalkVersionsMixin, '_migrate_up')
-    @mock.patch.object(WalkVersionsMixin, '_migrate_down')
-    def test_walk_versions_true_false(self, _migrate_up, _migrate_down):
-        self.REPOSITORY.latest = 20
-        self.migration_api.db_version.return_value = self.INIT_VERSION
-
-        self._walk_versions(self.engine, snake_walk=True, downgrade=False)
-
-        versions = range(self.INIT_VERSION + 1, self.REPOSITORY.latest + 1)
-        upgraded = []
-        for v in versions:
-            upgraded.append(mock.call(self.engine, v, with_data=True))
-            upgraded.append(mock.call(self.engine, v))
-        self.assertEqual(upgraded, self._migrate_up.call_args_list)
-
-        downgraded = [
-            mock.call(self.engine, v - 1, with_data=True) for v in versions
-        ]
-        self.assertEqual(self._migrate_down.call_args_list, downgraded)
-
-    @mock.patch.object(WalkVersionsMixin, '_migrate_up')
-    @mock.patch.object(WalkVersionsMixin, '_migrate_down')
-    def test_walk_versions_all_false(self, _migrate_up, _migrate_down):
-        self.REPOSITORY.latest = 20
-        self.migration_api.db_version.return_value = self.INIT_VERSION
+    def test_walk_versions_all_false(self, _migrate_up, _migrate_down,
+                                     script_directory):
+        script_directory.from_config().\
+            walk_revisions.return_value = self.versions
+        self.migration_api.version.return_value = None

-        self._walk_versions(self.engine, snake_walk=False, downgrade=False)
+        self._walk_versions(self.engine, self.config, downgrade=False)

-        versions = range(self.INIT_VERSION + 1, self.REPOSITORY.latest + 1)
         upgraded = [
-            mock.call(self.engine, v, with_data=True) for v in versions
+            mock.call(self.engine, self.config,
+                      v.revision, with_data=True) for v in self.versions
         ]
         self.assertEqual(upgraded, self._migrate_up.call_args_list)
@ -466,20 +411,11 @@ class TestMigrations(BaseMigrationTestCase, WalkVersionsMixin):
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
super(TestMigrations, self).__init__(*args, **kwargs) super(TestMigrations, self).__init__(*args, **kwargs)
self.MIGRATE_FILE = ironic.db.sqlalchemy.migrate_repo.__file__
self.REPOSITORY = repository.Repository(
os.path.abspath(os.path.dirname(self.MIGRATE_FILE)))
def setUp(self): def setUp(self):
super(TestMigrations, self).setUp() super(TestMigrations, self).setUp()
self.config = migration._alembic_config()
self.migration = __import__('ironic.db.migration',
globals(), locals(), ['INIT_VERSION'], -1)
self.INIT_VERSION = self.migration.INIT_VERSION
if self.migration_api is None: if self.migration_api is None:
temp = __import__('ironic.db.sqlalchemy.migration', self.migration_api = migration
globals(), locals(), ['versioning_api'], -1)
self.migration_api = temp.versioning_api
def _test_mysql_opportunistically(self): def _test_mysql_opportunistically(self):
# Test that table creation on mysql only builds InnoDB tables # Test that table creation on mysql only builds InnoDB tables
@ -497,7 +433,7 @@ class TestMigrations(BaseMigrationTestCase, WalkVersionsMixin):
# build a fully populated mysql database with all the tables # build a fully populated mysql database with all the tables
self._reset_databases() self._reset_databases()
self._walk_versions(engine, False, False) self._walk_versions(engine, self.config, downgrade=False)
connection = engine.connect() connection = engine.connect()
# sanity check # sanity check
@ -510,7 +446,7 @@ class TestMigrations(BaseMigrationTestCase, WalkVersionsMixin):
"from information_schema.TABLES " "from information_schema.TABLES "
"where TABLE_SCHEMA='%s' " "where TABLE_SCHEMA='%s' "
"and ENGINE!='InnoDB' " "and ENGINE!='InnoDB' "
"and TABLE_NAME!='migrate_version'" % "and TABLE_NAME!='alembic_version'" %
database) database)
count = noninnodb.scalar() count = noninnodb.scalar()
self.assertEqual(count, 0, "%d non InnoDB tables created" % count) self.assertEqual(count, 0, "%d non InnoDB tables created" % count)
@@ -523,20 +459,20 @@ class TestMigrations(BaseMigrationTestCase, WalkVersionsMixin):
        # add this to the global lists to make reset work with it, it's removed
        # automatically during Cleanup so no need to clean it up here.
        connect_string = _get_connect_string("postgres", self.USER,
                                             self.PASSWD, self.DATABASE)
        engine = sqlalchemy.create_engine(connect_string)
        (user, password, database, host) = \
            get_db_connection_info(urlparse.urlparse(connect_string))
        self.engines[database] = engine
        self.test_databases[database] = connect_string

        # build a fully populated postgresql database with all the tables
        self._reset_databases()
-       self._walk_versions(engine, False, False)
+       self._walk_versions(engine, self.config, downgrade=False)

    def test_walk_versions(self):
        for engine in self.engines.values():
-           self._walk_versions(engine, snake_walk=False, downgrade=False)
+           self._walk_versions(engine, self.config, downgrade=False)

    def test_mysql_opportunistically(self):
        self._test_mysql_opportunistically()
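# Illustrative sketch, under assumptions, of how _walk_versions can iterate
# alembic revisions rather than numeric sqlalchemy-migrate versions; the
# helper names mirror the calls in the tests above, but this is not the
# implementation added by this commit.
def _walk_versions(self, engine=None, config=None, downgrade=True):
    from alembic import script  # local import keeps the sketch self-contained

    # alembic lists revisions newest-first, so reverse to upgrade in order.
    script_dir = script.ScriptDirectory.from_config(config)
    self.versions = list(reversed(list(script_dir.walk_revisions())))
    for version in self.versions:
        self._migrate_up(engine, config, version.revision, with_data=True)
    if downgrade:
        for version in reversed(self.versions):
            self._migrate_down(engine, config, version.down_revision)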
@@ -563,206 +499,3 @@ class TestMigrations(BaseMigrationTestCase, WalkVersionsMixin):
        if _is_backend_avail('postgres', "openstack_cifail", self.PASSWD,
                             self.DATABASE):
            self.fail("Shouldn't have connected")
def _check_001(self, engine, data):
nodes = db_utils.get_table(engine, 'nodes')
nodes_col = {
'id': 'Integer', 'uuid': 'String', 'power_info': 'Text',
'cpu_arch': 'String', 'cpu_num': 'Integer', 'memory': 'Integer',
'local_storage_max': 'Integer', 'task_state': 'String',
'image_path': 'String', 'instance_uuid': 'String',
'instance_name': 'String', 'extra': 'Text',
'created_at': 'DateTime', 'updated_at': 'DateTime'
}
for col, coltype in nodes_col.items():
self.assertIsInstance(nodes.c[col].type,
getattr(sqlalchemy.types, coltype))
ifaces = db_utils.get_table(engine, 'ifaces')
ifaces_col = {
'id': 'Integer', 'address': 'String', 'node_id': 'Integer',
'extra': 'Text', 'created_at': 'DateTime', 'updated_at': 'DateTime'
}
for col, coltype in ifaces_col.items():
self.assertIsInstance(ifaces.c[col].type,
getattr(sqlalchemy.types, coltype))
fkey, = ifaces.c.node_id.foreign_keys
self.assertEqual(nodes.c.id.name, fkey.column.name)
self.assertEqual(fkey.column.table.name, 'nodes')
def _check_002(self, engine, data):
nodes = db_utils.get_table(engine, 'nodes')
new_col = {
'chassis_id': 'Integer', 'task_start': 'DateTime',
'properties': 'Text', 'control_driver': 'String',
'control_info': 'Text', 'deploy_driver': 'String',
'deploy_info': 'Text', 'reservation': 'String'
}
for col, coltype in new_col.items():
self.assertIsInstance(nodes.c[col].type,
getattr(sqlalchemy.types, coltype))
deleted_cols = ['power_info', 'cpu_arch', 'cpu_num', 'memory',
'local_storage_max', 'image_path', 'instance_name']
for column in nodes.c:
self.assertFalse(column.name in deleted_cols)
def _check_003(self, engine, data):
chassis = db_utils.get_table(engine, 'chassis')
self.assertIsInstance(chassis.c.id.type, sqlalchemy.types.Integer)
self.assertIsInstance(chassis.c.uuid.type, sqlalchemy.types.String)
def _check_004(self, engine, data):
self.assertTrue(engine.dialect.has_table(engine.connect(), 'ports'))
self.assertFalse(engine.dialect.has_table(engine.connect(), 'ifaces'))
def _check_005(self, engine, data):
nodes = db_utils.get_table(engine, 'nodes')
col_names = [column.name for column in nodes.c]
self.assertFalse('deploy_driver' in col_names)
self.assertFalse('deploy_info' in col_names)
self.assertTrue('driver' in col_names)
self.assertTrue('driver_info' in col_names)
def _check_006(self, engine, data):
ports = db_utils.get_table(engine, 'ports')
self.assertIsInstance(ports.c.uuid.type, sqlalchemy.types.String)
nodes = db_utils.get_table(engine, 'nodes')
nodes_data = {
'id': 1, 'uuid': 'uuu-111', 'driver': 'driver1',
'driver_info': 'info1', 'task_state': 'state1',
'extra': 'extra1'
}
nodes.insert().values(nodes_data).execute()
ports_data = {
'address': 'address0', 'node_id': 1, 'uuid': 'uuu-222',
'extra': 'extra2'
}
ports.insert().values(ports_data).execute()
self.assertRaises(
sqlalchemy.exc.IntegrityError,
ports.insert().execute,
{'address': 'address1', 'node_id': 1, 'uuid': 'uuu-222',
'extra': 'extra3'})
def _check_007(self, engine, data):
chassis = db_utils.get_table(engine, 'chassis')
new_col = {'extra': 'Text', 'created_at': 'DateTime',
'updated_at': 'DateTime'}
for col, coltype in new_col.items():
self.assertIsInstance(chassis.c[col].type,
getattr(sqlalchemy.types, coltype))
def _check_008(self, engine, data):
chassis = db_utils.get_table(engine, 'chassis')
self.assertIsInstance(chassis.c.description.type,
sqlalchemy.types.String)
def _check_009(self, engine, data):
nodes = db_utils.get_table(engine, 'nodes')
col_names = [column.name for column in nodes.c]
self.assertFalse('task_start' in col_names)
self.assertFalse('task_state' in col_names)
new_col = {'power_state': 'String',
'target_power_state': 'String',
'provision_state': 'String',
'target_provision_state': 'String'}
for col, coltype in new_col.items():
self.assertIsInstance(nodes.c[col].type,
getattr(sqlalchemy.types, coltype))
def _check_010(self, engine, data):
insp = sqlalchemy.engine.reflection.Inspector.from_engine(engine)
f_keys = insp.get_foreign_keys('nodes')
self.assertEqual(len(f_keys), 1)
f_key = f_keys[0]
self.assertEqual(f_key['referred_table'], 'chassis')
self.assertEqual(f_key['referred_columns'], ['id'])
self.assertEqual(f_key['constrained_columns'], ['chassis_id'])
def _check_011(self, engine, data):
chassis = db_utils.get_table(engine, 'chassis')
chassis_data = {'uuid': 'uuu-111-222', 'extra': 'extra1'}
chassis.insert().values(chassis_data).execute()
self.assertRaises(sqlalchemy.exc.IntegrityError,
chassis.insert().execute,
{'uuid': 'uuu-111-222', 'extra': 'extra2'})
def _check_012(self, engine, data):
self.assertTrue(engine.dialect.has_table(engine.connect(),
'conductors'))
conductor = db_utils.get_table(engine, 'conductors')
conductor_data = {'hostname': 'test-host'}
conductor.insert().values(conductor_data).execute()
self.assertRaises(sqlalchemy.exc.IntegrityError,
conductor.insert().execute,
conductor_data)
# NOTE(deva): different backends raise different error here.
if isinstance(engine.dialect,
sqlalchemy.dialects.sqlite.pysqlite.SQLiteDialect_pysqlite):
self.assertRaises(sqlalchemy.exc.IntegrityError,
conductor.insert().execute,
{'hostname': None})
if isinstance(engine.dialect,
sqlalchemy.dialects.mysql.pymysql.MySQLDialect_pymysql):
self.assertRaises(sqlalchemy.exc.OperationalError,
conductor.insert().execute,
{'hostname': None})
# FIXME: add check for postgres
def _pre_upgrade_013(self, engine):
nodes = db_utils.get_table(engine, 'nodes')
col_names = set(column.name for column in nodes.c)
self.assertFalse('last_error' in col_names)
return col_names
def _check_013(self, engine, col_names_pre):
nodes = db_utils.get_table(engine, 'nodes')
col_names = set(column.name for column in nodes.c)
# didn't lose any columns in the migration
self.assertEqual(col_names_pre, col_names.intersection(col_names_pre))
# only added one 'last_error' column
self.assertEqual(len(col_names_pre), len(col_names) - 1)
self.assertIsInstance(nodes.c['last_error'].type,
getattr(sqlalchemy.types, 'Text'))
def _check_014(self, engine, data):
if engine.name == 'sqlite':
ports = db_utils.get_table(engine, 'ports')
ports_data = {'address': 'BB:BB:AA:AA:AA:AA', 'extra': 'extra1'}
ports.insert().values(ports_data).execute()
self.assertRaises(sqlalchemy.exc.IntegrityError,
ports.insert().execute,
{'address': 'BB:BB:AA:AA:AA:AA',
'extra': 'extra2'})
# test recreate old UC
ports_data = {
'address': 'BB:BB:AA:AA:AA:BB',
'uuid': '1be26c0b-03f2-4d2e-ae87-c02d7f33c781',
'extra': 'extra2'}
ports.insert().values(ports_data).execute()
self.assertRaises(sqlalchemy.exc.IntegrityError,
ports.insert().execute,
{'address': 'CC:BB:AA:AA:AA:CC',
'uuid': '1be26c0b-03f2-4d2e-ae87-c02d7f33c781',
'extra': 'extra3'})
def _check_015(self, engine, data):
nodes = db_utils.get_table(engine, 'nodes')
col_names = [column.name for column in nodes.c]
self.assertIn('maintenance', col_names)
# in some backends bool type is integer
self.assertTrue(isinstance(nodes.c.maintenance.type,
sqlalchemy.types.Boolean) or
isinstance(nodes.c.maintenance.type,
sqlalchemy.types.Integer))
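# With the per-version checks above removed, the schema created by the single
# alembic initial revision can be verified in one place. A sketch of the shape
# such a check could take (method name and column list are drawn from the
# removed checks above, not from the new test code):
def _check_initial_alembic_revision(self, engine, data):
    nodes = db_utils.get_table(engine, 'nodes')
    node_cols = [column.name for column in nodes.c]
    for name in ('uuid', 'chassis_id', 'driver', 'driver_info', 'power_state',
                 'provision_state', 'maintenance', 'last_error', 'extra'):
        self.assertIn(name, node_cols)
    ports = db_utils.get_table(engine, 'ports')
    self.assertIn('address', [column.name for column in ports.c])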

View File

@@ -26,7 +26,7 @@ class DbSyncTestCase(base.DbTestCase):
    def setUp(self):
        super(DbSyncTestCase, self).setUp()

-   def test_sync_and_version(self):
-       migration.db_sync()
-       v = migration.db_version()
-       self.assertTrue(v > migration.INIT_VERSION)
+   def test_upgrade_and_version(self):
+       migration.upgrade('head')
+       v = migration.version()
+       self.assertTrue(v)
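# A rough sketch of alembic-backed upgrade()/version() helpers like the ones
# this test exercises, assuming the database URL is passed in explicitly; the
# real module presumably reads it from configuration instead.
import sqlalchemy
from alembic import command as alembic_command
from alembic import migration as alembic_migration


def upgrade(config, revision=None):
    # Apply every revision up to `revision`; alembic treats 'head' as latest.
    alembic_command.upgrade(config, revision or 'head')


def version(db_url):
    # Return the revision recorded in the alembic_version table, if any.
    engine = sqlalchemy.create_engine(db_url)
    with engine.connect() as connection:
        context = alembic_migration.MigrationContext.configure(connection)
        return context.get_current_revision()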

View File

@@ -1,5 +1,6 @@
pbr>=0.5.21,<1.0
SQLAlchemy>=0.7.8,<=0.8.99
+alembic>=0.4.1
amqplib>=0.6.1
anyjson>=0.3.3
argparse