Add DB2 support

Notes on migration changes/tests:

1. The initial havana migration does not create any foreign keys which
   require the instances.uuid column since it's nullable and DB2 won't
   allow creating a unique constraint over a nullable column. To create
   the foreign keys on instances.uuid, this change depends on commit
   e07a2b2d4b to make instances.uuid
   non-nullable and create a unique constraint on that column. Then
   the new migration here creates the missing instances.uuid
   related foreign keys for DB2.

2. Commit b930fb3a6b changed
   test_migrations.py to use oslo.db's opportunistic test fixture
   which currently only supports sqlite/mysql/postgresql. Unit test
   support for DB2 therefore needs to be added to oslo.db and then
   hooked into nova, but that's targeted for the 2016.1 'M' release
   so is not part of this change.

3. The 247_nullable_mismatch migration is updated for DB2 because
   making the pci_devices.deleted column nullable=True fails since
   the column is in a UniqueConstraint already (DB2 would require
   it to be in a unique index instead for the column to be nullable).
   There is a foreign key involved (created in 246) so for DB2 to
   have the same FKey it requires a UC and that requires the deleted
   column to be non-null, which doesn't work with the SoftDeleteMixin.
   So in this case the schema will diverge for DB2 until we make
   the deleted column non-nullable for all tables.

4. The 252_add_instance_extra_table migration is updated for DB2
   because a foreign key is created between the instance_extra
   table and the instances.uuid column (which is nullable at the
   time) so we have to handle that foreign key constraint creation
   in the 296 migration with the other foreign keys to
   instances.uuid.

5. The 271 migration is updated to add ibm_db_sa the same as
   sqlite and postgresql since it works the same in this case for
   DB2.

6. The API migrations need checks for DB2 to avoid a duplicate index
   SQL0605W error since DB2 implicitly creates an Index when a
   UniqueConstraint is created and will fail if trying to add a
   duplicate index on the same column(s) that are in the UC.

Implements blueprint db2-database

Change-Id: Ic4224e2545bcdfeb236b071642f9f16d9ee3b99f
This commit is contained in:
Matt Riedemann 2015-06-16 08:38:39 -07:00
parent 47b233f988
commit 9a84211729
10 changed files with 234 additions and 47 deletions

View File

@ -34,9 +34,14 @@ def upgrade(migrate_engine):
Column('transport_url', Text()),
Column('database_connection', Text()),
UniqueConstraint('uuid', name='uniq_cell_mappings0uuid'),
Index('uuid_idx', 'uuid'),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
# NOTE(mriedem): DB2 creates an index when a unique constraint is created
# so trying to add a second index on the uuid column will fail with
# error SQL0605W, so omit the index in the case of DB2.
if migrate_engine.name != 'ibm_db_sa':
Index('uuid_idx', cell_mappings.c.uuid)
cell_mappings.create(checkfirst=True)

View File

@ -35,11 +35,17 @@ def upgrade(migrate_engine):
Column('project_id', String(length=255), nullable=False),
UniqueConstraint('instance_uuid',
name='uniq_instance_mappings0instance_uuid'),
Index('instance_uuid_idx', 'instance_uuid'),
Index('project_id_idx', 'project_id'),
ForeignKeyConstraint(columns=['cell_id'],
refcolumns=[cell_mappings.c.id]),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
# NOTE(mriedem): DB2 creates an index when a unique constraint is created
# so trying to add a second index on the instance_uuid column will fail
# with error SQL0605W, so omit the index in the case of DB2.
if migrate_engine.name != 'ibm_db_sa':
Index('instance_uuid_idx', instance_mappings.c.instance_uuid)
instance_mappings.create(checkfirst=True)

View File

@ -34,10 +34,16 @@ def upgrade(migrate_engine):
Column('host', String(length=255), nullable=False),
UniqueConstraint('host',
name='uniq_host_mappings0host'),
Index('host_idx', 'host'),
ForeignKeyConstraint(columns=['cell_id'],
refcolumns=[cell_mappings.c.id]),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
# NOTE(mriedem): DB2 creates an index when a unique constraint is created
# so trying to add a second index on the host column will fail with error
# SQL0605W, so omit the index in the case of DB2.
if migrate_engine.name != 'ibm_db_sa':
Index('host_idx', host_mappings.c.host)
host_mappings.create(checkfirst=True)

View File

@ -328,6 +328,14 @@ def upgrade(migrate_engine):
mysql_charset='utf8'
)
# NOTE(mriedem): DB2 can't create the FK since we don't have the unique
# constraint on instances.uuid because it's nullable (so a unique
# constraint isn't created for instances.uuid, only a unique index).
consoles_instance_uuid_column_args = ['instance_uuid', String(length=36)]
if migrate_engine.name != 'ibm_db_sa':
consoles_instance_uuid_column_args.append(
ForeignKey('instances.uuid', name='consoles_instance_uuid_fkey'))
consoles = Table('consoles', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
@ -337,9 +345,7 @@ def upgrade(migrate_engine):
Column('password', String(length=255)),
Column('port', Integer),
Column('pool_id', Integer, ForeignKey('console_pools.id')),
Column('instance_uuid', String(length=36),
ForeignKey('instances.uuid',
name='consoles_instance_uuid_fkey')),
Column(*consoles_instance_uuid_column_args),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
@ -1455,39 +1461,30 @@ def upgrade(migrate_engine):
'block_device_mapping_instance_uuid_virtual_name_device_name_idx'
]
# NOTE(mriedem): DB2 doesn't allow duplicate indexes either.
DB2_INDEX_SKIPS = POSTGRES_INDEX_SKIPS
MYSQL_INDEX_SKIPS = [
# we create this one manually for MySQL above
'migrations_by_host_nodes_and_status_idx'
]
for index in common_indexes:
if migrate_engine.name == 'postgresql' and \
index.name in POSTGRES_INDEX_SKIPS:
continue
if migrate_engine.name == 'mysql' and \
index.name in MYSQL_INDEX_SKIPS:
if ((migrate_engine.name == 'postgresql' and
index.name in POSTGRES_INDEX_SKIPS) or
(migrate_engine.name == 'mysql' and
index.name in MYSQL_INDEX_SKIPS) or
(migrate_engine.name == 'ibm_db_sa' and
index.name in DB2_INDEX_SKIPS)):
continue
else:
index.create(migrate_engine)
Index('project_id', dns_domains.c.project_id).drop
# Common foreign keys
fkeys = [
[[fixed_ips.c.instance_uuid],
[instances.c.uuid],
'fixed_ips_instance_uuid_fkey'],
[[block_device_mapping.c.instance_uuid],
[instances.c.uuid],
'block_device_mapping_instance_uuid_fkey'],
[[instance_info_caches.c.instance_uuid],
[instances.c.uuid],
'instance_info_caches_instance_uuid_fkey'],
[[instance_metadata.c.instance_uuid],
[instances.c.uuid],
'instance_metadata_instance_uuid_fkey'],
[[instance_system_metadata.c.instance_uuid],
[instances.c.uuid],
'instance_system_metadata_ibfk_1'],
[[instance_type_projects.c.instance_type_id],
[instance_types.c.id],
'instance_type_projects_ibfk_1'],
@ -1497,35 +1494,65 @@ def upgrade(migrate_engine):
[[reservations.c.usage_id],
[quota_usages.c.id],
'reservations_ibfk_1'],
[[security_group_instance_association.c.instance_uuid],
[instances.c.uuid],
'security_group_instance_association_instance_uuid_fkey'],
[[security_group_instance_association.c.security_group_id],
[security_groups.c.id],
'security_group_instance_association_ibfk_1'],
[[virtual_interfaces.c.instance_uuid],
[instances.c.uuid],
'virtual_interfaces_instance_uuid_fkey'],
[[compute_node_stats.c.compute_node_id],
[compute_nodes.c.id],
'fk_compute_node_stats_compute_node_id'],
[[compute_nodes.c.service_id],
[services.c.id],
'fk_compute_nodes_service_id'],
[[instance_actions.c.instance_uuid],
[instances.c.uuid],
'fk_instance_actions_instance_uuid'],
[[instance_faults.c.instance_uuid],
[instances.c.uuid],
'fk_instance_faults_instance_uuid'],
[[migrations.c.instance_uuid],
[instances.c.uuid],
'fk_migrations_instance_uuid'],
]
# NOTE(mriedem): DB2 doesn't support unique constraints on columns that
# are nullable so we can only create foreign keys on unique constraints
# that actually exist, which excludes any FK on instances.uuid.
if migrate_engine.name != 'ibm_db_sa':
secgroup_instance_association_instance_uuid_fkey = (
'security_group_instance_association_instance_uuid_fkey')
fkeys.extend(
[
[[fixed_ips.c.instance_uuid],
[instances.c.uuid],
'fixed_ips_instance_uuid_fkey'],
[[block_device_mapping.c.instance_uuid],
[instances.c.uuid],
'block_device_mapping_instance_uuid_fkey'],
[[instance_info_caches.c.instance_uuid],
[instances.c.uuid],
'instance_info_caches_instance_uuid_fkey'],
[[instance_metadata.c.instance_uuid],
[instances.c.uuid],
'instance_metadata_instance_uuid_fkey'],
[[instance_system_metadata.c.instance_uuid],
[instances.c.uuid],
'instance_system_metadata_ibfk_1'],
[[security_group_instance_association.c.instance_uuid],
[instances.c.uuid],
secgroup_instance_association_instance_uuid_fkey],
[[virtual_interfaces.c.instance_uuid],
[instances.c.uuid],
'virtual_interfaces_instance_uuid_fkey'],
[[instance_actions.c.instance_uuid],
[instances.c.uuid],
'fk_instance_actions_instance_uuid'],
[[instance_faults.c.instance_uuid],
[instances.c.uuid],
'fk_instance_faults_instance_uuid'],
[[migrations.c.instance_uuid],
[instances.c.uuid],
'fk_migrations_instance_uuid']
])
for fkey_pair in fkeys:
if migrate_engine.name == 'mysql':
# For MySQL we name our fkeys explicitly so they match Havana
if migrate_engine.name in ('mysql', 'ibm_db_sa'):
# For MySQL and DB2 we name our fkeys explicitly
# so they match Havana
fkey = ForeignKeyConstraint(columns=fkey_pair[0],
refcolumns=fkey_pair[1],
name=fkey_pair[2])

View File

@ -24,7 +24,16 @@ def upgrade(migrate_engine):
quota_usages.c.resource.alter(nullable=False)
pci_devices = Table('pci_devices', meta, autoload=True)
pci_devices.c.deleted.alter(nullable=True)
# NOTE(mriedem): The deleted column is in a UniqueConstraint so making
# it nullable breaks DB2 with an SQL0542N error, so skip for DB2. There is
# a FKey in 246 so we're kind of stuck with DB2 since we can't create the
# FKey without the unique constraint and we can't have a unique constraint
# on a nullable column.
# TODO(mriedem): Revisit this once the deleted column (inherited from the
# SoftDeleteMixin in oslo.db) is non-null on all tables, which is going
# to be a non-trivial effort.
if migrate_engine.name != 'ibm_db_sa':
pci_devices.c.deleted.alter(nullable=True)
pci_devices.c.product_id.alter(nullable=False)
pci_devices.c.vendor_id.alter(nullable=False)
pci_devices.c.dev_type.alter(nullable=False)

View File

@ -52,7 +52,11 @@ def upgrade(migrate_engine):
instance_uuid_index.create(migrate_engine)
# Foreign key
if not prefix:
# NOTE(mriedem): DB2 won't create the ForeignKey over the
# instances.uuid column since it doesn't have a UniqueConstraint (added
# later in the 267 migration). The ForeignKey will be created for DB2
# in the 296 migration.
if not prefix and migrate_engine.name != 'ibm_db_sa':
fkey_columns = [table.c.instance_uuid]
fkey_refcolumns = [instances.c.uuid]
instance_fkey = ForeignKeyConstraint(

View File

@ -52,7 +52,7 @@ def upgrade(migrate_engine):
# the missing indexes in PostgreSQL and SQLite have conflicting names
# that MySQL allowed.
if migrate_engine.name in ('sqlite', 'postgresql'):
if migrate_engine.name in ('sqlite', 'postgresql', 'ibm_db_sa'):
for table_name, index_name, column_names in INDEXES:
ensure_index_exists(migrate_engine, table_name, index_name,
column_names)

View File

@ -0,0 +1,93 @@
# Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from migrate import ForeignKeyConstraint
from sqlalchemy import MetaData, Table
# Foreign keys on instances.uuid that earlier migrations could not create
# for DB2 because instances.uuid was nullable at the time (DB2 requires a
# unique constraint -- not just a unique index -- on the referenced column,
# and a unique constraint cannot cover a nullable column). Each entry names
# the referring table/column(s), the referenced 'table.column', and the
# explicit constraint name so it matches what the other backends got in the
# original migrations.
DB2_FKEYS = [
    # NOTE(mriedem): Added in 216.
    {'table': 'fixed_ips',
     'columns': ['instance_uuid'],
     'refcolumns': ['instances.uuid'],
     'name': 'fixed_ips_instance_uuid_fkey'},
    {'table': 'block_device_mapping',
     'columns': ['instance_uuid'],
     'refcolumns': ['instances.uuid'],
     'name': 'block_device_mapping_instance_uuid_fkey'},
    {'table': 'instance_info_caches',
     'columns': ['instance_uuid'],
     'refcolumns': ['instances.uuid'],
     'name': 'instance_info_caches_instance_uuid_fkey'},
    {'table': 'instance_metadata',
     'columns': ['instance_uuid'],
     'refcolumns': ['instances.uuid'],
     'name': 'instance_metadata_instance_uuid_fkey'},
    {'table': 'instance_system_metadata',
     'columns': ['instance_uuid'],
     'refcolumns': ['instances.uuid'],
     'name': 'instance_system_metadata_ibfk_1'},
    {'table': 'security_group_instance_association',
     'columns': ['instance_uuid'],
     'refcolumns': ['instances.uuid'],
     'name': 'security_group_instance_association_instance_uuid_fkey'},
    {'table': 'virtual_interfaces',
     'columns': ['instance_uuid'],
     'refcolumns': ['instances.uuid'],
     'name': 'virtual_interfaces_instance_uuid_fkey'},
    {'table': 'instance_actions',
     'columns': ['instance_uuid'],
     'refcolumns': ['instances.uuid'],
     'name': 'fk_instance_actions_instance_uuid'},
    {'table': 'instance_faults',
     'columns': ['instance_uuid'],
     'refcolumns': ['instances.uuid'],
     'name': 'fk_instance_faults_instance_uuid'},
    {'table': 'migrations',
     'columns': ['instance_uuid'],
     'refcolumns': ['instances.uuid'],
     'name': 'fk_migrations_instance_uuid'},
    # NOTE(mriedem): Added in 252.
    {'table': 'instance_extra',
     'columns': ['instance_uuid'],
     'refcolumns': ['instances.uuid'],
     'name': 'fk_instance_extra_instance_uuid'}
]
def _get_refcolumns(metadata, refcolumns):
    """Resolve 'table.column' strings into reflected Column objects.

    :param metadata: MetaData bound to the engine; tables are reflected
        into it via autoload so all Column objects share one MetaData
        (required for ForeignKeyConstraint to work).
    :param refcolumns: list of 'table.column' strings to resolve.
    :returns: list of sqlalchemy Column objects, in input order.
    """
    resolved = []
    for spec in refcolumns:
        # Split the dotted name into its table and column parts.
        table_name, column_name = spec.split('.')
        reflected = Table(table_name, metadata, autoload=True)
        resolved.append(reflected.c[column_name])
    return resolved
def upgrade(migrate_engine):
    """Create the instances.uuid foreign keys for DB2.

    Earlier migrations skipped these foreign keys on DB2 because
    instances.uuid was nullable then; now that a unique constraint
    exists on that column, create every FK listed in DB2_FKEYS.
    No-op for all other backends.
    """
    if migrate_engine.name != 'ibm_db_sa':
        return
    # NOTE(mriedem): We have to load all of the tables in the same
    # MetaData object for the ForeignKey object to work, so we just
    # load up the Column objects here as well dynamically.
    metadata = MetaData(bind=migrate_engine)
    for fkey_info in DB2_FKEYS:
        source_table = Table(fkey_info['table'], metadata, autoload=True)
        source_columns = [source_table.c[col]
                          for col in fkey_info['columns']]
        target_columns = _get_refcolumns(metadata, fkey_info['refcolumns'])
        ForeignKeyConstraint(name=fkey_info['name'],
                             table=source_table,
                             columns=source_columns,
                             refcolumns=target_columns).create()

View File

@ -332,7 +332,10 @@ class NovaMigrationsCheckers(test_migrations.ModelsMigrationsSync,
self.assertFalse(quota_usages.c.resource.nullable)
pci_devices = oslodbutils.get_table(engine, 'pci_devices')
self.assertTrue(pci_devices.c.deleted.nullable)
if engine.name == 'ibm_db_sa':
self.assertFalse(pci_devices.c.deleted.nullable)
else:
self.assertTrue(pci_devices.c.deleted.nullable)
self.assertFalse(pci_devices.c.product_id.nullable)
self.assertFalse(pci_devices.c.vendor_id.nullable)
self.assertFalse(pci_devices.c.dev_type.nullable)
@ -747,6 +750,14 @@ class NovaMigrationsCheckers(test_migrations.ModelsMigrationsSync,
self.assertIndexMembers(engine, 'virtual_interfaces',
'virtual_interfaces_uuid_idx', ['uuid'])
def _check_296(self, engine, data):
if engine.name == 'ibm_db_sa':
# Make sure the last FK in the list was created.
inspector = reflection.Inspector.from_engine(engine)
fkeys = inspector.get_foreign_keys('instance_extra')
fkey_names = [fkey['name'] for fkey in fkeys]
self.assertIn('fk_instance_extra_instance_uuid', fkey_names)
class TestNovaMigrationsSQLite(NovaMigrationsCheckers,
test_base.DbTestCase,

View File

@ -39,6 +39,10 @@ Run like:
./tools/db/schema_diff.py postgresql://localhost \
master:latest my_branch:82
DB2:
./tools/db/schema_diff.py ibm_db_sa://localhost \
master:latest my_branch:82
"""
from __future__ import print_function
@ -117,6 +121,28 @@ class Postgresql(object):
shell=True)
class Ibm_db_sa(object):
    """DB2 driver for schema_diff: create/drop/dump a database.

    Shell commands are run as the DB2 instance user (looked up with
    ``db2ilist``) because the db2/db2look tools must execute under
    that account.
    """

    @classmethod
    def db2cmd(cls, cmd):
        """Wraps a command to be run under the DB2 instance user."""
        subprocess.check_call('su - $(db2ilist) -c "%s"' % cmd, shell=True)

    def create(self, name):
        # Create a database with the given name via the db2 CLI.
        self.db2cmd('db2 \'create database %s\'' % name)

    def drop(self, name):
        # Drop the named database via the db2 CLI.
        self.db2cmd('db2 \'drop database %s\'' % name)

    def dump(self, name, dump_filename):
        # db2look emits the DDL for the database into dump_filename.
        self.db2cmd('db2look -d %(name)s -e -o %(dump_filename)s' %
                    {'name': name, 'dump_filename': dump_filename})
        # The output file gets dumped to the db2 instance user's home directory
        # so we have to copy it back to our current working directory.
        subprocess.check_call('cp /home/$(db2ilist)/%s ./' % dump_filename,
                              shell=True)
def _get_db_driver_class(db_url):
try:
return globals()[db_url.split('://')[0].capitalize()]