remove db2 support from tree
This removes DB2 support from the tree completely. DB2 is an oddball, non-open database that made doing live data migrations difficult, and it is used by 0% of respondents in the OpenStack User Survey. Supporting commercial software that has no users, at the cost of delivering features and fixes to our community, is the wrong tradeoff; this corrects that.

It also rewrites existing migrations, which we typically never do, but it is better for fresh Newton environments to fail early during database creation than very deep in the data migration process.

Change-Id: Ifeb9929e4515e3483eb65d371126afd7672b92a4
parent 813787644b
commit cdf74c57a6
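Most of the migration hunks below apply one mechanical pattern: an index that used to be created conditionally after the table (and skipped on DB2, engine name 'ibm_db_sa', because DB2 already creates an index for the unique constraint) is now declared inline in the Table definition. A minimal sketch of the resulting shape, trimmed to a single column of the cell_mappings migration shown in the first hunk; everything beyond the names that appear there is illustrative:

    from sqlalchemy import (Column, Index, MetaData, String, Table,
                            UniqueConstraint)


    def upgrade(migrate_engine):
        meta = MetaData()
        meta.bind = migrate_engine

        # The index is now part of the table definition, so it is created on
        # every backend together with the table itself.
        cell_mappings = Table('cell_mappings', meta,
            Column('uuid', String(length=36), nullable=False),
            UniqueConstraint('uuid', name='uniq_cell_mappings0uuid'),
            Index('uuid_idx', 'uuid'),
            mysql_engine='InnoDB',
            mysql_charset='utf8'
        )
        cell_mappings.create(checkfirst=True)

        # Previously the index was added afterwards and guarded for DB2:
        #
        #     if migrate_engine.name != 'ibm_db_sa':
        #         Index('uuid_idx', cell_mappings.c.uuid)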
@@ -34,14 +34,9 @@ def upgrade(migrate_engine):
         Column('transport_url', Text()),
         Column('database_connection', Text()),
         UniqueConstraint('uuid', name='uniq_cell_mappings0uuid'),
+        Index('uuid_idx', 'uuid'),
         mysql_engine='InnoDB',
         mysql_charset='utf8'
     )
 
-    # NOTE(mriedem): DB2 creates an index when a unique constraint is created
-    # so trying to add a second index on the uuid column will fail with
-    # error SQL0605W, so omit the index in the case of DB2.
-    if migrate_engine.name != 'ibm_db_sa':
-        Index('uuid_idx', cell_mappings.c.uuid)
-
     cell_mappings.create(checkfirst=True)
@@ -35,6 +35,7 @@ def upgrade(migrate_engine):
         Column('project_id', String(length=255), nullable=False),
         UniqueConstraint('instance_uuid',
                          name='uniq_instance_mappings0instance_uuid'),
+        Index('instance_uuid_idx', 'instance_uuid'),
         Index('project_id_idx', 'project_id'),
         ForeignKeyConstraint(columns=['cell_id'],
                              refcolumns=[cell_mappings.c.id]),
@@ -42,10 +43,4 @@ def upgrade(migrate_engine):
         mysql_charset='utf8'
     )
 
-    # NOTE(mriedem): DB2 creates an index when a unique constraint is created
-    # so trying to add a second index on the instance_uuid column will fail
-    # with error SQL0605W, so omit the index in the case of DB2.
-    if migrate_engine.name != 'ibm_db_sa':
-        Index('instance_uuid_idx', instance_mappings.c.instance_uuid)
-
     instance_mappings.create(checkfirst=True)
@@ -34,16 +34,11 @@ def upgrade(migrate_engine):
         Column('host', String(length=255), nullable=False),
         UniqueConstraint('host',
                          name='uniq_host_mappings0host'),
+        Index('host_idx', 'host'),
         ForeignKeyConstraint(columns=['cell_id'],
                              refcolumns=[cell_mappings.c.id]),
         mysql_engine='InnoDB',
         mysql_charset='utf8'
     )
 
-    # NOTE(mriedem): DB2 creates an index when a unique constraint is created
-    # so trying to add a second index on the host column will fail with error
-    # SQL0605W, so omit the index in the case of DB2.
-    if migrate_engine.name != 'ibm_db_sa':
-        Index('host_idx', host_mappings.c.host)
-
     host_mappings.create(checkfirst=True)
@@ -33,14 +33,9 @@ def upgrade(migrate_engine):
         Column('spec', Text, nullable=False),
         UniqueConstraint('instance_uuid',
                          name='uniq_request_specs0instance_uuid'),
+        Index('request_spec_instance_uuid_idx', 'instance_uuid'),
         mysql_engine='InnoDB',
         mysql_charset='utf8'
     )
 
-    # NOTE(mriedem): DB2 creates an index when a unique constraint is created
-    # so trying to add a second index on the host column will fail with error
-    # SQL0605W, so omit the index in the case of DB2.
-    if migrate_engine.name != 'ibm_db_sa':
-        Index('request_spec_instance_uuid_idx', request_specs.c.instance_uuid)
-
     request_specs.create(checkfirst=True)
@@ -58,18 +58,12 @@ def upgrade(migrate_engine):
         Column('value', String(length=255)),
         UniqueConstraint('flavor_id', 'key',
                          name='uniq_flavor_extra_specs0flavor_id0key'),
+        Index('flavor_extra_specs_flavor_id_key_idx', 'flavor_id', 'key'),
         ForeignKeyConstraint(columns=['flavor_id'], refcolumns=[flavors.c.id]),
         mysql_engine='InnoDB',
         mysql_charset='utf8'
     )
 
-    # NOTE(mriedem): DB2 creates an index when a unique constraint is created
-    # so trying to add a second index on the flavor_id/key column will fail
-    # with error SQL0605W, so omit the index in the case of DB2.
-    if migrate_engine.name != 'ibm_db_sa':
-        Index('flavor_extra_specs_flavor_id_key_idx',
-              flavor_extra_specs.c.flavor_id,
-              flavor_extra_specs.c.key)
     flavor_extra_specs.create(checkfirst=True)
 
     flavor_projects = Table('flavor_projects', meta,
@@ -301,13 +301,9 @@ def upgrade(migrate_engine):
         mysql_charset='utf8'
     )
 
-    # NOTE(mriedem): DB2 can't create the FK since we don't have the unique
-    # constraint on instances.uuid because it's nullable (so a unique
-    # constraint isn't created for instances.uuid, only a unique index).
     consoles_instance_uuid_column_args = ['instance_uuid', String(length=36)]
-    if migrate_engine.name != 'ibm_db_sa':
-        consoles_instance_uuid_column_args.append(
-            ForeignKey('instances.uuid', name='consoles_instance_uuid_fkey'))
+    consoles_instance_uuid_column_args.append(
+        ForeignKey('instances.uuid', name='consoles_instance_uuid_fkey'))
 
     consoles = Table('consoles', meta,
         Column('created_at', DateTime),
@@ -1434,9 +1430,6 @@ def upgrade(migrate_engine):
         'block_device_mapping_instance_uuid_virtual_name_device_name_idx'
     ]
 
-    # NOTE(mriedem): DB2 doesn't allow duplicate indexes either.
-    DB2_INDEX_SKIPS = POSTGRES_INDEX_SKIPS
-
     MYSQL_INDEX_SKIPS = [
         # we create this one manually for MySQL above
         'migrations_by_host_nodes_and_status_idx'
@@ -1446,9 +1439,7 @@ def upgrade(migrate_engine):
         if ((migrate_engine.name == 'postgresql' and
                 index.name in POSTGRES_INDEX_SKIPS) or
                 (migrate_engine.name == 'mysql' and
-                index.name in MYSQL_INDEX_SKIPS) or
-                (migrate_engine.name == 'ibm_db_sa' and
-                index.name in DB2_INDEX_SKIPS)):
+                index.name in MYSQL_INDEX_SKIPS)):
             continue
         else:
             index.create(migrate_engine)
@@ -1479,52 +1470,47 @@ def upgrade(migrate_engine):
 
     ]
 
-    # NOTE(mriedem): DB2 doesn't support unique constraints on columns that
-    # are nullable so we can only create foreign keys on unique constraints
-    # that actually exist, which excludes any FK on instances.uuid.
-    if migrate_engine.name != 'ibm_db_sa':
-        secgroup_instance_association_instance_uuid_fkey = (
-            'security_group_instance_association_instance_uuid_fkey')
-        fkeys.extend(
-            [
-                [[fixed_ips.c.instance_uuid],
-                    [instances.c.uuid],
-                    'fixed_ips_instance_uuid_fkey'],
-                [[block_device_mapping.c.instance_uuid],
-                    [instances.c.uuid],
-                    'block_device_mapping_instance_uuid_fkey'],
-                [[instance_info_caches.c.instance_uuid],
-                    [instances.c.uuid],
-                    'instance_info_caches_instance_uuid_fkey'],
-                [[instance_metadata.c.instance_uuid],
-                    [instances.c.uuid],
-                    'instance_metadata_instance_uuid_fkey'],
-                [[instance_system_metadata.c.instance_uuid],
-                    [instances.c.uuid],
-                    'instance_system_metadata_ibfk_1'],
-                [[security_group_instance_association.c.instance_uuid],
-                    [instances.c.uuid],
-                    secgroup_instance_association_instance_uuid_fkey],
-                [[virtual_interfaces.c.instance_uuid],
-                    [instances.c.uuid],
-                    'virtual_interfaces_instance_uuid_fkey'],
-                [[instance_actions.c.instance_uuid],
-                    [instances.c.uuid],
-                    'fk_instance_actions_instance_uuid'],
-                [[instance_faults.c.instance_uuid],
-                    [instances.c.uuid],
-                    'fk_instance_faults_instance_uuid'],
-                [[migrations.c.instance_uuid],
-                    [instances.c.uuid],
-                    'fk_migrations_instance_uuid']
-            ])
+    secgroup_instance_association_instance_uuid_fkey = (
+        'security_group_instance_association_instance_uuid_fkey')
+    fkeys.extend(
+        [
+            [[fixed_ips.c.instance_uuid],
+                [instances.c.uuid],
+                'fixed_ips_instance_uuid_fkey'],
+            [[block_device_mapping.c.instance_uuid],
+                [instances.c.uuid],
+                'block_device_mapping_instance_uuid_fkey'],
+            [[instance_info_caches.c.instance_uuid],
+                [instances.c.uuid],
+                'instance_info_caches_instance_uuid_fkey'],
+            [[instance_metadata.c.instance_uuid],
+                [instances.c.uuid],
+                'instance_metadata_instance_uuid_fkey'],
+            [[instance_system_metadata.c.instance_uuid],
+                [instances.c.uuid],
+                'instance_system_metadata_ibfk_1'],
+            [[security_group_instance_association.c.instance_uuid],
+                [instances.c.uuid],
+                secgroup_instance_association_instance_uuid_fkey],
+            [[virtual_interfaces.c.instance_uuid],
+                [instances.c.uuid],
+                'virtual_interfaces_instance_uuid_fkey'],
+            [[instance_actions.c.instance_uuid],
+                [instances.c.uuid],
+                'fk_instance_actions_instance_uuid'],
+            [[instance_faults.c.instance_uuid],
+                [instances.c.uuid],
+                'fk_instance_faults_instance_uuid'],
+            [[migrations.c.instance_uuid],
+                [instances.c.uuid],
+                'fk_migrations_instance_uuid']
+        ])
 
     for fkey_pair in fkeys:
-        if migrate_engine.name in ('mysql', 'ibm_db_sa'):
-            # For MySQL and DB2 we name our fkeys explicitly
+        if migrate_engine.name in ('mysql'):
+            # For MySQL we name our fkeys explicitly
             # so they match Havana
             fkey = ForeignKeyConstraint(columns=fkey_pair[0],
                                         refcolumns=fkey_pair[1],
@@ -24,16 +24,7 @@ def upgrade(migrate_engine):
     quota_usages.c.resource.alter(nullable=False)
 
     pci_devices = Table('pci_devices', meta, autoload=True)
-    # NOTE(mriedem): The deleted column is in a UniqueConstraint so making
-    # it nullable breaks DB2 with an SQL0542N error, so skip for DB2. There is
-    # a FKey in 246 so we're kind of stuck with DB2 since we can't create the
-    # FKey without the unique constraint and we can't have a unique constraint
-    # on a nullable column.
-    # TODO(mriedem): Revisit this once the deleted column (inherited from the
-    # SoftDeleteMixin in oslo.db) is non-null on all tables, which is going
-    # to be a non-trivial effort.
-    if migrate_engine.name != 'ibm_db_sa':
-        pci_devices.c.deleted.alter(nullable=True)
+    pci_devices.c.deleted.alter(nullable=True)
     pci_devices.c.product_id.alter(nullable=False)
     pci_devices.c.vendor_id.alter(nullable=False)
     pci_devices.c.dev_type.alter(nullable=False)
@@ -52,11 +52,7 @@ def upgrade(migrate_engine):
     instance_uuid_index.create(migrate_engine)
 
     # Foreign key
-    # NOTE(mriedem): DB2 won't create the ForeignKey over the
-    # instances.uuid column since it doesn't have a UniqueConstraint (added
-    # later in the 267 migration). The ForeignKey will be created for DB2
-    # in the 296 migration.
-    if not prefix and migrate_engine.name != 'ibm_db_sa':
+    if not prefix:
         fkey_columns = [table.c.instance_uuid]
         fkey_refcolumns = [instances.c.uuid]
         instance_fkey = ForeignKeyConstraint(
@@ -52,7 +52,7 @@ def upgrade(migrate_engine):
     # the missing indexes in PostgreSQL and SQLite have conflicting names
     # that MySQL allowed.
 
-    if migrate_engine.name in ('sqlite', 'postgresql', 'ibm_db_sa'):
+    if migrate_engine.name in ('sqlite', 'postgresql'):
         for table_name, index_name, column_names in INDEXES:
             ensure_index_exists(migrate_engine, table_name, index_name,
                                 column_names)
@@ -12,82 +12,8 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
-from migrate import ForeignKeyConstraint
-from sqlalchemy import MetaData, Table
-
-
-DB2_FKEYS = [
-    # NOTE(mriedem): Added in 216.
-    {'table': 'fixed_ips',
-     'columns': ['instance_uuid'],
-     'refcolumns': ['instances.uuid'],
-     'name': 'fixed_ips_instance_uuid_fkey'},
-    {'table': 'block_device_mapping',
-     'columns': ['instance_uuid'],
-     'refcolumns': ['instances.uuid'],
-     'name': 'block_device_mapping_instance_uuid_fkey'},
-    {'table': 'instance_info_caches',
-     'columns': ['instance_uuid'],
-     'refcolumns': ['instances.uuid'],
-     'name': 'instance_info_caches_instance_uuid_fkey'},
-    {'table': 'instance_metadata',
-     'columns': ['instance_uuid'],
-     'refcolumns': ['instances.uuid'],
-     'name': 'instance_metadata_instance_uuid_fkey'},
-    {'table': 'instance_system_metadata',
-     'columns': ['instance_uuid'],
-     'refcolumns': ['instances.uuid'],
-     'name': 'instance_system_metadata_ibfk_1'},
-    {'table': 'security_group_instance_association',
-     'columns': ['instance_uuid'],
-     'refcolumns': ['instances.uuid'],
-     'name': 'security_group_instance_association_instance_uuid_fkey'},
-    {'table': 'virtual_interfaces',
-     'columns': ['instance_uuid'],
-     'refcolumns': ['instances.uuid'],
-     'name': 'virtual_interfaces_instance_uuid_fkey'},
-    {'table': 'instance_actions',
-     'columns': ['instance_uuid'],
-     'refcolumns': ['instances.uuid'],
-     'name': 'fk_instance_actions_instance_uuid'},
-    {'table': 'instance_faults',
-     'columns': ['instance_uuid'],
-     'refcolumns': ['instances.uuid'],
-     'name': 'fk_instance_faults_instance_uuid'},
-    {'table': 'migrations',
-     'columns': ['instance_uuid'],
-     'refcolumns': ['instances.uuid'],
-     'name': 'fk_migrations_instance_uuid'},
-    # NOTE(mriedem): Added in 252.
-    {'table': 'instance_extra',
-     'columns': ['instance_uuid'],
-     'refcolumns': ['instances.uuid'],
-     'name': 'fk_instance_extra_instance_uuid'}
-]
-
-
-def _get_refcolumns(metadata, refcolumns):
-    refcolumn_objects = []
-    for refcol in refcolumns:
-        table, column = refcol.split('.')
-        table = Table(table, metadata, autoload=True)
-        refcolumn_objects.append(table.c[column])
-    return refcolumn_objects
-
-
+# NOTE(sdague): this was a db2 specific migration that was removed
+# when we removed db2 support from tree.
 def upgrade(migrate_engine):
-    if migrate_engine.name == 'ibm_db_sa':
-        # create the foreign keys
-        metadata = MetaData(bind=migrate_engine)
-        for values in DB2_FKEYS:
-            # NOTE(mriedem): We have to load all of the tables in the same
-            # MetaData object for the ForeignKey object to work, so we just
-            # load up the Column objects here as well dynamically.
-            params = dict(name=values['name'])
-            table = Table(values['table'], metadata, autoload=True)
-            params['table'] = table
-            params['columns'] = [table.c[col] for col in values['columns']]
-            params['refcolumns'] = _get_refcolumns(metadata,
-                                                   values['refcolumns'])
-            fkey = ForeignKeyConstraint(**params)
-            fkey.create()
+    pass
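The 296 migration above is hollowed out rather than deleted, presumably because sqlalchemy-migrate walks its numbered version scripts in sequence and the numbering needs to stay contiguous for deployments that have already applied it (the commit does not spell this reasoning out). After the hunk, the module body is reduced to a documented no-op:

    # NOTE(sdague): this was a db2 specific migration that was removed
    # when we removed db2 support from tree.
    def upgrade(migrate_engine):
        pass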
@@ -33,11 +33,8 @@ def upgrade(migrate_engine):
         mysql_engine='InnoDB',
         mysql_charset='latin1'
     )
-    # NOTE(mriedem): DB2 creates an index when a unique constraint is created
-    # so trying to add a second index on the name column will fail with error
-    # SQL0605W, so omit the index in the case of DB2.
-    if migrate_engine.name != 'ibm_db_sa':
-        Index('resource_providers_uuid_idx', resource_providers.c.uuid)
+    Index('resource_providers_uuid_idx', resource_providers.c.uuid)
 
     inventories = Table(
         'inventories', meta,
@@ -48,12 +48,9 @@ def upgrade(migrate_engine):
                                 name='uniq_resource_providers0name')
         uc.create()
 
-    # DB2 automatically creates an index for the unique
-    # constraint above, so skip adding the index on DB2.
-    if migrate_engine.name != 'ibm_db_sa':
-        utils.add_index(migrate_engine, 'resource_providers',
-                        'resource_providers_name_idx',
-                        ['name'])
+    utils.add_index(migrate_engine, 'resource_providers',
+                    'resource_providers_name_idx',
+                    ['name'])
 
     if not hasattr(resource_providers.c, 'generation'):
         resource_providers.create_column(generation)
|
|||||||
'resource_provider_id', 'resource_class_id', table=inventories,
|
'resource_provider_id', 'resource_class_id', table=inventories,
|
||||||
name='uniq_inventories0resource_provider_resource_class')
|
name='uniq_inventories0resource_provider_resource_class')
|
||||||
inventories_uc.create()
|
inventories_uc.create()
|
||||||
if migrate_engine.name != 'ibm_db_sa':
|
|
||||||
utils.add_index(migrate_engine, 'inventories',
|
utils.add_index(migrate_engine, 'inventories',
|
||||||
'inventories_resource_provider_resource_class_idx',
|
'inventories_resource_provider_resource_class_idx',
|
||||||
['resource_provider_id', 'resource_class_id'])
|
['resource_provider_id', 'resource_class_id'])
|
||||||
|
@@ -230,9 +230,8 @@ class NovaAPIMigrationsWalk(test_migrations.WalkVersionsMixin):
 
         self.assertUniqueConstraintExists(engine, 'request_specs',
                 ['instance_uuid'])
-        if engine.name != 'ibm_db_sa':
-            self.assertIndexExists(engine, 'request_specs',
-                'request_spec_instance_uuid_idx')
+        self.assertIndexExists(engine, 'request_specs',
+            'request_spec_instance_uuid_idx')
 
     def _check_005(self, engine, data):
         # flavors
|
|||||||
'value']:
|
'value']:
|
||||||
self.assertColumnExists(engine, 'flavor_extra_specs', column)
|
self.assertColumnExists(engine, 'flavor_extra_specs', column)
|
||||||
|
|
||||||
if engine.name != 'ibm_db_sa':
|
self.assertIndexExists(engine, 'flavor_extra_specs',
|
||||||
self.assertIndexExists(engine, 'flavor_extra_specs',
|
'flavor_extra_specs_flavor_id_key_idx')
|
||||||
'flavor_extra_specs_flavor_id_key_idx')
|
|
||||||
self.assertUniqueConstraintExists(engine, 'flavor_extra_specs',
|
self.assertUniqueConstraintExists(engine, 'flavor_extra_specs',
|
||||||
['flavor_id', 'key'])
|
['flavor_id', 'key'])
|
||||||
|
|
||||||
|
@@ -332,10 +332,7 @@ class NovaMigrationsCheckers(test_migrations.ModelsMigrationsSync,
         self.assertFalse(quota_usages.c.resource.nullable)
 
         pci_devices = oslodbutils.get_table(engine, 'pci_devices')
-        if engine.name == 'ibm_db_sa':
-            self.assertFalse(pci_devices.c.deleted.nullable)
-        else:
-            self.assertTrue(pci_devices.c.deleted.nullable)
+        self.assertTrue(pci_devices.c.deleted.nullable)
         self.assertFalse(pci_devices.c.product_id.nullable)
         self.assertFalse(pci_devices.c.vendor_id.nullable)
         self.assertFalse(pci_devices.c.dev_type.nullable)
|
|||||||
'virtual_interfaces_uuid_idx', ['uuid'])
|
'virtual_interfaces_uuid_idx', ['uuid'])
|
||||||
|
|
||||||
def _check_296(self, engine, data):
|
def _check_296(self, engine, data):
|
||||||
if engine.name == 'ibm_db_sa':
|
pass
|
||||||
# Make sure the last FK in the list was created.
|
|
||||||
inspector = reflection.Inspector.from_engine(engine)
|
|
||||||
fkeys = inspector.get_foreign_keys('instance_extra')
|
|
||||||
fkey_names = [fkey['name'] for fkey in fkeys]
|
|
||||||
self.assertIn('fk_instance_extra_instance_uuid', fkey_names)
|
|
||||||
|
|
||||||
def _check_297(self, engine, data):
|
def _check_297(self, engine, data):
|
||||||
self.assertColumnExists(engine, 'services', 'forced_down')
|
self.assertColumnExists(engine, 'services', 'forced_down')
|
||||||
|
releasenotes/notes/rm_db2-926e38cbda44a55f.yaml (new file)
@@ -0,0 +1,9 @@
+---
+upgrade:
+    - DB2 database support was removed from tree. This is a non open
+      source database that had no 3rd party CI, and a set of constraints
+      that meant we had to keep special casing it in code. It also made
+      the online data migrations needed for cells v2 and placement
+      engine much more difficult. With 0% of OpenStack survey users
+      reporting usage we decided it was time to remove this to focus on
+      features needed by the larger community.
@@ -40,9 +40,6 @@ Run like:
     ./tools/db/schema_diff.py postgresql://localhost \
     master:latest my_branch:82
 
-    DB2:
-    ./tools/db/schema_diff.py ibm_db_sa://localhost \
-    master:latest my_branch:82
 """
 
 from __future__ import print_function
|
|||||||
shell=True)
|
shell=True)
|
||||||
|
|
||||||
|
|
||||||
class Ibm_db_sa(object):
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def db2cmd(cls, cmd):
|
|
||||||
"""Wraps a command to be run under the DB2 instance user."""
|
|
||||||
subprocess.check_call('su - $(db2ilist) -c "%s"' % cmd, shell=True)
|
|
||||||
|
|
||||||
def create(self, name):
|
|
||||||
self.db2cmd('db2 \'create database %s\'' % name)
|
|
||||||
|
|
||||||
def drop(self, name):
|
|
||||||
self.db2cmd('db2 \'drop database %s\'' % name)
|
|
||||||
|
|
||||||
def dump(self, name, dump_filename):
|
|
||||||
self.db2cmd('db2look -d %(name)s -e -o %(dump_filename)s' %
|
|
||||||
{'name': name, 'dump_filename': dump_filename})
|
|
||||||
# The output file gets dumped to the db2 instance user's home directory
|
|
||||||
# so we have to copy it back to our current working directory.
|
|
||||||
subprocess.check_call('cp /home/$(db2ilist)/%s ./' % dump_filename,
|
|
||||||
shell=True)
|
|
||||||
|
|
||||||
|
|
||||||
def _get_db_driver_class(db_url):
|
def _get_db_driver_class(db_url):
|
||||||
try:
|
try:
|
||||||
return globals()[db_url.split('://')[0].capitalize()]
|
return globals()[db_url.split('://')[0].capitalize()]
|
||||||
|