Remove patch_migrate()

This is essentially a revert of oslo-incubator commit 3f503faac, since
sqlalchemy-migrate 0.9.1 now has that code via commit 93efb62fd.

Change-Id: I5d7d3eaec236140cd47fe419ed6905115354501d
Author: Matt Riedemann
Date:   2014-05-05 19:52:31 -07:00
Parent: 9e8de204e4
Commit: 5671755549

4 changed files with 1 addition and 218 deletions
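
In practice the revert means callers stop invoking migration.patch_migrate() before altering SQLite schemas; sqlalchemy-migrate >= 0.9.1 handles the constraint work itself. A minimal sketch of that usage, modelled on the tests removed below (the engine URL, table, and constraint names are illustrative only, not taken from this patch):

# Sketch only: with sqlalchemy-migrate >= 0.9.1, dropping a named unique
# constraint on SQLite works without the monkey patch removed below.
import sqlalchemy as sa
from migrate.changeset.constraint import UniqueConstraint

engine = sa.create_engine('sqlite://')
table = sa.Table(
    'example', sa.MetaData(bind=engine),
    sa.Column('a', sa.Integer),
    sa.Column('b', sa.Integer),
    sa.UniqueConstraint('a', 'b', name='uniq_a_b'),
)
table.create()

# Reflect the table the way migration scripts do, then drop the constraint
# by name; sqlalchemy-migrate now recreates the SQLite table on its own.
reflected = sa.Table('example', sa.MetaData(bind=engine), autoload=True)
UniqueConstraint(reflected.c.a, reflected.c.b, name='uniq_a_b').drop()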


@@ -40,134 +40,16 @@
# THE SOFTWARE.
import os
import re

from migrate.changeset import ansisql
from migrate.changeset.databases import sqlite
from migrate import exceptions as versioning_exceptions
from migrate.versioning import api as versioning_api
from migrate.versioning.repository import Repository
import sqlalchemy
from sqlalchemy.schema import UniqueConstraint

from oslo.db import exception
from oslo.db.openstack.common.gettextutils import _


def _get_unique_constraints(self, table):
    """Retrieve information about existing unique constraints of the table

    This feature is needed for _recreate_table() to work properly.
    Unfortunately, it's not available in sqlalchemy 0.7.x/0.8.x.
    """
    data = table.metadata.bind.execute(
        """SELECT sql
           FROM sqlite_master
           WHERE
               type='table' AND
               name=:table_name""",
        table_name=table.name
    ).fetchone()[0]

    UNIQUE_PATTERN = "CONSTRAINT (\w+) UNIQUE \(([^\)]+)\)"
    return [
        UniqueConstraint(
            *[getattr(table.columns, c.strip(' "')) for c in cols.split(",")],
            name=name
        )
        for name, cols in re.findall(UNIQUE_PATTERN, data)
    ]


def _recreate_table(self, table, column=None, delta=None, omit_uniques=None):
    """Recreate the table properly

    Unlike the corresponding original method of sqlalchemy-migrate this one
    doesn't drop existing unique constraints when creating a new one.
    """
    table_name = self.preparer.format_table(table)

    # we remove all indexes so as not to have
    # problems during copy and re-create
    for index in table.indexes:
        index.drop()

    # reflect existing unique constraints
    for uc in self._get_unique_constraints(table):
        table.append_constraint(uc)

    # omit given unique constraints when creating a new table if required
    table.constraints = set([
        cons for cons in table.constraints
        if omit_uniques is None or cons.name not in omit_uniques
    ])

    self.append('ALTER TABLE %s RENAME TO migration_tmp' % table_name)
    self.execute()

    insertion_string = self._modify_table(table, column, delta)

    table.create(bind=self.connection)
    self.append(insertion_string % {'table_name': table_name})
    self.execute()
    self.append('DROP TABLE migration_tmp')
    self.execute()


def _visit_migrate_unique_constraint(self, *p, **k):
    """Drop the given unique constraint

    The corresponding original method of sqlalchemy-migrate just
    raises NotImplemented error
    """
    self.recreate_table(p[0].table, omit_uniques=[p[0].name])


def patch_migrate():
    """A workaround for SQLite's inability to alter things

    SQLite abilities to alter tables are very limited (please read
    http://www.sqlite.org/lang_altertable.html for more details).
    E. g. one can't drop a column or a constraint in SQLite. The
    workaround for this is to recreate the original table omitting
    the corresponding constraint (or column).

    sqlalchemy-migrate library has recreate_table() method that
    implements this workaround, but it does it wrong:

    - information about unique constraints of a table
      is not retrieved. So if you have a table with one
      unique constraint and a migration adding another one
      you will end up with a table that has only the
      latter unique constraint, and the former will be lost

    - dropping of unique constraints is not supported at all

    The proper way to fix this is to provide a pull-request to
    sqlalchemy-migrate, but the project seems to be dead. So we
    can go on with monkey-patching of the lib at least for now.
    """
    # this patch is needed to ensure that recreate_table() doesn't drop
    # existing unique constraints of the table when creating a new one
    helper_cls = sqlite.SQLiteHelper
    helper_cls.recreate_table = _recreate_table
    helper_cls._get_unique_constraints = _get_unique_constraints

    # this patch is needed to be able to drop existing unique constraints
    constraint_cls = sqlite.SQLiteConstraintDropper
    constraint_cls.visit_migrate_unique_constraint = \
        _visit_migrate_unique_constraint
    constraint_cls.__bases__ = (ansisql.ANSIColumnDropper,
                                sqlite.SQLiteConstraintGenerator)


def db_sync(engine, abs_path, version=None, init_version=0, sanity_check=True):
    """Upgrade or downgrade a database.


@@ -4,5 +4,5 @@ iso8601>=0.1.9
lockfile>=0.8
oslo.config>=1.2.0
SQLAlchemy>=0.7.8,<=0.9.99
sqlalchemy-migrate>=0.8.2,!=0.8.4
sqlalchemy-migrate>=0.9.1
stevedore>=0.14
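
Because the monkey patch is gone, projects consuming oslo.db now rely on this new minimum at runtime. A hypothetical guard (not part of this change) that a consumer could use to fail early:

# Hypothetical check, not from this patch: fail early if the installed
# sqlalchemy-migrate predates the built-in SQLite constraint support.
import pkg_resources

try:
    pkg_resources.require('sqlalchemy-migrate>=0.9.1')
except pkg_resources.VersionConflict:
    raise RuntimeError('sqlalchemy-migrate >= 0.9.1 is required now that '
                       'oslo.db no longer ships patch_migrate()')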


@@ -1,97 +0,0 @@
# Copyright (c) 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from migrate.changeset.constraint import UniqueConstraint
from migrate.changeset.databases import sqlite
import sqlalchemy as sa

from oslo.db.sqlalchemy import migration
from oslo.db.sqlalchemy import test_base


def uniques(*constraints):
    """Make a sequence of UniqueConstraint instances easily comparable

    Convert a sequence of UniqueConstraint instances into a set of
    tuples of form (constraint_name, (constraint_columns)) so that
    assertEqual() will be able to compare sets of unique constraints
    """
    return set((uc.name, tuple(uc.columns.keys())) for uc in constraints)


class TestSqliteUniqueConstraints(test_base.DbTestCase):
    def setUp(self):
        super(TestSqliteUniqueConstraints, self).setUp()

        migration.patch_migrate()
        self.helper = sqlite.SQLiteHelper()

        test_table = sa.Table(
            'test_table',
            sa.schema.MetaData(bind=self.engine),
            sa.Column('a', sa.Integer),
            sa.Column('b', sa.String(10)),
            sa.Column('c', sa.Integer),
            sa.UniqueConstraint('a', 'b', name='unique_a_b'),
            sa.UniqueConstraint('b', 'c', name='unique_b_c')
        )
        test_table.create()
        self.addCleanup(test_table.drop)

        # NOTE(rpodolyaka): it's important to use the reflected table here
        #                   rather than original one because this is what
        #                   we actually do in db migrations code
        self.reflected_table = sa.Table(
            'test_table',
            sa.schema.MetaData(bind=self.engine),
            autoload=True
        )

    @test_base.backend_specific('sqlite')
    def test_get_unique_constraints(self):
        table = self.reflected_table

        existing = uniques(*self.helper._get_unique_constraints(table))
        should_be = uniques(
            sa.UniqueConstraint(table.c.a, table.c.b, name='unique_a_b'),
            sa.UniqueConstraint(table.c.b, table.c.c, name='unique_b_c'),
        )
        self.assertEqual(should_be, existing)

    @test_base.backend_specific('sqlite')
    def test_add_unique_constraint(self):
        table = self.reflected_table
        UniqueConstraint(table.c.a, table.c.c, name='unique_a_c').create()

        existing = uniques(*self.helper._get_unique_constraints(table))
        should_be = uniques(
            sa.UniqueConstraint(table.c.a, table.c.b, name='unique_a_b'),
            sa.UniqueConstraint(table.c.b, table.c.c, name='unique_b_c'),
            sa.UniqueConstraint(table.c.a, table.c.c, name='unique_a_c'),
        )
        self.assertEqual(should_be, existing)

    @test_base.backend_specific('sqlite')
    def test_drop_unique_constraint(self):
        table = self.reflected_table
        UniqueConstraint(table.c.a, table.c.b, name='unique_a_b').drop()

        existing = uniques(*self.helper._get_unique_constraints(table))
        should_be = uniques(
            sa.UniqueConstraint(table.c.b, table.c.c, name='unique_b_c'),
        )
        self.assertEqual(should_be, existing)
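
For context on what these deleted tests covered: _get_unique_constraints() regex-matches the raw CREATE TABLE statement stored in sqlite_master, and uniques() above merely normalizes the result for comparison. A standalone sketch of that extraction, with an invented DDL string:

# Sketch of the sqlite_master parsing the deleted helper performed; the DDL
# string below is a made-up example.
import re

UNIQUE_PATTERN = r"CONSTRAINT (\w+) UNIQUE \(([^\)]+)\)"
ddl = ('CREATE TABLE test_table (a INTEGER, b VARCHAR(10), c INTEGER, '
       'CONSTRAINT unique_a_b UNIQUE (a, b), '
       'CONSTRAINT unique_b_c UNIQUE (b, c))')

constraints = [
    (name, tuple(col.strip(' "') for col in cols.split(',')))
    for name, cols in re.findall(UNIQUE_PATTERN, ddl)
]
print(constraints)
# [('unique_a_b', ('a', 'b')), ('unique_b_c', ('b', 'c'))]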


@@ -34,7 +34,6 @@ from sqlalchemy.types import UserDefinedType, NullType
from oslo.db import exception
from oslo.db.openstack.common.fixture import moxstubout
from oslo.db.sqlalchemy import migration
from oslo.db.sqlalchemy import models
from oslo.db.sqlalchemy import session
from oslo.db.sqlalchemy import test_migrations
@@ -184,7 +183,6 @@ class TestMigrationUtils(test_migrations.BaseMigrationTestCase):
    def setUp(self):
        super(TestMigrationUtils, self).setUp()
        migration.patch_migrate()

    def _populate_db_for_drop_duplicate_entries(self, engine, meta,
                                                table_name):