Merge "Replace SQL code with ORM analogs in DB migration scripts"

This commit is contained in:
Jenkins 2015-04-02 13:51:49 +00:00 committed by Gerrit Code Review
commit e7259cb867
3 changed files with 128 additions and 88 deletions

View File

@ -30,23 +30,12 @@ from manila.i18n import _LI
from manila.openstack.common import log as logging
import sqlalchemy as sa
from sqlalchemy.sql import table
LOG = logging.getLogger(__name__)
def upgrade():
LOG.info(_LI("Renaming column name volume_type_extra_specs.key to "
"volume_type_extra_specs.spec_key"))
op.alter_column("volume_type_extra_specs", "key",
new_column_name="spec_key",
type_=sa.String(length=255))
LOG.info(_LI("Renaming column name volume_type_extra_specs.value to "
"volume_type_extra_specs.spec_value"))
op.alter_column("volume_type_extra_specs", "value",
new_column_name="spec_value",
type_=sa.String(length=255))
LOG.info(_LI("Renaming column name shares.volume_type_id to "
"shares.share_type.id"))
op.alter_column("shares", "volume_type_id",
@ -60,7 +49,7 @@ def upgrade():
['name', 'deleted'])
LOG.info(_LI("Creating share_type_extra_specs table"))
op.create_table(
st_es = op.create_table(
'share_type_extra_specs',
sa.Column('created_at', sa.DateTime),
sa.Column('updated_at', sa.DateTime),
@ -72,17 +61,11 @@ def upgrade():
nullable=False),
sa.Column('spec_key', sa.String(length=255)),
sa.Column('spec_value', sa.String(length=255)),
mysql_engine='InnoDB'
)
mysql_engine='InnoDB')
LOG.info(_LI("Migrating volume_type_extra_specs to "
"share_type_extra_specs"))
op.execute("INSERT INTO share_type_extra_specs "
"(created_at, updated_at, deleted_at, deleted, "
"id, share_type_id, spec_key, spec_value) "
"SELECT created_at, updated_at, deleted_at, deleted, "
"id, volume_type_id, spec_key, spec_value "
"FROM volume_type_extra_specs")
_copy_records(destination_table=st_es, up_migration=True)
LOG.info(_LI("Dropping volume_type_extra_specs table"))
op.drop_table("volume_type_extra_specs")
@ -90,7 +73,7 @@ def upgrade():
def downgrade():
LOG.info(_LI("Creating volume_type_extra_specs table"))
op.create_table(
vt_es = op.create_table(
'volume_type_extra_specs',
sa.Column('created_at', sa.DateTime),
sa.Column('updated_at', sa.DateTime),
@ -99,19 +82,13 @@ def downgrade():
sa.Column('id', sa.Integer, primary_key=True, nullable=False),
sa.Column('volume_type_id', sa.String(length=36),
sa.ForeignKey('share_types.id'), nullable=False),
sa.Column('spec_key', sa.String(length=255)),
sa.Column('spec_value', sa.String(length=255)),
mysql_engine='InnoDB'
)
sa.Column('key', sa.String(length=255)),
sa.Column('value', sa.String(length=255)),
mysql_engine='InnoDB')
LOG.info(_LI("Migrating share_type_extra_specs to "
"volume_type_extra_specs"))
op.execute("INSERT INTO volume_type_extra_specs "
"(created_at, updated_at, deleted_at, deleted, "
"id, volume_type_id, spec_key, spec_value) "
"SELECT created_at, updated_at, deleted_at, deleted, "
"id, share_type_id, spec_key, spec_value "
"FROM share_type_extra_specs")
_copy_records(destination_table=vt_es, up_migration=False)
LOG.info(_LI("Dropping share_type_extra_specs table"))
op.drop_table("share_type_extra_specs")
@ -128,14 +105,32 @@ def downgrade():
new_column_name="volume_type_id",
type_=sa.String(length=36))
LOG.info(_LI("Renaming column name volume_type_extra_specs.spec_key to "
"volume_type_extra_specs.key"))
op.alter_column("volume_type_extra_specs", "spec_key",
new_column_name="key",
type_=sa.String(length=255))
LOG.info(_LI("Renaming column name volume_type_extra_specs.spec_value to "
"volume_type_extra_specs.value"))
op.alter_column("volume_type_extra_specs", "spec_value",
new_column_name="value",
type_=sa.String(length=255))
def _copy_records(destination_table, up_migration=True):
    """Copy all extra-spec rows between the volume/share type spec tables.

    Used by this migration to move data from ``volume_type_extra_specs``
    to ``share_type_extra_specs`` on upgrade, and back on downgrade,
    replacing the previous raw ``INSERT ... SELECT`` SQL with ORM calls.

    :param destination_table: sqlalchemy table object returned by
        ``op.create_table`` that rows are inserted into.
    :param up_migration: when True copy volume -> share (and rename
        key/value columns to spec_key/spec_value); when False copy
        share -> volume with the reverse renaming.
    """
    # Each pair is (table/FK-column name prefix, key/value column prefix):
    # the old schema used 'volume_type_...' with bare 'key'/'value' columns,
    # the new one uses 'share_type_...' with 'spec_key'/'spec_value'.
    old = ('volume', '')
    new = ('share', 'spec_')
    data_from, data_to = (old, new) if up_migration else (new, old)
    # Lightweight table construct describing the *source* table so it can
    # be selected from without reflecting the real schema.
    from_table = table(
        data_from[0] + '_type_extra_specs',
        sa.Column('created_at', sa.DateTime),
        sa.Column('updated_at', sa.DateTime),
        sa.Column('deleted_at', sa.DateTime),
        sa.Column('deleted', sa.Boolean),
        sa.Column('id', sa.Integer, primary_key=True, nullable=False),
        sa.Column(data_from[0] + '_type_id', sa.String(length=36)),
        sa.Column(data_from[1] + 'key', sa.String(length=255)),
        sa.Column(data_from[1] + 'value', sa.String(length=255)))
    extra_specs = []
    for es in op.get_bind().execute(from_table.select()):
        # Rows keep their primary keys and timestamps; only the FK and
        # key/value column *names* differ between source and destination.
        extra_specs.append({
            'created_at': es.created_at,
            'updated_at': es.updated_at,
            'deleted_at': es.deleted_at,
            'deleted': es.deleted,
            'id': es.id,
            data_to[0] + '_type_id': getattr(es, data_from[0] + '_type_id'),
            data_to[1] + 'key': getattr(es, data_from[1] + 'key'),
            data_to[1] + 'value': getattr(es, data_from[1] + 'value'),
        })
    # NOTE(review): all rows are loaded into memory before insertion —
    # acceptable here since extra-spec tables are expected to be small.
    op.bulk_insert(destination_table, extra_specs)

View File

@ -26,28 +26,46 @@ revision = '56cdbe267881'
down_revision = '30cb96d995fa'
from alembic import op
import sqlalchemy as sql
import sqlalchemy as sa
from sqlalchemy import func
from sqlalchemy.sql import table
def upgrade():
op.create_table(
export_locations_table = op.create_table(
'share_export_locations',
sql.Column('id', sql.Integer, primary_key=True, nullable=False),
sql.Column('created_at', sql.DateTime),
sql.Column('updated_at', sql.DateTime),
sql.Column('deleted_at', sql.DateTime),
sql.Column('deleted', sql.Integer, default=0),
sql.Column('path', sql.String(2000)),
sql.Column('share_id', sql.String(36),
sql.ForeignKey('shares.id', name="sel_id_fk")),
sa.Column('id', sa.Integer, primary_key=True, nullable=False),
sa.Column('created_at', sa.DateTime),
sa.Column('updated_at', sa.DateTime),
sa.Column('deleted_at', sa.DateTime),
sa.Column('deleted', sa.Integer, default=0),
sa.Column('path', sa.String(2000)),
sa.Column('share_id', sa.String(36),
sa.ForeignKey('shares.id', name="sel_id_fk")),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
mysql_charset='utf8')
op.execute("INSERT INTO share_export_locations "
"(created_at, deleted, path, share_id) "
"SELECT created_at, 0, export_location, id "
"FROM shares")
shares_table = table(
'shares',
sa.Column('created_at', sa.DateTime),
sa.Column('deleted_at', sa.DateTime),
sa.Column('deleted', sa.Integer),
sa.Column('export_location', sa.String(length=255)),
sa.Column('id', sa.String(length=36)),
sa.Column('updated_at', sa.DateTime))
export_locations = []
for share in op.get_bind().execute(shares_table.select()):
deleted = share.deleted if isinstance(share.deleted, int) else 0
export_locations.append({
'created_at': share.created_at,
'updated_at': share.updated_at,
'deleted_at': share.deleted_at,
'deleted': deleted,
'share_id': share.id,
'path': share.export_location,
})
op.bulk_insert(export_locations_table, export_locations)
op.drop_column('shares', 'export_location')
@ -60,22 +78,31 @@ def downgrade():
"""
op.add_column('shares',
sql.Column('export_location', sql.String(255)))
sa.Column('export_location', sa.String(255)))
export_locations_table = table(
'share_export_locations',
sa.Column('share_id', sa.String(length=36)),
sa.Column('path', sa.String(length=255)),
sa.Column('updated_at', sa.DateTime),
sa.Column('deleted', sa.Integer))
connection = op.get_bind()
session = sa.orm.Session(bind=connection.connect())
export_locations = session.query(
func.min(export_locations_table.c.updated_at),
export_locations_table.c.share_id,
export_locations_table.c.path).filter(
export_locations_table.c.deleted == 0).group_by(
export_locations_table.c.share_id,
export_locations_table.c.path).all()
export_locations = connection.execute(
"SELECT share_id, path FROM share_export_locations sel WHERE deleted=0"
" AND updated_at = ("
" SELECT MIN(updated_at) FROM share_export_locations sel2 "
" WHERE sel.share_id = sel2.share_id)")
shares = sql.Table('shares', sql.MetaData(),
autoload=True, autoload_with=connection)
shares = sa.Table('shares', sa.MetaData(),
autoload=True, autoload_with=connection)
for location in export_locations:
update = shares.update().where(shares.c.id == location['share_id']). \
values(export_location=location['path'])
update = shares.update().where(shares.c.id == location.share_id). \
values(export_location=location.path)
connection.execute(update)
op.drop_table('share_export_locations')

View File

@ -25,30 +25,49 @@ revision = '59eb64046740'
down_revision = '4ee2cf4be19a'
from alembic import op
from sqlalchemy import sql
from oslo_utils import timeutils
import sqlalchemy as sa
from sqlalchemy.sql import table
def upgrade():
connection = op.get_bind()
session = sa.orm.Session(bind=op.get_bind().connect())
def escape_column_name(name):
return sql.column(name).compile(bind=connection)
es_table = table(
'share_type_extra_specs',
sa.Column('created_at', sa.DateTime),
sa.Column('deleted', sa.Integer),
sa.Column('share_type_id', sa.String(length=36)),
sa.Column('spec_key', sa.String(length=255)),
sa.Column('spec_value', sa.String(length=255)))
insert_required_extra_spec = (
"INSERT INTO share_type_extra_specs "
" (%(deleted)s, %(share_type_id)s, %(key)s, %(value)s)"
" SELECT '0', st.id, 'driver_handles_share_servers', 'True'"
" FROM share_types as st "
" WHERE st.id NOT IN ( "
" SELECT es.share_type_id FROM share_type_extra_specs as es "
" WHERE es.spec_key = 'driver_handles_share_servers' )" % ({
'deleted': escape_column_name('deleted'),
'share_type_id': escape_column_name('share_type_id'),
'key': escape_column_name('spec_key'),
'value': escape_column_name('spec_value'),
}))
st_table = table(
'share_types',
sa.Column('deleted', sa.Integer),
sa.Column('id', sa.Integer))
connection.execute(insert_required_extra_spec)
existing_required_extra_specs = session.query(es_table).\
filter(es_table.c.spec_key == 'driver_handles_share_servers').\
filter(es_table.c.deleted.in_(('0', False))).\
all()
exclude_st_ids = [es.share_type_id for es in existing_required_extra_specs]
share_types = session.query(st_table).\
filter(st_table.c.deleted.in_(('0', 'False', ))).\
filter(st_table.c.id.notin_(exclude_st_ids)).\
all()
extra_specs = []
for st in share_types:
extra_specs.append({
'spec_key': 'driver_handles_share_servers',
'spec_value': 'True',
'deleted': 0,
'created_at': timeutils.utcnow(),
'share_type_id': st.id,
})
op.bulk_insert(es_table, extra_specs)
def downgrade():
@ -57,4 +76,3 @@ def downgrade():
We can't determine, which extra specs should be removed after insertion,
that's why do nothing here.
"""
pass