Merge "Change to use alembic to create db table schema"

Jenkins 2014-09-02 08:30:04 +00:00 committed by Gerrit Code Review
commit 3ea6a37893
9 changed files with 373 additions and 2 deletions


@@ -0,0 +1,9 @@
#!/usr/bin/env python
import sys
from powervc.neutron.db.migration.cli import main
if __name__ == "__main__":
sys.exit(main())
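
This console script simply hands off to the Alembic migration CLI added later in this change. A minimal usage sketch, assuming the script is installed as powervc-neutron-db-manage and that the database connection comes from an oslo.config file (both names are hypothetical):

# Sketch only: drive the migration CLI programmatically, exactly as the
# console script above does when invoked as
#   powervc-neutron-db-manage --config-file <conf> upgrade head
import sys
from powervc.neutron.db.migration.cli import main

sys.argv = ['powervc-neutron-db-manage',                              # assumed script name
            '--config-file', '/etc/powervc/powervc-neutron.conf',     # assumed config path
            'upgrade', 'head']
sys.exit(main())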


@@ -0,0 +1 @@
# Copyright 2014 IBM Corp.


@@ -0,0 +1,59 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = alembic_migration

# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# max length of characters to apply to the
# "slug" field
#truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

sqlalchemy.url =

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
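
Note that sqlalchemy.url is intentionally left empty: env.py (next file) pulls the connection string from PowerVC's DATABASE.connection option at run time. A minimal sketch of the equivalent programmatic override, with a purely hypothetical URL:

# Sketch only: how a connection URL could be injected into this ini
# programmatically; in this change the real value comes from
# DATABASE.connection via the config object attached in cli.py.
from alembic.config import Config

cfg = Config('alembic.ini')
cfg.set_main_option('sqlalchemy.url',
                    'mysql://powervc:secret@localhost/powervc')  # hypothetical URL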


@ -0,0 +1 @@
Generic single-database configuration.


@@ -0,0 +1,67 @@
# Copyright 2014 IBM Corp.

from __future__ import with_statement

from alembic import context
from sqlalchemy import engine_from_config, pool
from logging.config import fileConfig

from oslo.db.sqlalchemy import session

from powervc.neutron.db.powervc_models_v2 import PowerVCMapping

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)

# set the target for 'autogenerate' support
target_metadata = PowerVCMapping.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.
    """
    url = config.powervc_config.DATABASE.connection
    context.configure(url=url, target_metadata=target_metadata)

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.
    """
    engine = session.create_engine(config.powervc_config.DATABASE.connection)
    connection = engine.connect()
    context.configure(connection=connection,
                      target_metadata=target_metadata)

    try:
        with context.begin_transaction():
            context.run_migrations()
    finally:
        connection.close()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
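
run_migrations_offline() is reached when Alembic is asked to emit SQL rather than touch the database (the --sql path in the CLI below), while run_migrations_online() applies migrations over a live connection created through oslo's session helper. A rough sketch of selecting the two modes through the command layer, assuming a config object built the way cli.py builds it:

# Sketch: the same upgrade, once as generated SQL (offline) and once applied
# directly (online). alembic.command.upgrade() is the call cli.py wraps.
from alembic import command as alembic_command
from alembic.config import Config

cfg = Config('alembic.ini')  # assumes powervc_config is attached as in cli.py
alembic_command.upgrade(cfg, 'head', sql=True)   # offline: prints SQL, no DBAPI needed
alembic_command.upgrade(cfg, 'head')             # online: connects and runs migrations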


@@ -0,0 +1,22 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision}
Create Date: ${create_date}
"""
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
def upgrade():
${upgrades if upgrades else "pass"}
def downgrade():
${downgrades if downgrades else "pass"}


@@ -0,0 +1,56 @@
"""create powervcmappings tables
Revision ID: 1ba55441bcc2
Revises: None
Create Date: 2014-08-27 16:24:01.765923
"""
# revision identifiers, used by Alembic.
revision = '1ba55441bcc2'
down_revision = None
from alembic import op
import sqlalchemy as sa
from oslo.db.sqlalchemy import session
from alembic import util as alembic_util
from alembic import context
from neutron.openstack.common import uuidutils
from powervc.neutron.common import constants
tablename = 'powervcmappings'
def upgrade():
url = context.config.powervc_config.DATABASE.connection
engine = session.create_engine(url)
# In previous release, we do not use alembic or any other migration,
# as we need to support migration case, we need to check if the table
# exists or not
if engine.dialect.has_table(engine.connect(), tablename):
alembic_util.msg("table has been already exists!")
return
op.create_table(
tablename,
sa.Column('id', sa.String(36),
primary_key=True,
default=uuidutils.generate_uuid),
sa.Column('obj_type', sa.Enum(constants.OBJ_TYPE_NETWORK,
constants.OBJ_TYPE_SUBNET,
constants.OBJ_TYPE_PORT,
name='mapping_object_type'),
nullable=False),
sa.Column('status', sa.Enum(constants.STATUS_CREATING,
constants.STATUS_ACTIVE,
constants.STATUS_DELETING,
name='mapping_state'),
nullable=False),
sa.Column('sync_key', sa.String(255), nullable=False),
sa.Column('local_id', sa.String(36)),
sa.Column('pvc_id', sa.String(36)),
sa.Column('update_data', sa.String(512))
)
def downgrade():
op.drop_table(tablename)
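
The guard at the top of upgrade() is what makes this migration safe against a database carried over from the pre-alembic release: if the table is already there, the revision is recorded without re-creating it. A small standalone sketch of that same existence probe, with a hypothetical connection URL:

# Sketch only: the "does the table exist?" check used above, outside alembic.
from oslo.db.sqlalchemy import session

engine = session.create_engine('mysql://powervc:secret@localhost/powervc')  # hypothetical URL
exists = engine.dialect.has_table(engine.connect(), 'powervcmappings')
print('powervcmappings present: %s' % exists)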


@@ -0,0 +1,147 @@
# Copyright 2014 IBM Corp.
import os
# Imported for its side effect: neutron's package __init__ installs the _()
# translation builtin used by the help strings and messages below.
import neutron
from alembic import command as alembic_command
from alembic import config as alembic_config
from alembic import script as alembic_script
from alembic import util as alembic_util
from oslo.config import cfg
HEAD_FILENAME = 'HEAD'
_db_opts = [
cfg.StrOpt('connection',
deprecated_name='sql_connection',
default='',
secret=True,
help=_('URL to database')),
cfg.StrOpt('engine',
default='',
help=_('Database engine')),
]
CONF = cfg.ConfigOpts()
CONF.register_cli_opts(_db_opts, 'DATABASE')
def do_alembic_command(config, cmd, *args, **kwargs):
try:
getattr(alembic_command, cmd)(config, *args, **kwargs)
except alembic_util.CommandError as e:
alembic_util.err(str(e))
def do_check_migration(config, cmd):
do_alembic_command(config, 'branches')
validate_head_file(config)
def do_upgrade_downgrade(config, cmd):
if not CONF.command.revision and not CONF.command.delta:
raise SystemExit(_('You must provide a revision or relative delta'))
revision = CONF.command.revision
if CONF.command.delta:
sign = '+' if CONF.command.name == 'upgrade' else '-'
revision = sign + str(CONF.command.delta)
else:
revision = CONF.command.revision
do_alembic_command(config, cmd, revision, sql=CONF.command.sql)
def do_stamp(config, cmd):
do_alembic_command(config, cmd,
CONF.command.revision,
sql=CONF.command.sql)
def do_revision(config, cmd):
do_alembic_command(config, cmd,
message=CONF.command.message,
autogenerate=CONF.command.autogenerate,
sql=CONF.command.sql)
update_head_file(config)
def validate_head_file(config):
script = alembic_script.ScriptDirectory.from_config(config)
if len(script.get_heads()) > 1:
alembic_util.err(_('Timeline branches unable to generate timeline'))
head_path = os.path.join(script.versions, HEAD_FILENAME)
current_head = script.get_current_head()
if os.path.isfile(head_path):
head_in_file = open(head_path).read().strip()
if current_head == head_in_file:
return
else:
alembic_util.err(_('HEAD file does not match migration timeline head'))
def update_head_file(config):
script = alembic_script.ScriptDirectory.from_config(config)
if len(script.get_heads()) > 1:
alembic_util.err(_('Timeline branches unable to generate timeline'))
head_path = os.path.join(script.versions, HEAD_FILENAME)
with open(head_path, 'w+') as f:
f.write(script.get_current_head())
def add_command_parsers(subparsers):
for name in ['current', 'history', 'branches']:
parser = subparsers.add_parser(name)
parser.set_defaults(func=do_alembic_command)
parser = subparsers.add_parser('check_migration')
parser.set_defaults(func=do_check_migration)
for name in ['upgrade', 'downgrade']:
parser = subparsers.add_parser(name)
parser.add_argument('--delta', type=int)
parser.add_argument('--sql', action='store_true')
parser.add_argument('revision', nargs='?')
parser.add_argument('--mysql-engine',
default='',
help='Change MySQL storage engine of current '
'existing tables')
parser.set_defaults(func=do_upgrade_downgrade)
parser = subparsers.add_parser('stamp')
parser.add_argument('--sql', action='store_true')
parser.add_argument('revision')
parser.set_defaults(func=do_stamp)
parser = subparsers.add_parser('revision')
parser.add_argument('-m', '--message')
parser.add_argument('--autogenerate', action='store_true')
parser.add_argument('--sql', action='store_true')
parser.set_defaults(func=do_revision)
command_opt = cfg.SubCommandOpt('command',
title='Command',
help=_('Available commands'),
handler=add_command_parsers)
CONF.register_cli_opt(command_opt)
def main():
config = alembic_config.Config(
os.path.join(os.path.dirname(__file__), 'alembic.ini')
)
config.set_main_option('script_location',
'powervc.neutron.db.migration:alembic_migration')
# attach the Neutron conf to the Alembic conf
config.powervc_config = CONF
CONF()
CONF.command.func(config, CONF.command.name)
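
main() relies on oslo.config's SubCommandOpt to turn the sub-command name and its arguments into attributes on CONF.command, then dispatches to the matching do_* handler via the func default set on each parser. A self-contained sketch of just that mechanism, reduced to a toy single-command parser (not the real command set):

# Sketch of the SubCommandOpt dispatch used above, with one stand-in command.
from oslo.config import cfg

conf = cfg.ConfigOpts()


def do_upgrade(config, cmd):
    # stand-in for do_upgrade_downgrade(); just echoes what was parsed
    print('%s -> revision %s' % (cmd, config.command.revision))


def _add_parsers(subparsers):
    parser = subparsers.add_parser('upgrade')
    parser.add_argument('revision', nargs='?')
    parser.set_defaults(func=do_upgrade)


conf.register_cli_opt(cfg.SubCommandOpt('command', handler=_add_parsers))
conf(['upgrade', 'head'])                    # populates conf.command.{name,revision,func}
conf.command.func(conf, conf.command.name)   # mirrors the last line of main()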


@@ -1,5 +1,6 @@
# Copyright 2013 IBM Corp.
# Copyright 2013, 2014 IBM Corp.
import sqlalchemy as sql
from sqlalchemy.orm import exc
import neutron.db.api as db_api
@@ -18,7 +19,15 @@ class PowerVCAgentDB(object):
def __init__(self):
self.session = db_api.get_session()
db_api.configure_db()
self.register_models()
def register_models(self):
"""Register Models and create properties."""
try:
engine = db_api.get_engine()
model.PowerVCMapping.metadata.create_all(engine)
except sql.exc.OperationalError as e:
LOG.info(_("Database registration exception: %s"), e)
def _create_object(self, obj_type, sync_key, update_data=None,
local_id=None, pvc_id=None):