Merge "Initial migration for database structure"

This commit is contained in:
Jenkins 2014-09-09 17:45:58 +00:00 committed by Gerrit Code Review
commit 406f35244f
6 changed files with 555 additions and 2 deletions

View File

@@ -0,0 +1,59 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = %(here)s/alembic_migrations
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s
# max length of characters to apply to the
# "slug" field
#truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
sqlalchemy.url = mysql://root:password@localhost/octavia
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
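
The sqlalchemy.url above hardcodes a local development credential; as the README
below notes, the connection string should eventually come from Octavia's own
configuration rather than this ini file. A minimal sketch of how env.py could
override it at runtime, assuming the standard oslo.db 'database.connection'
option (nothing in this commit registers or reads it yet):

# Sketch only: the oslo.config/oslo.db wiring is an assumption, not part of
# this commit.
from alembic import context
from oslo.config import cfg
from oslo.db import options as db_options

config = context.config

db_options.set_defaults(cfg.CONF)
if cfg.CONF.database.connection:
    config.set_main_option('sqlalchemy.url', cfg.CONF.database.connection)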

View File

@@ -0,0 +1,16 @@
The migration scripts are contained in the alembic_migrations/versions directory.
Before running these migrations, ensure that the octavia database exists.
Currently the database connection string is in octavia/db/migration/alembic.ini
but this should eventually be pulled out into an octavia configuration file.
The connection string is set by the line:
sqlalchemy.url = mysql://<user>:<password>@localhost/<database>
To run migrations you must first be in the octavia/db/migration directory.
To migrate to the most current version run:
$ alembic upgrade head
To downgrade one migration run:
$ alembic downgrade -1
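
A few additional alembic commands are useful when working with these migrations
(run from the same directory):
$ alembic current    # show the revision the database is currently at
$ alembic history    # list all known revisions
$ alembic downgrade base    # back out all migrations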

View File

@@ -0,0 +1,84 @@
# Copyright 2014 Rackspace
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import with_statement
from alembic import context
from sqlalchemy import engine_from_config, pool
from logging import config as logging_config
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
logging_config.fileConfig(config.config_file_name)
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = None
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.
    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(url=url, target_metadata=target_metadata)

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.
    """
    engine = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix='sqlalchemy.',
        poolclass=pool.NullPool)

    connection = engine.connect()
    context.configure(connection=connection,
                      target_metadata=target_metadata)

    try:
        with context.begin_transaction():
            context.run_migrations()
    finally:
        connection.close()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
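
Note that target_metadata is left as None above, so alembic's autogenerate
support cannot diff revisions against the models. A minimal sketch of how it
could be wired up once Octavia's SQLAlchemy models exist (octavia.db.models
and its BASE declarative base are hypothetical here, not part of this commit):

# Hypothetical wiring for autogenerate support.
from octavia.db import models  # assumed module; does not exist in this commit

target_metadata = models.BASE.metadata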

View File

@@ -0,0 +1,22 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision}
Create Date: ${create_date}
"""
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
def upgrade():
    ${upgrades if upgrades else "pass"}


def downgrade():
    ${downgrades if downgrades else "pass"}

View File

@@ -0,0 +1,372 @@
# Copyright 2014 Rackspace
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
'''initial_create
Revision ID: 35dee79d5865
Revises: None
Create Date: 2014-08-15 11:01:14.897223
'''
# revision identifiers, used by Alembic.
revision = '35dee79d5865'
down_revision = None
from alembic import op
import sqlalchemy as sa
from sqlalchemy import sql
def upgrade():
    # Create lookup tables
    op.create_table(
        u'health_monitor_type',
        sa.Column(u'name', sa.String(30), primary_key=True),
        sa.Column(u'description', sa.String(255), nullable=True)
    )

    # Create temporary table for table data seeding
    insert_table = sql.table(
        u'health_monitor_type',
        sql.column(u'name', sa.String),
        sql.column(u'description', sa.String)
    )
    op.bulk_insert(
        insert_table,
        [
            {'name': 'HTTP'},
            {'name': 'HTTPS'},
            {'name': 'TCP'}
        ]
    )

    op.create_table(
        u'protocol',
        sa.Column(u'name', sa.String(30), primary_key=True),
        sa.Column(u'description', sa.String(255), nullable=True)
    )
    insert_table = sql.table(
        u'protocol',
        sql.column(u'name', sa.String),
        sql.column(u'description', sa.String)
    )
    op.bulk_insert(
        insert_table,
        [
            {'name': 'HTTP'},
            {'name': 'HTTPS'},
            {'name': 'TCP'}
        ]
    )

    op.create_table(
        u'algorithm',
        sa.Column(u'name', sa.String(30), primary_key=True),
        sa.Column(u'description', sa.String(255), nullable=True)
    )
    insert_table = sql.table(
        u'algorithm',
        sql.column(u'name', sa.String),
        sql.column(u'description', sa.String)
    )
    op.bulk_insert(
        insert_table,
        [
            {'name': 'ROUND_ROBIN'},
            {'name': 'LEAST_CONNECTIONS'},
            {'name': 'SOURCE_IP'}
        ]
    )

    op.create_table(
        u'session_persistence_type',
        sa.Column(u'name', sa.String(30), primary_key=True),
        sa.Column(u'description', sa.String(255), nullable=True)
    )
    insert_table = sql.table(
        u'session_persistence_type',
        sql.column(u'name', sa.String),
        sql.column(u'description', sa.String)
    )
    op.bulk_insert(
        insert_table,
        [
            {'name': 'SOURCE_IP'},
            {'name': 'HTTP_COOKIE'},
            {'name': 'APP_COOKIE'}
        ]
    )

    op.create_table(
        u'provisioning_status',
        sa.Column(u'name', sa.String(30), primary_key=True),
        sa.Column(u'description', sa.String(255), nullable=True)
    )
    insert_table = sql.table(
        u'provisioning_status',
        sql.column(u'name', sa.String),
        sql.column(u'description', sa.String)
    )
    op.bulk_insert(
        insert_table,
        [
            {'name': 'ACTIVE'},
            {'name': 'PENDING_CREATE'},
            {'name': 'PENDING_UPDATE'},
            {'name': 'PENDING_DELETE'},
            {'name': 'DELETED'},
            {'name': 'ERROR'}
        ]
    )

    op.create_table(
        u'operating_status',
        sa.Column(u'name', sa.String(30), primary_key=True),
        sa.Column(u'description', sa.String(255), nullable=True)
    )
    insert_table = sql.table(
        u'operating_status',
        sql.column(u'name', sa.String),
        sql.column(u'description', sa.String)
    )
    op.bulk_insert(
        insert_table,
        [
            {'name': 'ONLINE'},
            {'name': 'OFFLINE'},
            {'name': 'DEGRADED'},
            {'name': 'ERROR'}
        ]
    )
    op.create_table(
        u'pool',
        sa.Column(u'tenant_id', sa.String(255), nullable=True),
        sa.Column(u'id', sa.String(36), nullable=False),
        sa.Column(u'name', sa.String(255), nullable=True),
        sa.Column(u'description', sa.String(255), nullable=True),
        sa.Column(u'protocol', sa.String(16), nullable=False),
        sa.Column(u'lb_algorithm', sa.String(16), nullable=False),
        sa.Column(u'operating_status', sa.String(16), nullable=False),
        sa.Column(u'enabled', sa.Boolean(), nullable=False),
        sa.PrimaryKeyConstraint(u'id'),
        sa.ForeignKeyConstraint([u'protocol'],
                                [u'protocol.name'],
                                name=u'fk_pool_protocol_name'),
        sa.ForeignKeyConstraint([u'lb_algorithm'],
                                [u'algorithm.name'],
                                name=u'fk_pool_algorithm_name'),
        sa.ForeignKeyConstraint([u'operating_status'],
                                [u'operating_status.name'],
                                name=u'fk_pool_operating_status_name')
    )

    op.create_table(
        u'health_monitor',
        sa.Column(u'pool_id', sa.String(36), nullable=False),
        sa.Column(u'type', sa.String(36), nullable=False),
        sa.Column(u'delay', sa.Integer(), nullable=False),
        sa.Column(u'timeout', sa.Integer(), nullable=False),
        sa.Column(u'fall_threshold', sa.Integer(), nullable=False),
        sa.Column(u'rise_threshold', sa.Integer(), nullable=False),
        sa.Column(u'http_method', sa.String(16), nullable=True),
        sa.Column(u'url_path', sa.String(255), nullable=True),
        sa.Column(u'expected_codes', sa.String(64), nullable=True),
        sa.Column(u'enabled', sa.Boolean(), nullable=False),
        sa.PrimaryKeyConstraint(u'pool_id'),
        sa.ForeignKeyConstraint([u'pool_id'], [u'pool.id'],
                                name=u'fk_health_monitor_pool_id'),
        sa.ForeignKeyConstraint(
            [u'type'], [u'health_monitor_type.name'],
            name=u'fk_health_monitor_health_monitor_type_name')
    )

    op.create_table(
        u'session_persistence',
        sa.Column(u'pool_id', sa.String(36), nullable=False),
        sa.Column(u'type', sa.String(16), nullable=False),
        sa.Column(u'cookie_name', sa.String(255), nullable=True),
        sa.ForeignKeyConstraint(
            [u'type'], [u'session_persistence_type.name'],
            name=u'fk_session_persistence_session_persistence_type_name'),
        sa.ForeignKeyConstraint([u'pool_id'], [u'pool.id'],
                                name=u'fk_session_persistence_pool_id'),
        sa.PrimaryKeyConstraint(u'pool_id')
    )

    op.create_table(
        u'member',
        sa.Column(u'tenant_id', sa.String(255), nullable=True),
        sa.Column(u'id', sa.String(36), nullable=False),
        sa.Column(u'pool_id', sa.String(36), nullable=False),
        sa.Column(u'subnet_id', sa.String(36), nullable=True),
        sa.Column(u'address', sa.String(64), nullable=False),
        sa.Column(u'protocol_port', sa.Integer(), nullable=False),
        sa.Column(u'weight', sa.Integer(), nullable=True),
        sa.Column(u'operating_status', sa.String(16), nullable=False),
        sa.Column(u'enabled', sa.Boolean(), nullable=False),
        sa.PrimaryKeyConstraint(u'id'),
        sa.ForeignKeyConstraint([u'pool_id'], [u'pool.id'],
                                name=u'fk_member_pool_id'),
        sa.ForeignKeyConstraint([u'operating_status'],
                                [u'operating_status.name'],
                                name=u'fk_member_operating_status_name'),
        sa.UniqueConstraint(u'pool_id', u'address', u'protocol_port',
                            name=u'uq_member_pool_id_address_protocol_port')
    )

    op.create_table(
        u'load_balancer',
        sa.Column(u'tenant_id', sa.String(255), nullable=True),
        sa.Column(u'id', sa.String(36), nullable=False),
        sa.Column(u'name', sa.String(255), nullable=True),
        sa.Column(u'description', sa.String(255), nullable=True),
        sa.Column(u'provisioning_status', sa.String(16), nullable=False),
        sa.Column(u'operating_status', sa.String(16), nullable=False),
        sa.Column(u'enabled', sa.Boolean(), nullable=False),
        sa.PrimaryKeyConstraint(u'id'),
        sa.ForeignKeyConstraint(
            [u'provisioning_status'], [u'provisioning_status.name'],
            name=u'fk_load_balancer_provisioning_status_name'),
        sa.ForeignKeyConstraint([u'operating_status'],
                                [u'operating_status.name'],
                                name=u'fk_load_balancer_operating_status_name')
    )

    op.create_table(
        u'vip',
        sa.Column(u'load_balancer_id', sa.String(36), nullable=False),
        sa.Column(u'ip_address', sa.String(36), nullable=True),
        sa.Column(u'net_port_id', sa.String(36), nullable=True),
        sa.Column(u'subnet_id', sa.String(36), nullable=True),
        sa.Column(u'floating_ip_id', sa.String(36), nullable=True),
        sa.Column(u'floating_ip_network_id', sa.String(36), nullable=True),
        sa.PrimaryKeyConstraint(u'load_balancer_id'),
        sa.ForeignKeyConstraint([u'load_balancer_id'], [u'load_balancer.id'],
                                name=u'fk_vip_load_balancer_id')
    )
    op.create_table(
        u'listener',
        sa.Column(u'tenant_id', sa.String(255), nullable=True),
        sa.Column(u'id', sa.String(36), nullable=False),
        sa.Column(u'name', sa.String(255), nullable=True),
        sa.Column(u'description', sa.String(255), nullable=True),
        sa.Column(u'protocol', sa.String(16), nullable=False),
        sa.Column(u'protocol_port', sa.Integer(), nullable=False),
        sa.Column(u'connection_limit', sa.Integer(), nullable=True),
        sa.Column(u'load_balancer_id', sa.String(36), nullable=True),
        sa.Column(u'default_tls_container_id', sa.String(36), nullable=True),
        sa.Column(u'default_pool_id', sa.String(36), nullable=True),
        sa.Column(u'provisioning_status', sa.String(16), nullable=False),
        sa.Column(u'operating_status', sa.String(16), nullable=False),
        sa.Column(u'enabled', sa.Boolean(), nullable=False),
        sa.ForeignKeyConstraint([u'load_balancer_id'], [u'load_balancer.id'],
                                name=u'fk_listener_load_balancer_id'),
        sa.ForeignKeyConstraint([u'default_pool_id'], [u'pool.id'],
                                name=u'fk_listener_pool_id'),
        sa.ForeignKeyConstraint([u'protocol'], [u'protocol.name'],
                                name=u'fk_listener_protocol_name'),
        sa.ForeignKeyConstraint([u'provisioning_status'],
                                [u'provisioning_status.name'],
                                name=u'fk_listener_provisioning_status_name'),
        sa.ForeignKeyConstraint([u'operating_status'],
                                [u'operating_status.name'],
                                name=u'fk_listener_operating_status_name'),
        sa.UniqueConstraint(u'default_pool_id',
                            name=u'uq_listener_default_pool_id'),
        sa.UniqueConstraint(
            u'load_balancer_id', u'protocol_port',
            name=u'uq_listener_load_balancer_id_protocol_port'),
        sa.PrimaryKeyConstraint(u'id')
    )

    op.create_table(
        u'sni',
        sa.Column(u'listener_id', sa.String(36), nullable=False),
        sa.Column(u'tls_container_id', sa.String(36), nullable=False),
        sa.Column(u'position', sa.Integer, nullable=True),
        sa.ForeignKeyConstraint([u'listener_id'], [u'listener.id'],
                                name=u'fk_sni_listener_id'),
        sa.PrimaryKeyConstraint(u'listener_id', u'tls_container_id')
    )

    op.create_table(
        u'listener_statistics',
        sa.Column(u'listener_id', sa.String(36), nullable=False),
        sa.Column(u'bytes_in', sa.BigInteger(), nullable=False),
        sa.Column(u'bytes_out', sa.BigInteger(), nullable=False),
        sa.Column(u'active_connections', sa.Integer(), nullable=False),
        sa.Column(u'total_connections', sa.BigInteger(), nullable=False),
        sa.PrimaryKeyConstraint(u'listener_id'),
        sa.ForeignKeyConstraint([u'listener_id'], [u'listener.id'],
                                name=u'fk_listener_statistics_listener_id')
    )

    op.create_table(
        u'amphora',
        # id should come from the service providing the amphora (i.e. nova)
        sa.Column(u'id', sa.String(36), nullable=False, autoincrement=False),
        sa.Column(u'host_id', sa.String(36), nullable=False),
        sa.Column(u'status', sa.String(36), nullable=False),
        sa.PrimaryKeyConstraint(u'id'),
        sa.ForeignKeyConstraint(
            [u'status'], [u'provisioning_status.name'],
            name=u'fk_container_provisioning_status_name')
    )

    op.create_table(
        u'load_balancer_amphora',
        sa.Column(u'amphora_id', sa.String(36), nullable=False),
        sa.Column(u'load_balancer_id', sa.String(36), nullable=False),
        sa.ForeignKeyConstraint(
            [u'load_balancer_id'], [u'load_balancer.id'],
            name=u'fk_load_balancer_amphora_load_balancer_id'),
        sa.ForeignKeyConstraint([u'amphora_id'],
                                [u'amphora.id'],
                                name=u'fk_load_balancer_amphora_id'),
        sa.PrimaryKeyConstraint(u'amphora_id', u'load_balancer_id')
    )

def downgrade():
    op.drop_table(u'load_balancer_amphora')
    op.drop_table(u'amphora')
    op.drop_table(u'listener_statistics')
    op.drop_table(u'sni')
    op.drop_table(u'listener')
    op.drop_table(u'vip')
    op.drop_table(u'load_balancer')
    op.drop_table(u'member')
    op.drop_table(u'session_persistence')
    op.drop_table(u'health_monitor')
    op.drop_table(u'pool')
    op.drop_table(u'provisioning_status')
    op.drop_table(u'operating_status')
    op.drop_table(u'session_persistence_type')
    op.drop_table(u'algorithm')
    op.drop_table(u'protocol')
    op.drop_table(u'health_monitor_type')
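
After the migration has been applied, a quick sanity check is to reflect the
schema and confirm that the tables above were created. A minimal sketch,
assuming the local MySQL credentials from alembic.ini and an installed MySQL
DBAPI driver:

# Sketch only: reuses the development URL from alembic.ini.
from sqlalchemy import create_engine
from sqlalchemy.engine import reflection

engine = create_engine('mysql://root:password@localhost/octavia')
inspector = reflection.Inspector.from_engine(engine)

# Expect the 17 tables created above plus alembic's own alembic_version table.
print(sorted(inspector.get_table_names()))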

View File

@@ -1,4 +1,5 @@
actdiag
alembic>=0.6.4
blockdiag
docutils==0.11
nwdiag
@@ -10,6 +11,7 @@ sphinxcontrib-actdiag
sphinxcontrib-blockdiag
sphinxcontrib-nwdiag
sphinxcontrib-seqdiag
SQLAlchemy>=0.7.8,<=0.9.99
graphviz
anyjson>=0.3.3
@@ -19,9 +21,7 @@ requests>=1.2.1
jsonrpclib
netaddr>=0.7.6
python-neutronclient>=2.3.6,<3
SQLAlchemy>=0.8.4,<=0.8.99,>=0.9.7,<=0.9.99
WebOb>=1.2.3
alembic>=0.6.4
six>=1.7.0
oslo.config>=1.4.0.0a3
oslo.db>=0.4.0 # Apache-2.0