Revert "Optional separate database for placement API"

This reverts commit 1b5f9f8203.

On IRC we agreed that no migrations should be placement specific; we
should just use the API table migrations to generate the schema for
both DBs.

There is also a separate debate around the alias for the aggregates
table, but that is not really a reason to revert; it is just something
in here that will need rework.

Change-Id: I275945aee9d9be8e35d6ddc05515df39d559457a
John Garbutt 2016-08-25 15:30:15 +00:00 committed by Dan Smith
parent 77f80e8150
commit 39fb302fd9
15 changed files with 26 additions and 256 deletions

View File

@@ -944,24 +944,6 @@ class ApiDbCommands(object):
         print(migration.db_version(database='api'))
 
 
-# NOTE(cdent): The behavior of these commands is undefined when
-# the placement configuration is undefined.
-class PlacementCommands(object):
-    """Class for managing the placement database."""
-
-    def __init__(self):
-        pass
-
-    @args('--version', metavar='<version>', help='Database version')
-    def sync(self, version=None):
-        """Sync the database up to the most recent version."""
-        return migration.db_sync(version, database='placement')
-
-    def version(self):
-        """Print the current database version."""
-        print(migration.db_version(database='placement'))
-
-
 class AgentBuildCommands(object):
     """Class for managing agent builds."""
@@ -1509,7 +1491,6 @@ CATEGORIES = {
     'host': HostCommands,
     'logs': GetLogCommands,
     'network': NetworkCommands,
-    'placement': PlacementCommands,
     'project': ProjectCommands,
     'shell': ShellCommands,
     'vm': VmCommands,
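
For context, the removed PlacementCommands class follows the usual nova-manage pattern: a plain class whose public methods become subcommands, registered under a key in the CATEGORIES dict. A minimal, self-contained sketch of that pattern (the names below are illustrative, not nova's real wiring):

class ExampleDbCommands(object):
    """Class for managing an example database."""

    def sync(self, version=None):
        """Sync the database up to the most recent version."""
        print('would sync to version %s' % version)

    def version(self):
        """Print the current database version."""
        print(0)


CATEGORIES = {
    'example_db': ExampleDbCommands,
}


if __name__ == '__main__':
    # e.g. `python sketch.py example_db sync 42`
    import sys
    category, action = sys.argv[1], sys.argv[2]
    getattr(CATEGORIES[category](), action)(*sys.argv[3:])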

View File

@@ -75,7 +75,6 @@ LOG = logging.getLogger(__name__)
 main_context_manager = enginefacade.transaction_context()
 api_context_manager = enginefacade.transaction_context()
-placement_context_manager = enginefacade.transaction_context()
 
 
 def _get_db_conf(conf_group, connection=None):
@@ -109,10 +108,6 @@ def _context_manager_from_context(context):
 def configure(conf):
     main_context_manager.configure(**_get_db_conf(conf.database))
     api_context_manager.configure(**_get_db_conf(conf.api_database))
-    if conf.placement_database.connection is None:
-        conf.placement_database = conf.api_database
-    placement_context_manager.configure(
-        **_get_db_conf(conf.placement_database))
 
 
 def create_context_manager(connection=None):
@@ -147,10 +142,6 @@ def get_api_engine():
     return api_context_manager.get_legacy_facade().get_engine()
 
 
-def get_placement_engine():
-    return placement_context_manager.get_legacy_facade().get_engine()
-
-
 _SHADOW_TABLE_PREFIX = 'shadow_'
 _DEFAULT_QUOTA_NAME = 'default'
 PER_PROJECT_QUOTAS = ['fixed_ips', 'floating_ips', 'networks']
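
The lines removed above implemented the "optional separate database" fallback: the placement context manager reused the api_database settings whenever no dedicated placement connection was configured. A rough sketch of that fallback, assuming oslo.db is installed and with plain dicts standing in for nova's oslo.config groups:

from oslo_db.sqlalchemy import enginefacade

placement_context_manager = enginefacade.transaction_context()


def configure_placement(api_db_conf, placement_db_conf):
    # Fall back to the API database settings when no dedicated placement
    # connection is given (this mirrors the removed configure() logic).
    if placement_db_conf.get('connection') is None:
        placement_db_conf = api_db_conf
    placement_context_manager.configure(**placement_db_conf)


configure_placement({'connection': 'sqlite:///api.db'},
                    {'connection': None})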

View File

@@ -354,31 +354,6 @@ class ResourceProviderAggregate(API_BASE):
     aggregate_id = Column(Integer, primary_key=True, nullable=False)
 
 
-class PlacementAggregate(API_BASE):
-    """Represents a grouping of resource providers."""
-    # NOTE(rpodolyaka): placement API can optionally use the subset of tables
-    # of api DB instead of requiring its own DB. aggregates table is the only
-    # table which schema is a bit different (additional `name` column), but we
-    # can work around that by providing an additional mapping class to a
-    # subset of table columns, so that this model works for both separate and
-    # shared DBs cases.
-    __table__ = API_BASE.metadata.tables['aggregates']
-    __mapper_args__ = {
-        'exclude_properties': ['name']
-    }
-
-    resource_providers = orm.relationship(
-        'ResourceProvider',
-        secondary='resource_provider_aggregates',
-        primaryjoin=('PlacementAggregate.id == '
-                     'ResourceProviderAggregate.aggregate_id'),
-        secondaryjoin=('ResourceProviderAggregate.resource_provider_id == '
-                       'ResourceProvider.id'),
-        backref='aggregates'
-    )
-
-
 class InstanceGroupMember(API_BASE):
     """Represents the members for an instance group."""
     __tablename__ = 'instance_group_member'
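
The NOTE(rpodolyaka) comment above describes the trick being reverted: map a second declarative class onto the existing aggregates table while excluding its extra name column, so one model works against either a shared or a separate schema. A self-contained SQLAlchemy sketch of that technique (toy models, SQLAlchemy 1.4+ assumed, not nova's API_BASE):

from sqlalchemy import Column, Integer, String
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class Aggregate(Base):
    __tablename__ = 'aggregates'
    id = Column(Integer, primary_key=True)
    uuid = Column(String(36), nullable=False)
    name = Column(String(255))


class AggregateWithoutName(Base):
    # Reuse the existing table, but leave `name` out of the mapping.
    __table__ = Aggregate.__table__
    __mapper_args__ = {'exclude_properties': ['name']}


# The second mapping exposes every column except `name`.
print(sorted(p.key for p in AggregateWithoutName.__mapper__.column_attrs))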

View File

@@ -31,7 +31,6 @@ from nova.i18n import _
 INIT_VERSION = {}
 INIT_VERSION['main'] = 215
 INIT_VERSION['api'] = 0
-INIT_VERSION['placement'] = 0
 _REPOSITORY = {}
 
 LOG = logging.getLogger(__name__)
@@ -42,8 +41,6 @@ def get_engine(database='main', context=None):
         return db_session.get_engine(context=context)
     if database == 'api':
         return db_session.get_api_engine()
-    if database == 'placement':
-        return db_session.get_placement_engine()
 
 
 def db_sync(version=None, database='main', context=None):
@@ -174,8 +171,6 @@ def _find_migrate_repo(database='main'):
     rel_path = 'migrate_repo'
     if database == 'api':
         rel_path = os.path.join('api_migrations', 'migrate_repo')
-    if database == 'placement':
-        rel_path = os.path.join('placement_migrations', 'migrate_repo')
     path = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                         rel_path)
     assert os.path.exists(path)

View File

@@ -1,25 +0,0 @@
[db_settings]
# Used to identify which repository this database is versioned under.
# You can use the name of your project.
repository_id=placement_db
# The name of the database table used to track the schema version.
# This name shouldn't already be used by your project.
# If this is changed once a database is under version control, you'll need to
# change the table name in each database too.
version_table=migrate_version
# When committing a change script, Migrate will attempt to generate the
# sql for all supported databases; normally, if one of them fails - probably
# because you don't have that database installed - it is ignored and the
# commit continues, perhaps ending successfully.
# Databases in this list MUST compile successfully during a commit, or the
# entire commit will fail. List the databases your application will actually
# be using to ensure your updates to that database work properly.
# This must be a list; example: ['postgres','sqlite']
required_dbs=[]
# When creating new change scripts, Migrate will stamp the new script with
# a version number. By default this is latest_version + 1. You can set this
# to 'true' to tell Migrate to use the UTC timestamp instead.
use_timestamp_numbering=False

View File

@@ -1,123 +0,0 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Initial database migration for the Placement API DB."""
from migrate import UniqueConstraint
from sqlalchemy import Column
from sqlalchemy import DateTime
from sqlalchemy import Float
from sqlalchemy import Index
from sqlalchemy import Integer
from sqlalchemy import MetaData
from sqlalchemy import String
from sqlalchemy import Table
from sqlalchemy import Unicode
def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    if migrate_engine.name == 'mysql':
        nameargs = {'collation': 'utf8_bin'}
    else:
        nameargs = {}

    resource_providers = Table(
        'resource_providers', meta,
        Column('created_at', DateTime),
        Column('updated_at', DateTime),
        Column('id', Integer, primary_key=True, nullable=False),
        Column('uuid', String(36), nullable=False),
        Column('name', Unicode(200, **nameargs), nullable=True),
        Column('generation', Integer, default=0),
        Column('can_host', Integer, default=0),
        UniqueConstraint('uuid', name='uniq_resource_providers0uuid'),
        UniqueConstraint('name', name='uniq_resource_providers0name'),
        Index('resource_providers_name_idx', 'name'),
        Index('resource_providers_uuid_idx', 'uuid'),
        mysql_engine='InnoDB',
        mysql_charset='utf8'
    )

    inventories = Table(
        'inventories', meta,
        Column('created_at', DateTime),
        Column('updated_at', DateTime),
        Column('id', Integer, primary_key=True, nullable=False),
        Column('resource_provider_id', Integer, nullable=False),
        Column('resource_class_id', Integer, nullable=False),
        Column('total', Integer, nullable=False),
        Column('reserved', Integer, nullable=False),
        Column('min_unit', Integer, nullable=False),
        Column('max_unit', Integer, nullable=False),
        Column('step_size', Integer, nullable=False),
        Column('allocation_ratio', Float, nullable=False),
        Index('inventories_resource_provider_id_idx',
              'resource_provider_id'),
        Index('inventories_resource_provider_resource_class_idx',
              'resource_provider_id', 'resource_class_id'),
        Index('inventories_resource_class_id_idx',
              'resource_class_id'),
        UniqueConstraint('resource_provider_id', 'resource_class_id',
            name='uniq_inventories0resource_provider_resource_class'),
        mysql_engine='InnoDB',
        mysql_charset='utf8'
    )

    allocations = Table(
        'allocations', meta,
        Column('created_at', DateTime),
        Column('updated_at', DateTime),
        Column('id', Integer, primary_key=True, nullable=False),
        Column('resource_provider_id', Integer, nullable=False),
        Column('consumer_id', String(36), nullable=False),
        Column('resource_class_id', Integer, nullable=False),
        Column('used', Integer, nullable=False),
        Index('allocations_resource_provider_class_used_idx',
              'resource_provider_id', 'resource_class_id',
              'used'),
        Index('allocations_resource_class_id_idx',
              'resource_class_id'),
        Index('allocations_consumer_id_idx', 'consumer_id'),
        mysql_engine='InnoDB',
        mysql_charset='utf8'
    )

    resource_provider_aggregates = Table(
        'resource_provider_aggregates', meta,
        Column('created_at', DateTime),
        Column('updated_at', DateTime),
        Column('resource_provider_id', Integer, primary_key=True,
               nullable=False),
        Column('aggregate_id', Integer, primary_key=True, nullable=False),
        Index('resource_provider_aggregates_aggregate_id_idx',
              'aggregate_id'),
        mysql_engine='InnoDB',
        mysql_charset='utf8'
    )

    aggregates = Table(
        'aggregates', meta,
        Column('created_at', DateTime),
        Column('updated_at', DateTime),
        Column('id', Integer, primary_key=True, nullable=False),
        Column('uuid', String(36), nullable=False),
        Index('aggregates_uuid_idx', 'uuid'),
        UniqueConstraint('uuid', name='uniq_aggregates0uuid'),
        mysql_engine='InnoDB',
        mysql_charset='utf8'
    )

    for table in [resource_providers, inventories, allocations,
                  resource_provider_aggregates, aggregates]:
        table.create(checkfirst=True)
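
The deleted script above is a standard sqlalchemy-migrate version script; nova's migration.db_sync() ultimately hands scripts like it to migrate's versioning API. A hedged sketch of driving such a repository directly, assuming sqlalchemy-migrate is installed; the connection URL and repository path below are placeholders for illustration only:

from migrate.versioning import api as versioning_api

DB_URL = 'sqlite:///placement.db'           # placeholder connection URL
REPO = 'placement_migrations/migrate_repo'  # path to the repository being deleted here

# Put the database under version control at version 0, then run every
# upgrade script (including the initial script above) to reach the
# latest version.
versioning_api.version_control(DB_URL, REPO, version=0)
versioning_api.upgrade(DB_URL, REPO)
print(versioning_api.db_version(DB_URL, REPO))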

View File

@@ -165,7 +165,7 @@ def _increment_provider_generation(conn, rp):
     return new_generation
 
 
-@db_api.placement_context_manager.writer
+@db_api.api_context_manager.writer
 def _add_inventory(context, rp, inventory):
     """Add one Inventory that wasn't already on the provider."""
     resource_class_id = fields.ResourceClass.index(inventory.resource_class)
@@ -177,7 +177,7 @@ def _add_inventory(context, rp, inventory):
     rp.generation = _increment_provider_generation(conn, rp)
 
 
-@db_api.placement_context_manager.writer
+@db_api.api_context_manager.writer
 def _update_inventory(context, rp, inventory):
     """Update an inventory already on the provider."""
     resource_class_id = fields.ResourceClass.index(inventory.resource_class)
@@ -189,7 +189,7 @@ def _update_inventory(context, rp, inventory):
     rp.generation = _increment_provider_generation(conn, rp)
 
 
-@db_api.placement_context_manager.writer
+@db_api.api_context_manager.writer
 def _delete_inventory(context, rp, resource_class_id):
     """Delete up to one Inventory of the given resource_class id."""
@@ -202,7 +202,7 @@ def _delete_inventory(context, rp, resource_class_id):
     rp.generation = _increment_provider_generation(conn, rp)
 
 
-@db_api.placement_context_manager.writer
+@db_api.api_context_manager.writer
 def _set_inventory(context, rp, inv_list):
     """Given an InventoryList object, replaces the inventory of the
     resource provider in a safe, atomic fashion using the resource
@@ -329,7 +329,7 @@ class ResourceProvider(base.NovaObject):
         self.obj_reset_changes()
 
     @staticmethod
-    @db_api.placement_context_manager.writer
+    @db_api.api_context_manager.writer
     def _create_in_db(context, updates):
         db_rp = models.ResourceProvider()
         db_rp.update(updates)
@@ -337,7 +337,7 @@ class ResourceProvider(base.NovaObject):
         return db_rp
 
     @staticmethod
-    @db_api.placement_context_manager.writer
+    @db_api.api_context_manager.writer
     def _delete(context, _id):
         # Don't delete the resource provider if it has allocations.
         rp_allocations = context.session.query(models.Allocation).\
@@ -354,7 +354,7 @@ class ResourceProvider(base.NovaObject):
             raise exception.NotFound()
 
     @staticmethod
-    @db_api.placement_context_manager.writer
+    @db_api.api_context_manager.writer
     def _update_in_db(context, id, updates):
         db_rp = context.session.query(models.ResourceProvider).filter_by(
             id=id).first()
@@ -370,7 +370,7 @@ class ResourceProvider(base.NovaObject):
         return resource_provider
 
     @staticmethod
-    @db_api.placement_context_manager.reader
+    @db_api.api_context_manager.reader
     def _get_by_uuid_from_db(context, uuid):
         result = context.session.query(models.ResourceProvider).filter_by(
             uuid=uuid).first()
@@ -393,7 +393,7 @@ class ResourceProviderList(base.ObjectListBase, base.NovaObject):
     )
 
     @staticmethod
-    @db_api.placement_context_manager.reader
+    @db_api.api_context_manager.reader
     def _get_all_by_filters_from_db(context, filters):
         if not filters:
             filters = {}
@@ -460,7 +460,7 @@ class _HasAResourceProvider(base.NovaObject):
         return target
 
 
-@db_api.placement_context_manager.writer
+@db_api.api_context_manager.writer
 def _create_inventory_in_db(context, updates):
     db_inventory = models.Inventory()
     db_inventory.update(updates)
@@ -468,7 +468,7 @@ def _create_inventory_in_db(context, updates):
     return db_inventory
 
 
-@db_api.placement_context_manager.writer
+@db_api.api_context_manager.writer
 def _update_inventory_in_db(context, id_, updates):
     result = context.session.query(
         models.Inventory).filter_by(id=id_).update(updates)
@@ -551,7 +551,7 @@ class InventoryList(base.ObjectListBase, base.NovaObject):
         return inv_rec
 
     @staticmethod
-    @db_api.placement_context_manager.reader
+    @db_api.api_context_manager.reader
     def _get_all_by_resource_provider(context, rp_uuid):
         return context.session.query(models.Inventory).\
             join(models.Inventory.resource_provider).\
@@ -580,7 +580,7 @@ class Allocation(_HasAResourceProvider):
     }
 
     @staticmethod
-    @db_api.placement_context_manager.writer
+    @db_api.api_context_manager.writer
     def _create_in_db(context, updates):
         db_allocation = models.Allocation()
         db_allocation.update(updates)
@@ -588,7 +588,7 @@ class Allocation(_HasAResourceProvider):
         return db_allocation
 
     @staticmethod
-    @db_api.placement_context_manager.writer
+    @db_api.api_context_manager.writer
     def _destroy(context, id):
         result = context.session.query(models.Allocation).filter_by(
             id=id).delete()
@@ -619,7 +619,7 @@ class AllocationList(base.ObjectListBase, base.NovaObject):
     }
 
     @staticmethod
-    @db_api.placement_context_manager.reader
+    @db_api.api_context_manager.reader
     def _get_allocations_from_db(context, rp_uuid):
         query = (context.session.query(models.Allocation)
                  .join(models.Allocation.resource_provider)
@@ -671,7 +671,7 @@ class UsageList(base.ObjectListBase, base.NovaObject):
     }
 
    @staticmethod
-    @db_api.placement_context_manager.reader
+    @db_api.api_context_manager.reader
     def _get_all_by_resource_provider_uuid(context, rp_uuid):
         query = (context.session.query(models.Inventory.resource_class_id,
                  func.coalesce(func.sum(models.Allocation.used), 0))
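
Every hunk in this file swaps the enginefacade decorator, which is what ties this data-access code to a particular database: the .reader/.writer decorators open a transaction on the configured engine and expose it as context.session for the duration of the call. A minimal, self-contained illustration of that mechanism, assuming oslo.db and SQLAlchemy are installed (not nova's real context class):

import sqlalchemy
from oslo_db.sqlalchemy import enginefacade

api_context_manager = enginefacade.transaction_context()
api_context_manager.configure(connection='sqlite://')


@enginefacade.transaction_context_provider
class RequestContext(object):
    """Bare-bones stand-in for nova.context.RequestContext."""


@api_context_manager.reader
def select_one(context):
    # The decorator has already started a transaction on the configured
    # engine and attached it to context.session.
    return context.session.execute(sqlalchemy.text('SELECT 1')).scalar()


print(select_one(RequestContext()))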

View File

@@ -213,7 +213,6 @@ class TestCase(testtools.TestCase):
         if self.USES_DB:
             self.useFixture(nova_fixtures.Database())
             self.useFixture(nova_fixtures.Database(database='api'))
-            self.useFixture(nova_fixtures.Database(database='placement'))
             self.useFixture(nova_fixtures.DefaultFlavorsFixture())
         elif not self.USES_DB_SELF:
             self.useFixture(nova_fixtures.DatabasePoisonFixture())

View File

@@ -43,7 +43,7 @@ from nova.tests.functional.api import client
 _TRUE_VALUES = ('True', 'true', '1', 'yes')
 
 CONF = cfg.CONF
-DB_SCHEMA = {'main': "", 'api': "", 'placement': ""}
+DB_SCHEMA = {'main': "", 'api': ""}
 SESSION_CONFIGURED = False
@@ -221,7 +221,7 @@ class Database(fixtures.Fixture):
     def __init__(self, database='main', connection=None):
         """Create a database fixture.
 
-        :param database: The type of database, 'main', 'api' or 'placement'
+        :param database: The type of database, 'main' or 'api'
         :param connection: The connection string to use
         """
         super(Database, self).__init__()
@@ -242,8 +242,6 @@ class Database(fixtures.Fixture):
             self.get_engine = session.get_engine
         elif database == 'api':
             self.get_engine = session.get_api_engine
-        elif database == 'placement':
-            self.get_engine = session.get_placement_engine
 
     def _cache_schema(self):
         global DB_SCHEMA
@@ -277,7 +275,7 @@ class DatabaseAtVersion(fixtures.Fixture):
         """Create a database fixture.
 
         :param version: Max version to sync to (or None for current)
-        :param database: The type of database, 'main', 'api', 'placement'
+        :param database: The type of database, 'main' or 'api'
         """
         super(DatabaseAtVersion, self).__init__()
         self.database = database
@@ -286,8 +284,6 @@ class DatabaseAtVersion(fixtures.Fixture):
             self.get_engine = session.get_engine
         elif database == 'api':
             self.get_engine = session.get_api_engine
-        elif database == 'placement':
-            self.get_engine = session.get_placement_engine
 
     def cleanup(self):
         engine = self.get_engine()

View File

@@ -49,13 +49,11 @@ class APIFixture(fixture.GabbiFixture):
         config.parse_args([], default_config_files=None, configure_db=False,
                           init_rpc=False)
 
-        self.placement_db_fixture = fixtures.Database('placement')
         # NOTE(cdent): api and main database are not used but we still need
         # to manage them to make the fixtures work correctly and not cause
         # conflicts with other tests in the same process.
         self.api_db_fixture = fixtures.Database('api')
         self.main_db_fixture = fixtures.Database('main')
-        self.placement_db_fixture.reset()
         self.api_db_fixture.reset()
         self.main_db_fixture.reset()
@@ -63,7 +61,6 @@ class APIFixture(fixture.GabbiFixture):
         os.environ['RP_NAME'] = uuidutils.generate_uuid()
 
     def stop_fixture(self):
-        self.placement_db_fixture.cleanup()
         self.api_db_fixture.cleanup()
         self.main_db_fixture.cleanup()
         if self.conf:

View File

@@ -47,7 +47,8 @@ class ResourceProviderBaseCase(test.NoDBTestCase):
 
     def setUp(self):
         super(ResourceProviderBaseCase, self).setUp()
-        self.useFixture(fixtures.Database(database='placement'))
+        self.useFixture(fixtures.Database())
+        self.useFixture(fixtures.Database(database='api'))
         self.context = context.RequestContext('fake-user', 'fake-project')
 
     def _make_allocation(self, rp_uuid=None):
@@ -420,6 +421,11 @@ class ResourceProviderTestCase(ResourceProviderBaseCase):
 
 class ResourceProviderListTestCase(ResourceProviderBaseCase):
 
+    def setUp(self):
+        super(ResourceProviderListTestCase, self).setUp()
+        self.useFixture(fixtures.Database())
+        self.useFixture(fixtures.Database(database='api'))
+        self.context = context.RequestContext('fake-user', 'fake-project')
+
     def test_get_all_by_filters(self):
         for rp_i in ['1', '2']:

View File

@@ -1,22 +0,0 @@
---
features:
  - |
    An optional configuration group placement_database can be used in
    nova.conf to configure a separate database for use with the placement
    API.

    We recommend setting the placement_database.connection setting to
    a non-None value in order to ease upgrade and migration of Nova in
    Ocata. Although we leave the default value of this setting to None
    -- in order to not break any deployments that use continuous delivery
    models -- setting this to a non-None value now will avoid a potentially
    lengthy data migration in the future.

    If placement_database.connection has a value this will be used as the
    connection URL for the placement database. Before launching the
    placement API service, the 'nova-manage placement sync' command
    must be run to create the necessary tables.

    When the setting is None the existing settings for the api_database
    will be used for hosting placement API data.
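
For reference, the (now reverted) option described above would have been set in nova.conf roughly as below; the connection URL is only a placeholder, and with the option unset placement data simply lives in the api database:

[placement_database]
# Optional. When unset (the default), the api_database settings are used.
connection = mysql+pymysql://nova:secret@controller/nova_placement?charset=utf8

# After setting a dedicated connection, the tables were created with:
#     nova-manage placement sync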