Optional separate database for placement API

If 'connection' is set in the 'placement_database' conf group use
that as the connection URL for the placement database. Otherwise if
it is None, the default, then use the entire api_database conf group
to configure a database connection.

This works by:

* adding a 'placement sync' and 'placement version' command to
  nova-manage

* adding placement_migrations that the 'placement sync' command will run

* adding a placement_database config group with the relevant
  database configuration settings

* adding a placement_context_manager. If
  CONF.placement_database.connection is None this is the same as
  the api_context_manager, otherwise it is a new one from its own config

* adjust nova/tests/fixtures to be aware of a 'placement' database
  and the placement_context_manager

This version of this change differs from others by not requiring
separate placement commands for migration, instead using existing
tooling, which makes the size of the change a bit smaller and also
addresses problems with the test fixtures needing to be too aware of
what migration code to run. Now it runs the same code.

This functionality is being provided to allow deployers to choose
between establishing a new database now or requiring a migration
later. The default is migration later.

This is a modification of Id93cb93a0f4e8667c8e7848aa8cff1d994c2c364
and I3290e26d0a212911f8ef386418b9fa08c685380b.

Change-Id: Ice03144376c9868c064e4393d531510615fc6fc1
Co-Authored-By: Chris Dent <cdent@anticdent.org>
Partially-Implements: blueprint generic-resource-pools
This commit is contained in:
Roman Podoliaka 2016-07-14 20:15:08 +03:00 committed by Jay Pipes
parent c5b02421a6
commit 1b5f9f8203
15 changed files with 309 additions and 22 deletions

View File

@ -927,6 +927,24 @@ class ApiDbCommands(object):
print(migration.db_version(database='api'))
# NOTE(cdent): The behavior of these commands is undefined when
# the placement configuration is undefined.
class PlacementCommands(object):
    """Class for managing the placement database.

    Exposed through nova-manage as the 'placement' category (see
    CATEGORIES), providing 'placement sync' and 'placement version'
    subcommands that operate on the 'placement' migrate repository.
    """
    # NOTE: the original defined an empty __init__ that only called pass;
    # the implicit default initializer is sufficient, so it is removed.

    @args('--version', metavar='<version>', help='Database version')
    def sync(self, version=None):
        """Sync the placement database up to the most recent version.

        :param version: Optional target schema version; when None the
            database is migrated to the latest available version.
        :returns: Whatever migration.db_sync() returns.
        """
        return migration.db_sync(version, database='placement')

    def version(self):
        """Print the current placement database version to stdout."""
        print(migration.db_version(database='placement'))
class AgentBuildCommands(object):
"""Class for managing agent builds."""
@ -1417,6 +1435,7 @@ CATEGORIES = {
'host': HostCommands,
'logs': GetLogCommands,
'network': NetworkCommands,
'placement': PlacementCommands,
'project': ProjectCommands,
'shell': ShellCommands,
'vm': VmCommands,

View File

@ -122,12 +122,66 @@ def enrich_help_text(alt_db_opts):
# texts here if needed.
alt_db_opt.help = db_opt.help + alt_db_opt.help
# NOTE(cdent): See the note above on api_db_opts. The same issues
# apply here.
# NOTE(cdent): See the note above on api_db_opts. The same issues
# apply here.
placement_db_group = cfg.OptGroup('placement_database',
                                  title='Placement API database options',
                                  help="""
The *Placement API Database* is a separate database which is used for the new
placement-api service. In Ocata release (14.0.0) this database is optional: if
connection option is not set, api database will be used instead. However, this
is not recommended, as it implies a potentially lengthy data migration in the
future. Operators are advised to use a separate database for Placement API from
the start.
""")

# TODO(rpodolyaka): see the notes on help messages on api_db_opts above, those
# also apply here
# NOTE(review): the empty help='' values below are deliberate — at
# list_opts() time enrich_help_text() prepends the matching oslo.db
# option's help text to each of these options.
placement_db_opts = [
    # Connection URL for the placement database. When left at the default
    # of None, configure() in nova/db/sqlalchemy/api.py falls back to the
    # api_database settings.
    cfg.StrOpt('connection',
               default=None,
               help='',
               secret=True),
    cfg.BoolOpt('sqlite_synchronous',
                default=True,
                help=''),
    cfg.StrOpt('slave_connection',
               secret=True,
               help=''),
    cfg.StrOpt('mysql_sql_mode',
               default='TRADITIONAL',
               help=''),
    cfg.IntOpt('idle_timeout',
               default=3600,
               help=''),
    cfg.IntOpt('max_pool_size',
               help=''),
    cfg.IntOpt('max_retries',
               default=10,
               help=''),
    cfg.IntOpt('retry_interval',
               default=10,
               help=''),
    cfg.IntOpt('max_overflow',
               help=''),
    cfg.IntOpt('connection_debug',
               default=0,
               help=''),
    cfg.BoolOpt('connection_trace',
                default=False,
                help=''),
    cfg.IntOpt('pool_timeout',
               help=''),
]  # noqa
def register_opts(conf):
    """Register all database options on the given config object.

    Applies the oslo.db defaults for the main database connection, then
    registers the db driver option plus the api and placement database
    option groups.
    """
    oslo_db_options.set_defaults(conf, connection=_DEFAULT_SQL_CONNECTION,
                                 sqlite_db='nova.sqlite')
    conf.register_opt(db_driver_opt)
    for group_opts, group in ((api_db_opts, api_db_group),
                              (placement_db_opts, placement_db_group)):
        conf.register_opts(group_opts, group=group)
def list_opts():
@ -139,6 +193,8 @@ def list_opts():
# in the "sample.conf" file, I omit the listing of the "oslo_db_options"
# here.
enrich_help_text(api_db_opts)
enrich_help_text(placement_db_opts)
return {'DEFAULT': [db_driver_opt],
api_db_group: api_db_opts,
placement_db_group: placement_db_opts,
}

View File

@ -75,6 +75,7 @@ LOG = logging.getLogger(__name__)
# Transaction context managers, one per database nova talks to. The
# placement manager is configured in configure() below from either the
# placement_database or api_database settings, depending on whether a
# separate placement connection is set.
main_context_manager = enginefacade.transaction_context()
api_context_manager = enginefacade.transaction_context()
placement_context_manager = enginefacade.transaction_context()
def _get_db_conf(conf_group, connection=None):
@ -108,6 +109,10 @@ def _context_manager_from_context(context):
def configure(conf):
    """Configure the transaction context managers from nova config.

    When CONF.placement_database.connection is None (the default), the
    placement context manager is configured from the api_database group
    instead, so placement data lives in the api database.

    :param conf: A ConfigOpts object with database, api_database and
        placement_database groups registered.
    """
    main_context_manager.configure(**_get_db_conf(conf.database))
    api_context_manager.configure(**_get_db_conf(conf.api_database))

    if conf.placement_database.connection is None:
        # NOTE(review): this rebinds the placement_database attribute on
        # the passed-in conf object to the api_database group, so any later
        # reads of conf.placement_database see the api settings — confirm
        # this side effect on the (typically global) CONF is intended.
        conf.placement_database = conf.api_database
    placement_context_manager.configure(
        **_get_db_conf(conf.placement_database))
def create_context_manager(connection=None):
@ -142,6 +147,10 @@ def get_api_engine():
return api_context_manager.get_legacy_facade().get_engine()
def get_placement_engine():
    """Return the SQLAlchemy engine for the placement database."""
    facade = placement_context_manager.get_legacy_facade()
    return facade.get_engine()
_SHADOW_TABLE_PREFIX = 'shadow_'
_DEFAULT_QUOTA_NAME = 'default'
PER_PROJECT_QUOTAS = ['fixed_ips', 'floating_ips', 'networks']

View File

@ -354,6 +354,31 @@ class ResourceProviderAggregate(API_BASE):
aggregate_id = Column(Integer, primary_key=True, nullable=False)
class PlacementAggregate(API_BASE):
    """Represents a grouping of resource providers.

    Mapped onto the existing api DB 'aggregates' table rather than a new
    table, so the same model works whether placement shares the api
    database or runs against its own.
    """
    # NOTE(rpodolyaka): placement API can optionally use the subset of tables
    # of api DB instead of requiring its own DB. aggregates table is the only
    # table which schema is a bit different (additional `name` column), but we
    # can work around that by providing an additional mapping class to a
    # subset of table columns, so that this model works for both separate and
    # shared DBs cases.
    __table__ = API_BASE.metadata.tables['aggregates']
    # Exclude the api-DB-only `name` column from the mapping so the model
    # stays valid against the placement schema, which has no such column.
    __mapper_args__ = {
        'exclude_properties': ['name']
    }
    # Many-to-many link to providers via resource_provider_aggregates;
    # the backref exposes .aggregates on ResourceProvider.
    resource_providers = orm.relationship(
        'ResourceProvider',
        secondary='resource_provider_aggregates',
        primaryjoin=('PlacementAggregate.id == '
                     'ResourceProviderAggregate.aggregate_id'),
        secondaryjoin=('ResourceProviderAggregate.resource_provider_id == '
                       'ResourceProvider.id'),
        backref='aggregates'
    )
class InstanceGroupMember(API_BASE):
"""Represents the members for an instance group."""
__tablename__ = 'instance_group_member'

View File

@ -31,6 +31,7 @@ from nova.i18n import _
INIT_VERSION = {}
INIT_VERSION['main'] = 215
INIT_VERSION['api'] = 0
INIT_VERSION['placement'] = 0
_REPOSITORY = {}
LOG = logging.getLogger(__name__)
@ -41,6 +42,8 @@ def get_engine(database='main'):
return db_session.get_engine()
if database == 'api':
return db_session.get_api_engine()
if database == 'placement':
return db_session.get_placement_engine()
def db_sync(version=None, database='main'):
@ -165,6 +168,8 @@ def _find_migrate_repo(database='main'):
rel_path = 'migrate_repo'
if database == 'api':
rel_path = os.path.join('api_migrations', 'migrate_repo')
if database == 'placement':
rel_path = os.path.join('placement_migrations', 'migrate_repo')
path = os.path.join(os.path.abspath(os.path.dirname(__file__)),
rel_path)
assert os.path.exists(path)

View File

@ -0,0 +1,25 @@
[db_settings]
# Used to identify which repository this database is versioned under.
# You can use the name of your project.
repository_id=placement_db
# The name of the database table used to track the schema version.
# This name shouldn't already be used by your project.
# If this is changed once a database is under version control, you'll need to
# change the table name in each database too.
version_table=migrate_version
# When committing a change script, Migrate will attempt to generate the
# sql for all supported databases; normally, if one of them fails - probably
# because you don't have that database installed - it is ignored and the
# commit continues, perhaps ending successfully.
# Databases in this list MUST compile successfully during a commit, or the
# entire commit will fail. List the databases your application will actually
# be using to ensure your updates to that database work properly.
# This must be a list; example: ['postgres','sqlite']
required_dbs=[]
# When creating new change scripts, Migrate will stamp the new script with
# a version number. By default this is latest_version + 1. You can set this
# to 'true' to tell Migrate to use the UTC timestamp instead.
use_timestamp_numbering=False

View File

@ -0,0 +1,123 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Initial database migration for the Placement API DB."""
from migrate import UniqueConstraint
from sqlalchemy import Column
from sqlalchemy import DateTime
from sqlalchemy import Float
from sqlalchemy import Index
from sqlalchemy import Integer
from sqlalchemy import MetaData
from sqlalchemy import String
from sqlalchemy import Table
from sqlalchemy import Unicode
def upgrade(migrate_engine):
    """Create the initial schema for the placement database.

    Creates the resource_providers, inventories, allocations,
    resource_provider_aggregates and aggregates tables.

    :param migrate_engine: SQLAlchemy engine bound to the target database,
        supplied by sqlalchemy-migrate.
    """
    meta = MetaData()
    meta.bind = migrate_engine

    # Use a binary collation for provider names on MySQL so that name
    # comparisons there are byte-wise rather than the default
    # case-insensitive utf8 collation.
    if migrate_engine.name == 'mysql':
        nameargs = {'collation': 'utf8_bin'}
    else:
        nameargs = {}

    resource_providers = Table(
        'resource_providers', meta,
        Column('created_at', DateTime),
        Column('updated_at', DateTime),
        Column('id', Integer, primary_key=True, nullable=False),
        Column('uuid', String(36), nullable=False),
        Column('name', Unicode(200, **nameargs), nullable=True),
        Column('generation', Integer, default=0),
        Column('can_host', Integer, default=0),
        UniqueConstraint('uuid', name='uniq_resource_providers0uuid'),
        UniqueConstraint('name', name='uniq_resource_providers0name'),
        Index('resource_providers_name_idx', 'name'),
        Index('resource_providers_uuid_idx', 'uuid'),
        mysql_engine='InnoDB',
        mysql_charset='utf8'
    )

    # One row per (resource provider, resource class) pair, enforced by the
    # unique constraint below.
    inventories = Table(
        'inventories', meta,
        Column('created_at', DateTime),
        Column('updated_at', DateTime),
        Column('id', Integer, primary_key=True, nullable=False),
        Column('resource_provider_id', Integer, nullable=False),
        Column('resource_class_id', Integer, nullable=False),
        Column('total', Integer, nullable=False),
        Column('reserved', Integer, nullable=False),
        Column('min_unit', Integer, nullable=False),
        Column('max_unit', Integer, nullable=False),
        Column('step_size', Integer, nullable=False),
        Column('allocation_ratio', Float, nullable=False),
        Index('inventories_resource_provider_id_idx',
              'resource_provider_id'),
        Index('inventories_resource_provider_resource_class_idx',
              'resource_provider_id', 'resource_class_id'),
        Index('inventories_resource_class_id_idx',
              'resource_class_id'),
        UniqueConstraint('resource_provider_id', 'resource_class_id',
                         name='uniq_inventories0resource_provider_resource_class'),
        mysql_engine='InnoDB',
        mysql_charset='utf8'
    )

    # Amounts of a resource class consumed by a consumer (identified by
    # UUID) against a resource provider.
    allocations = Table(
        'allocations', meta,
        Column('created_at', DateTime),
        Column('updated_at', DateTime),
        Column('id', Integer, primary_key=True, nullable=False),
        Column('resource_provider_id', Integer, nullable=False),
        Column('consumer_id', String(36), nullable=False),
        Column('resource_class_id', Integer, nullable=False),
        Column('used', Integer, nullable=False),
        Index('allocations_resource_provider_class_used_idx',
              'resource_provider_id', 'resource_class_id',
              'used'),
        Index('allocations_resource_class_id_idx',
              'resource_class_id'),
        Index('allocations_consumer_id_idx', 'consumer_id'),
        mysql_engine='InnoDB',
        mysql_charset='utf8'
    )

    # Association table between providers and aggregates; the composite
    # primary key makes each pairing unique.
    resource_provider_aggregates = Table(
        'resource_provider_aggregates', meta,
        Column('created_at', DateTime),
        Column('updated_at', DateTime),
        Column('resource_provider_id', Integer, primary_key=True,
               nullable=False),
        Column('aggregate_id', Integer, primary_key=True, nullable=False),
        Index('resource_provider_aggregates_aggregate_id_idx',
              'aggregate_id'),
        mysql_engine='InnoDB',
        mysql_charset='utf8'
    )

    aggregates = Table(
        'aggregates', meta,
        Column('created_at', DateTime),
        Column('updated_at', DateTime),
        Column('id', Integer, primary_key=True, nullable=False),
        Column('uuid', String(36), nullable=False),
        Index('aggregates_uuid_idx', 'uuid'),
        UniqueConstraint('uuid', name='uniq_aggregates0uuid'),
        mysql_engine='InnoDB',
        mysql_charset='utf8'
    )

    # checkfirst=True skips creation of any table that already exists,
    # e.g. when placement is configured to share the api database.
    for table in [resource_providers, inventories, allocations,
                  resource_provider_aggregates, aggregates]:
        table.create(checkfirst=True)

View File

@ -135,7 +135,7 @@ def _increment_provider_generation(conn, rp):
return new_generation
@db_api.api_context_manager.writer
@db_api.placement_context_manager.writer
def _add_inventory(context, rp, inventory):
"""Add one Inventory that wasn't already on the provider."""
resource_class_id = fields.ResourceClass.index(inventory.resource_class)
@ -147,7 +147,7 @@ def _add_inventory(context, rp, inventory):
rp.generation = _increment_provider_generation(conn, rp)
@db_api.api_context_manager.writer
@db_api.placement_context_manager.writer
def _update_inventory(context, rp, inventory):
"""Update an inventory already on the provider."""
resource_class_id = fields.ResourceClass.index(inventory.resource_class)
@ -159,7 +159,7 @@ def _update_inventory(context, rp, inventory):
rp.generation = _increment_provider_generation(conn, rp)
@db_api.api_context_manager.writer
@db_api.placement_context_manager.writer
def _delete_inventory(context, rp, resource_class_id):
"""Delete up to one Inventory of the given resource_class id."""
@ -172,7 +172,7 @@ def _delete_inventory(context, rp, resource_class_id):
rp.generation = _increment_provider_generation(conn, rp)
@db_api.api_context_manager.writer
@db_api.placement_context_manager.writer
def _set_inventory(context, rp, inv_list):
"""Given an InventoryList object, replaces the inventory of the
resource provider in a safe, atomic fashion using the resource
@ -299,7 +299,7 @@ class ResourceProvider(base.NovaObject):
self.obj_reset_changes()
@staticmethod
@db_api.api_context_manager.writer
@db_api.placement_context_manager.writer
def _create_in_db(context, updates):
db_rp = models.ResourceProvider()
db_rp.update(updates)
@ -307,7 +307,7 @@ class ResourceProvider(base.NovaObject):
return db_rp
@staticmethod
@db_api.api_context_manager.writer
@db_api.placement_context_manager.writer
def _delete(context, _id):
# Don't delete the resource provider if it has allocations.
rp_allocations = context.session.query(models.Allocation).\
@ -324,7 +324,7 @@ class ResourceProvider(base.NovaObject):
raise exception.NotFound()
@staticmethod
@db_api.api_context_manager.writer
@db_api.placement_context_manager.writer
def _update_in_db(context, id, updates):
db_rp = context.session.query(models.ResourceProvider).filter_by(
id=id).first()
@ -340,7 +340,7 @@ class ResourceProvider(base.NovaObject):
return resource_provider
@staticmethod
@db_api.api_context_manager.reader
@db_api.placement_context_manager.reader
def _get_by_uuid_from_db(context, uuid):
result = context.session.query(models.ResourceProvider).filter_by(
uuid=uuid).first()
@ -359,7 +359,7 @@ class ResourceProviderList(base.ObjectListBase, base.NovaObject):
}
@staticmethod
@db_api.api_context_manager.reader
@db_api.placement_context_manager.reader
def _get_all_by_filters_from_db(context, filters):
if not filters:
filters = {}
@ -424,7 +424,7 @@ class _HasAResourceProvider(base.NovaObject):
return target
@db_api.api_context_manager.writer
@db_api.placement_context_manager.writer
def _create_inventory_in_db(context, updates):
db_inventory = models.Inventory()
db_inventory.update(updates)
@ -432,7 +432,7 @@ def _create_inventory_in_db(context, updates):
return db_inventory
@db_api.api_context_manager.writer
@db_api.placement_context_manager.writer
def _update_inventory_in_db(context, id_, updates):
result = context.session.query(
models.Inventory).filter_by(id=id_).update(updates)
@ -515,7 +515,7 @@ class InventoryList(base.ObjectListBase, base.NovaObject):
return inv_rec
@staticmethod
@db_api.api_context_manager.reader
@db_api.placement_context_manager.reader
def _get_all_by_resource_provider(context, rp_uuid):
return context.session.query(models.Inventory).\
join(models.Inventory.resource_provider).\
@ -544,7 +544,7 @@ class Allocation(_HasAResourceProvider):
}
@staticmethod
@db_api.api_context_manager.writer
@db_api.placement_context_manager.writer
def _create_in_db(context, updates):
db_allocation = models.Allocation()
db_allocation.update(updates)
@ -552,7 +552,7 @@ class Allocation(_HasAResourceProvider):
return db_allocation
@staticmethod
@db_api.api_context_manager.writer
@db_api.placement_context_manager.writer
def _destroy(context, id):
result = context.session.query(models.Allocation).filter_by(
id=id).delete()
@ -583,7 +583,7 @@ class AllocationList(base.ObjectListBase, base.NovaObject):
}
@staticmethod
@db_api.api_context_manager.reader
@db_api.placement_context_manager.reader
def _get_allocations_from_db(context, rp_uuid):
query = (context.session.query(models.Allocation)
.join(models.Allocation.resource_provider)

View File

@ -213,6 +213,7 @@ class TestCase(testtools.TestCase):
if self.USES_DB:
self.useFixture(nova_fixtures.Database())
self.useFixture(nova_fixtures.Database(database='api'))
self.useFixture(nova_fixtures.Database(database='placement'))
self.useFixture(nova_fixtures.DefaultFlavorsFixture())
elif not self.USES_DB_SELF:
self.useFixture(nova_fixtures.DatabasePoisonFixture())

View File

@ -43,7 +43,7 @@ from nova.tests.functional.api import client
_TRUE_VALUES = ('True', 'true', '1', 'yes')
CONF = cfg.CONF
DB_SCHEMA = {'main': "", 'api': ""}
DB_SCHEMA = {'main': "", 'api': "", 'placement': ""}
SESSION_CONFIGURED = False
@ -221,7 +221,7 @@ class Database(fixtures.Fixture):
def __init__(self, database='main', connection=None):
"""Create a database fixture.
:param database: The type of database, 'main' or 'api'
:param database: The type of database, 'main', 'api' or 'placement'
:param connection: The connection string to use
"""
super(Database, self).__init__()
@ -242,6 +242,8 @@ class Database(fixtures.Fixture):
self.get_engine = session.get_engine
elif database == 'api':
self.get_engine = session.get_api_engine
elif database == 'placement':
self.get_engine = session.get_placement_engine
def _cache_schema(self):
global DB_SCHEMA
@ -275,7 +277,7 @@ class DatabaseAtVersion(fixtures.Fixture):
"""Create a database fixture.
:param version: Max version to sync to (or None for current)
:param database: The type of database, 'main' or 'api'
:param database: The type of database, 'main', 'api', 'placement'
"""
super(DatabaseAtVersion, self).__init__()
self.database = database
@ -284,6 +286,8 @@ class DatabaseAtVersion(fixtures.Fixture):
self.get_engine = session.get_engine
elif database == 'api':
self.get_engine = session.get_api_engine
elif database == 'placement':
self.get_engine = session.get_placement_engine
def cleanup(self):
engine = self.get_engine()

View File

@ -47,8 +47,7 @@ class ResourceProviderBaseCase(test.NoDBTestCase):
def setUp(self):
    """Provide main and placement database fixtures and a request context."""
    super(ResourceProviderBaseCase, self).setUp()
    for db_fixture in (fixtures.Database(),
                       fixtures.Database(database='placement')):
        self.useFixture(db_fixture)
    self.context = context.RequestContext('fake-user', 'fake-project')
def _make_allocation(self, rp_uuid=None):
@ -369,8 +368,7 @@ class ResourceProviderListTestCase(test.NoDBTestCase):
def setUp(self):
    """Provide main and placement database fixtures and a request context."""
    super(ResourceProviderListTestCase, self).setUp()
    for db_fixture in (fixtures.Database(),
                       fixtures.Database(database='placement')):
        self.useFixture(db_fixture)
    self.context = context.RequestContext('fake-user', 'fake-project')
def test_get_all_by_filters(self):

View File

@ -0,0 +1,22 @@
---
features:
- |
An optional configuration group placement_database can be used in
nova.conf to configure a separate database for use with the placement
API.
We recommend setting the placement_database.connection setting to
a non-None value in order to ease upgrade and migration of Nova in
Ocata. Although we leave the default value of this setting to None
-- in order to not break any deployments that use continuous delivery
models -- setting this to a non-None value now will avoid a potentially
lengthy data migration in the future.
If placement_database.connection has a value, it will be used as the
connection URL for the placement database. Before launching the
placement API service, the 'nova-manage placement sync' command
must be run to create the necessary tables.
When the setting is None the existing settings for the api_database
will be used for hosting placement API data.