db: Post reshuffle cleanup

Introduce a new 'nova.db.api.api' module to hold API database-specific
helpers, plus a generic 'nova.db.utils' module to hold code suitable for
both main and API databases. This highlights a level of complexity
around connection management that is present for the main database but
not for the API database: the former must handle cells while the latter
does not.

Change-Id: Ia5304c552ce552ae3c5223a2bfb3a9cd543ec57c
Signed-off-by: Stephen Finucane <stephenfin@redhat.com>
Stephen Finucane 2021-04-01 12:14:33 +01:00
parent bf8b5fc7d0
commit 43b253cd60
32 changed files with 625 additions and 484 deletions
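
As a rough sketch of the new layout (not part of the diff below), code that
previously reached into 'nova.db.main.api' for both databases now imports the
two modules separately and configures each. The wrapper function name here is
hypothetical, but the imports and 'configure' calls mirror the nova/config.py
hunk in this commit.

import nova.conf
from nova.db.api import api as api_db_api    # API database helpers
from nova.db.main import api as main_db_api  # main (cell) database helpers

CONF = nova.conf.CONF

def configure_both_databases(conf=CONF):
    # Each module now owns its own enginefacade transaction context, so
    # both must be configured explicitly (hypothetical wrapper).
    main_db_api.configure(conf)
    api_db_api.configure(conf)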

View File

@ -32,7 +32,8 @@ from nova.cmd import common as cmd_common
import nova.conf
from nova import config
from nova import context as nova_context
from nova.db.main import api as db_session
from nova.db.api import api as api_db_api
from nova.db.main import api as main_db_api
from nova import exception
from nova.i18n import _
from nova.objects import cell_mapping as cell_mapping_obj
@ -86,7 +87,7 @@ class UpgradeCommands(upgradecheck.UpgradeCommands):
# table, or by only counting compute nodes with a service version of at
# least 15 which was the highest service version when Newton was
# released.
meta = sa.MetaData(bind=db_session.get_engine(context=context))
meta = sa.MetaData(bind=main_db_api.get_engine(context=context))
compute_nodes = sa.Table('compute_nodes', meta, autoload=True)
return sa.select([sqlfunc.count()]).select_from(compute_nodes).where(
compute_nodes.c.deleted == 0).scalar()
@ -103,7 +104,7 @@ class UpgradeCommands(upgradecheck.UpgradeCommands):
for compute nodes if there are no host mappings on a fresh install.
"""
meta = sa.MetaData()
meta.bind = db_session.get_api_engine()
meta.bind = api_db_api.get_engine()
cell_mappings = self._get_cell_mappings()
count = len(cell_mappings)

View File

@ -53,7 +53,8 @@ from nova import conductor
import nova.conf
from nova import context as nova_context
from nova import crypto
from nova.db.main import api as db_api
from nova.db.api import api as api_db_api
from nova.db.main import api as main_db_api
from nova import exception
from nova import exception_wrapper
from nova.i18n import _
@ -1081,7 +1082,7 @@ class API:
network_metadata)
@staticmethod
@db_api.api_context_manager.writer
@api_db_api.context_manager.writer
def _create_reqspec_buildreq_instmapping(context, rs, br, im):
"""Create the request spec, build request, and instance mapping in a
single database transaction.
@ -5082,7 +5083,7 @@ class API:
def get_instance_metadata(self, context, instance):
"""Get all metadata associated with an instance."""
return db_api.instance_metadata_get(context, instance.uuid)
return main_db_api.instance_metadata_get(context, instance.uuid)
@check_instance_lock
@check_instance_state(vm_state=[vm_states.ACTIVE, vm_states.PAUSED,
@ -5962,7 +5963,7 @@ class HostAPI:
"""Return the task logs within a given range, optionally
filtering by host and/or state.
"""
return db_api.task_log_get_all(
return main_db_api.task_log_get_all(
context, task_name, period_beginning, period_ending, host=host,
state=state)
@ -6055,7 +6056,7 @@ class HostAPI:
if cell.uuid == objects.CellMapping.CELL0_UUID:
continue
with nova_context.target_cell(context, cell) as cctxt:
cell_stats.append(db_api.compute_node_statistics(cctxt))
cell_stats.append(main_db_api.compute_node_statistics(cctxt))
if cell_stats:
keys = cell_stats[0].keys()

View File

@ -22,7 +22,8 @@ from oslo_policy import opts as policy_opts
from oslo_utils import importutils
import nova.conf
from nova.db.main import api as db_api
from nova.db.api import api as api_db_api
from nova.db.main import api as main_db_api
from nova import middleware
from nova import policy
from nova import rpc
@ -100,4 +101,5 @@ def parse_args(argv, default_config_files=None, configure_db=True,
rpc.init(CONF)
if configure_db:
db_api.configure(CONF)
main_db_api.configure(CONF)
api_db_api.configure(CONF)

nova/db/api/api.py (new file, 50 lines)
View File

@ -0,0 +1,50 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_db.sqlalchemy import enginefacade
from oslo_utils import importutils
import sqlalchemy as sa
import nova.conf
profiler_sqlalchemy = importutils.try_import('osprofiler.sqlalchemy')
CONF = nova.conf.CONF
context_manager = enginefacade.transaction_context()
# NOTE(stephenfin): We don't need equivalents of the 'get_context_manager' or
# 'create_context_manager' APIs found in 'nova.db.main.api' since we don't need
# to be cell-aware here
def _get_db_conf(conf_group, connection=None):
kw = dict(conf_group.items())
if connection is not None:
kw['connection'] = connection
return kw
def configure(conf):
context_manager.configure(**_get_db_conf(conf.api_database))
if (
profiler_sqlalchemy and
CONF.profiler.enabled and
CONF.profiler.trace_sqlalchemy
):
context_manager.append_on_engine_create(
lambda eng: profiler_sqlalchemy.add_tracing(sa, eng, "db"))
def get_engine():
return context_manager.writer.get_engine()
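
A minimal usage sketch for the new module (illustrative only; it assumes
nova's configuration has already been loaded, and the mapping count query is
just an example). The decorator usage mirrors the objects hunks later in this
commit.

import nova.conf
from nova.db.api import api as api_db_api
from nova.db.api import models as api_models

CONF = nova.conf.CONF
api_db_api.configure(CONF)        # wire up the API database engine
engine = api_db_api.get_engine()  # plain SQLAlchemy engine, no cell handling


@api_db_api.context_manager.reader
def _count_cell_mappings(context):
    # 'context.session' is injected by the enginefacade reader decorator.
    return context.session.query(api_models.CellMapping).count()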

View File

@ -22,7 +22,6 @@ import copy
import datetime
import functools
import inspect
import sys
import traceback
from oslo_db import api as oslo_db_api
@ -49,6 +48,8 @@ from nova.compute import vm_states
import nova.conf
import nova.context
from nova.db.main import models
from nova.db import utils as db_utils
from nova.db.utils import require_context
from nova import exception
from nova.i18n import _
from nova import safe_utils
@ -60,8 +61,7 @@ LOG = logging.getLogger(__name__)
DISABLE_DB_ACCESS = False
main_context_manager = enginefacade.transaction_context()
api_context_manager = enginefacade.transaction_context()
context_manager = enginefacade.transaction_context()
def _get_db_conf(conf_group, connection=None):
@ -89,15 +89,14 @@ def _joinedload_all(column):
def configure(conf):
main_context_manager.configure(**_get_db_conf(conf.database))
api_context_manager.configure(**_get_db_conf(conf.api_database))
context_manager.configure(**_get_db_conf(conf.database))
if profiler_sqlalchemy and CONF.profiler.enabled \
and CONF.profiler.trace_sqlalchemy:
main_context_manager.append_on_engine_create(
lambda eng: profiler_sqlalchemy.add_tracing(sa, eng, "db"))
api_context_manager.append_on_engine_create(
if (
profiler_sqlalchemy and
CONF.profiler.enabled and
CONF.profiler.trace_sqlalchemy
):
context_manager.append_on_engine_create(
lambda eng: profiler_sqlalchemy.add_tracing(sa, eng, "db"))
@ -116,7 +115,7 @@ def get_context_manager(context):
:param context: The request context that can contain a context manager
"""
return _context_manager_from_context(context) or main_context_manager
return _context_manager_from_context(context) or context_manager
def get_engine(use_slave=False, context=None):
@ -131,40 +130,11 @@ def get_engine(use_slave=False, context=None):
return ctxt_mgr.writer.get_engine()
def get_api_engine():
return api_context_manager.writer.get_engine()
_SHADOW_TABLE_PREFIX = 'shadow_'
_DEFAULT_QUOTA_NAME = 'default'
PER_PROJECT_QUOTAS = ['fixed_ips', 'floating_ips', 'networks']
# NOTE(stephenfin): This is required and used by oslo.db
def get_backend():
"""The backend is this module itself."""
return sys.modules[__name__]
def require_context(f):
"""Decorator to require *any* user or admin context.
This does no authorization for user or project access matching, see
:py:func:`nova.context.authorize_project_context` and
:py:func:`nova.context.authorize_user_context`.
The first argument to the wrapped function must be the context.
"""
@functools.wraps(f)
def wrapper(*args, **kwargs):
nova.context.require_context(args[0])
return f(*args, **kwargs)
wrapper.__signature__ = inspect.signature(f)
return wrapper
def select_db_reader_mode(f):
"""Decorator to select synchronous or asynchronous reader mode.
@ -1662,9 +1632,8 @@ def instance_get_all_by_filters_sort(context, filters, limit=None, marker=None,
if limit == 0:
return []
sort_keys, sort_dirs = process_sort_params(sort_keys,
sort_dirs,
default_dir='desc')
sort_keys, sort_dirs = db_utils.process_sort_params(
sort_keys, sort_dirs, default_dir='desc')
if columns_to_join is None:
columns_to_join_new = ['info_cache', 'security_groups']
@ -2043,75 +2012,6 @@ def _exact_instance_filter(query, filters, legal_keys):
return query
def process_sort_params(sort_keys, sort_dirs,
default_keys=['created_at', 'id'],
default_dir='asc'):
"""Process the sort parameters to include default keys.
Creates a list of sort keys and a list of sort directions. Adds the default
keys to the end of the list if they are not already included.
When adding the default keys to the sort keys list, the associated
direction is:
1) The first element in the 'sort_dirs' list (if specified), else
2) 'default_dir' value (Note that 'asc' is the default value since this is
the default in sqlalchemy.utils.paginate_query)
:param sort_keys: List of sort keys to include in the processed list
:param sort_dirs: List of sort directions to include in the processed list
:param default_keys: List of sort keys that need to be included in the
processed list, they are added at the end of the list
if not already specified.
:param default_dir: Sort direction associated with each of the default
keys that are not supplied, used when they are added
to the processed list
:returns: list of sort keys, list of sort directions
:raise exception.InvalidInput: If more sort directions than sort keys
are specified or if an invalid sort
direction is specified
"""
# Determine direction to use for when adding default keys
if sort_dirs and len(sort_dirs) != 0:
default_dir_value = sort_dirs[0]
else:
default_dir_value = default_dir
# Create list of keys (do not modify the input list)
if sort_keys:
result_keys = list(sort_keys)
else:
result_keys = []
# If a list of directions is not provided, use the default sort direction
# for all provided keys
if sort_dirs:
result_dirs = []
# Verify sort direction
for sort_dir in sort_dirs:
if sort_dir not in ('asc', 'desc'):
msg = _("Unknown sort direction, must be 'desc' or 'asc'")
raise exception.InvalidInput(reason=msg)
result_dirs.append(sort_dir)
else:
result_dirs = [default_dir_value for _sort_key in result_keys]
# Ensure that the key and direction length match
while len(result_dirs) < len(result_keys):
result_dirs.append(default_dir_value)
# Unless more directions are specified, which is an error
if len(result_dirs) > len(result_keys):
msg = _("Sort direction size exceeds sort key size")
raise exception.InvalidInput(reason=msg)
# Ensure defaults are included
for key in default_keys:
if key not in result_keys:
result_keys.append(key)
result_dirs.append(default_dir_value)
return result_keys, result_dirs
@require_context
@pick_context_manager_reader_allow_async
def instance_get_active_by_window_joined(context, begin, end=None,
@ -3507,8 +3407,8 @@ def migration_get_all_by_filters(context, filters,
raise exception.MarkerNotFound(marker=marker)
if limit or marker or sort_keys or sort_dirs:
# Default sort by desc(['created_at', 'id'])
sort_keys, sort_dirs = process_sort_params(sort_keys, sort_dirs,
default_dir='desc')
sort_keys, sort_dirs = db_utils.process_sort_params(
sort_keys, sort_dirs, default_dir='desc')
return sqlalchemyutils.paginate_query(query,
models.Migration,
limit=limit,

View File

@ -22,7 +22,8 @@ from migrate.versioning.repository import Repository
from oslo_log import log as logging
import sqlalchemy
from nova.db.main import api as db_session
from nova.db.api import api as api_db_api
from nova.db.main import api as main_db_api
from nova import exception
from nova.i18n import _
@ -36,10 +37,10 @@ LOG = logging.getLogger(__name__)
def get_engine(database='main', context=None):
if database == 'main':
return db_session.get_engine(context=context)
return main_db_api.get_engine(context=context)
if database == 'api':
return db_session.get_api_engine()
return api_db_api.get_engine()
def find_migrate_repo(database='main'):

nova/db/utils.py (new file, 109 lines)
View File

@ -0,0 +1,109 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
import inspect
import nova.context
from nova import exception
from nova.i18n import _
def require_context(f):
"""Decorator to require *any* user or admin context.
This does no authorization for user or project access matching, see
:py:func:`nova.context.authorize_project_context` and
:py:func:`nova.context.authorize_user_context`.
The first argument to the wrapped function must be the context.
"""
@functools.wraps(f)
def wrapper(*args, **kwargs):
nova.context.require_context(args[0])
return f(*args, **kwargs)
wrapper.__signature__ = inspect.signature(f)
return wrapper
def process_sort_params(
sort_keys,
sort_dirs,
default_keys=['created_at', 'id'],
default_dir='asc',
):
"""Process the sort parameters to include default keys.
Creates a list of sort keys and a list of sort directions. Adds the default
keys to the end of the list if they are not already included.
When adding the default keys to the sort keys list, the associated
direction is:
1. The first element in the 'sort_dirs' list (if specified), else
2. 'default_dir' value (Note that 'asc' is the default value since this is
the default in sqlalchemy.utils.paginate_query)
:param sort_keys: List of sort keys to include in the processed list
:param sort_dirs: List of sort directions to include in the processed list
:param default_keys: List of sort keys that need to be included in the
processed list, they are added at the end of the list if not already
specified.
:param default_dir: Sort direction associated with each of the default
keys that are not supplied, used when they are added to the processed
list
:returns: list of sort keys, list of sort directions
:raise exception.InvalidInput: If more sort directions than sort keys
are specified or if an invalid sort direction is specified
"""
# Determine direction to use for when adding default keys
if sort_dirs and len(sort_dirs) != 0:
default_dir_value = sort_dirs[0]
else:
default_dir_value = default_dir
# Create list of keys (do not modify the input list)
if sort_keys:
result_keys = list(sort_keys)
else:
result_keys = []
# If a list of directions is not provided, use the default sort direction
# for all provided keys
if sort_dirs:
result_dirs = []
# Verify sort direction
for sort_dir in sort_dirs:
if sort_dir not in ('asc', 'desc'):
msg = _("Unknown sort direction, must be 'desc' or 'asc'")
raise exception.InvalidInput(reason=msg)
result_dirs.append(sort_dir)
else:
result_dirs = [default_dir_value for _sort_key in result_keys]
# Ensure that the key and direction length match
while len(result_dirs) < len(result_keys):
result_dirs.append(default_dir_value)
# Unless more directions are specified, which is an error
if len(result_dirs) > len(result_keys):
msg = _("Sort direction size exceeds sort key size")
raise exception.InvalidInput(reason=msg)
# Ensure defaults are included
for key in default_keys:
if key not in result_keys:
result_keys.append(key)
result_dirs.append(default_dir_value)
return result_keys, result_dirs
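
A small worked example of 'process_sort_params' (a sketch based on the
docstring above; the input values are made up):

from nova.db import utils as db_utils

# Default keys are appended using the first supplied direction.
keys, dirs = db_utils.process_sort_params(['display_name'], ['desc'])
# keys == ['display_name', 'created_at', 'id']
# dirs == ['desc', 'desc', 'desc']

# With no keys or directions, only the defaults are returned.
keys, dirs = db_utils.process_sort_params(None, None, default_dir='desc')
# keys == ['created_at', 'id']
# dirs == ['desc', 'desc']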

View File

@ -19,8 +19,8 @@ from oslo_utils import uuidutils
from sqlalchemy import orm
from nova.compute import utils as compute_utils
from nova.db.api import api as api_db_api
from nova.db.api import models as api_models
from nova.db.main import api as db_api
from nova import exception
from nova.i18n import _
from nova import objects
@ -32,7 +32,7 @@ LOG = logging.getLogger(__name__)
DEPRECATED_FIELDS = ['deleted', 'deleted_at']
@db_api.api_context_manager.reader
@api_db_api.context_manager.reader
def _aggregate_get_from_db(context, aggregate_id):
query = context.session.query(api_models.Aggregate).\
options(orm.joinedload('_hosts')).\
@ -47,7 +47,7 @@ def _aggregate_get_from_db(context, aggregate_id):
return aggregate
@db_api.api_context_manager.reader
@api_db_api.context_manager.reader
def _aggregate_get_from_db_by_uuid(context, aggregate_uuid):
query = context.session.query(api_models.Aggregate).\
options(orm.joinedload('_hosts')).\
@ -64,7 +64,7 @@ def _aggregate_get_from_db_by_uuid(context, aggregate_uuid):
def _host_add_to_db(context, aggregate_id, host):
try:
with db_api.api_context_manager.writer.using(context):
with api_db_api.context_manager.writer.using(context):
# Check to see if the aggregate exists
_aggregate_get_from_db(context, aggregate_id)
@ -79,7 +79,7 @@ def _host_add_to_db(context, aggregate_id, host):
def _host_delete_from_db(context, aggregate_id, host):
count = 0
with db_api.api_context_manager.writer.using(context):
with api_db_api.context_manager.writer.using(context):
# Check to see if the aggregate exists
_aggregate_get_from_db(context, aggregate_id)
@ -98,7 +98,7 @@ def _metadata_add_to_db(context, aggregate_id, metadata, max_retries=10,
all_keys = metadata.keys()
for attempt in range(max_retries):
try:
with db_api.api_context_manager.writer.using(context):
with api_db_api.context_manager.writer.using(context):
query = context.session.query(api_models.AggregateMetadata).\
filter_by(aggregate_id=aggregate_id)
@ -142,7 +142,7 @@ def _metadata_add_to_db(context, aggregate_id, metadata, max_retries=10,
LOG.warning(msg)
@db_api.api_context_manager.writer
@api_db_api.context_manager.writer
def _metadata_delete_from_db(context, aggregate_id, key):
# Check to see if the aggregate exists
_aggregate_get_from_db(context, aggregate_id)
@ -157,7 +157,7 @@ def _metadata_delete_from_db(context, aggregate_id, key):
aggregate_id=aggregate_id, metadata_key=key)
@db_api.api_context_manager.writer
@api_db_api.context_manager.writer
def _aggregate_create_in_db(context, values, metadata=None):
query = context.session.query(api_models.Aggregate)
query = query.filter(api_models.Aggregate.name == values['name'])
@ -181,7 +181,7 @@ def _aggregate_create_in_db(context, values, metadata=None):
return aggregate
@db_api.api_context_manager.writer
@api_db_api.context_manager.writer
def _aggregate_delete_from_db(context, aggregate_id):
# Delete Metadata first
context.session.query(api_models.AggregateMetadata).\
@ -196,7 +196,7 @@ def _aggregate_delete_from_db(context, aggregate_id):
raise exception.AggregateNotFound(aggregate_id=aggregate_id)
@db_api.api_context_manager.writer
@api_db_api.context_manager.writer
def _aggregate_update_to_db(context, aggregate_id, values):
aggregate = _aggregate_get_from_db(context, aggregate_id)
@ -411,7 +411,7 @@ class Aggregate(base.NovaPersistentObject, base.NovaObject):
return self.metadata.get('availability_zone', None)
@db_api.api_context_manager.reader
@api_db_api.context_manager.reader
def _get_all_from_db(context):
query = context.session.query(api_models.Aggregate).\
options(orm.joinedload('_hosts')).\
@ -420,7 +420,7 @@ def _get_all_from_db(context):
return query.all()
@db_api.api_context_manager.reader
@api_db_api.context_manager.reader
def _get_by_host_from_db(context, host, key=None):
query = context.session.query(api_models.Aggregate).\
options(orm.joinedload('_hosts')).\
@ -435,7 +435,7 @@ def _get_by_host_from_db(context, host, key=None):
return query.all()
@db_api.api_context_manager.reader
@api_db_api.context_manager.reader
def _get_by_metadata_from_db(context, key=None, value=None):
assert(key is not None or value is not None)
query = context.session.query(api_models.Aggregate)
@ -450,7 +450,7 @@ def _get_by_metadata_from_db(context, key=None, value=None):
return query.all()
@db_api.api_context_manager.reader
@api_db_api.context_manager.reader
def _get_non_matching_by_metadata_keys_from_db(context, ignored_keys,
key_prefix, value):
"""Filter aggregates based on non matching metadata.

View File

@ -18,8 +18,9 @@ from oslo_serialization import jsonutils
from oslo_utils import versionutils
from oslo_versionedobjects import exception as ovoo_exc
from nova.db.api import api as api_db_api
from nova.db.api import models as api_models
from nova.db.main import api as db
from nova.db import utils as db_utils
from nova import exception
from nova import objects
from nova.objects import base
@ -163,7 +164,7 @@ class BuildRequest(base.NovaObject):
return req
@staticmethod
@db.api_context_manager.reader
@api_db_api.context_manager.reader
def _get_by_instance_uuid_from_db(context, instance_uuid):
db_req = context.session.query(api_models.BuildRequest).filter_by(
instance_uuid=instance_uuid).first()
@ -177,7 +178,7 @@ class BuildRequest(base.NovaObject):
return cls._from_db_object(context, cls(), db_req)
@staticmethod
@db.api_context_manager.writer
@api_db_api.context_manager.writer
def _create_in_db(context, updates):
db_req = api_models.BuildRequest()
db_req.update(updates)
@ -206,7 +207,7 @@ class BuildRequest(base.NovaObject):
self._from_db_object(self._context, self, db_req)
@staticmethod
@db.api_context_manager.writer
@api_db_api.context_manager.writer
def _destroy_in_db(context, instance_uuid):
result = context.session.query(api_models.BuildRequest).filter_by(
instance_uuid=instance_uuid).delete()
@ -217,7 +218,7 @@ class BuildRequest(base.NovaObject):
def destroy(self):
self._destroy_in_db(self._context, self.instance_uuid)
@db.api_context_manager.writer
@api_db_api.context_manager.writer
def _save_in_db(self, context, req_id, updates):
db_req = context.session.query(
api_models.BuildRequest).filter_by(id=req_id).first()
@ -262,7 +263,7 @@ class BuildRequestList(base.ObjectListBase, base.NovaObject):
}
@staticmethod
@db.api_context_manager.reader
@api_db_api.context_manager.reader
def _get_all_from_db(context):
query = context.session.query(api_models.BuildRequest)
@ -396,8 +397,8 @@ class BuildRequestList(base.ObjectListBase, base.NovaObject):
# exists. So it can be ignored.
# 'deleted' and 'cleaned' are handled above.
sort_keys, sort_dirs = db.process_sort_params(sort_keys, sort_dirs,
default_dir='desc')
sort_keys, sort_dirs = db_utils.process_sort_params(
sort_keys, sort_dirs, default_dir='desc')
# For other filters that don't match this, we will do regexp matching
# Taken from db/sqlalchemy/api.py

View File

@ -18,8 +18,8 @@ from sqlalchemy import sql
from sqlalchemy.sql import expression
import nova.conf
from nova.db.api import models as api_models
from nova.db.main import api as db_api
from nova.db.api import api as api_db_api
from nova.db.api import models as api_db_models
from nova import exception
from nova.objects import base
from nova.objects import fields
@ -168,11 +168,11 @@ class CellMapping(base.NovaTimestampObject, base.NovaObject):
return cell_mapping
@staticmethod
@db_api.api_context_manager.reader
@api_db_api.context_manager.reader
def _get_by_uuid_from_db(context, uuid):
db_mapping = context.session.query(api_models.CellMapping).filter_by(
uuid=uuid).first()
db_mapping = context.session\
.query(api_db_models.CellMapping).filter_by(uuid=uuid).first()
if not db_mapping:
raise exception.CellMappingNotFound(uuid=uuid)
@ -185,10 +185,10 @@ class CellMapping(base.NovaTimestampObject, base.NovaObject):
return cls._from_db_object(context, cls(), db_mapping)
@staticmethod
@db_api.api_context_manager.writer
@api_db_api.context_manager.writer
def _create_in_db(context, updates):
db_mapping = api_models.CellMapping()
db_mapping = api_db_models.CellMapping()
db_mapping.update(updates)
db_mapping.save(context.session)
return db_mapping
@ -199,11 +199,11 @@ class CellMapping(base.NovaTimestampObject, base.NovaObject):
self._from_db_object(self._context, self, db_mapping)
@staticmethod
@db_api.api_context_manager.writer
@api_db_api.context_manager.writer
def _save_in_db(context, uuid, updates):
db_mapping = context.session.query(
api_models.CellMapping).filter_by(uuid=uuid).first()
api_db_models.CellMapping).filter_by(uuid=uuid).first()
if not db_mapping:
raise exception.CellMappingNotFound(uuid=uuid)
@ -219,10 +219,10 @@ class CellMapping(base.NovaTimestampObject, base.NovaObject):
self.obj_reset_changes()
@staticmethod
@db_api.api_context_manager.writer
@api_db_api.context_manager.writer
def _destroy_in_db(context, uuid):
result = context.session.query(api_models.CellMapping).filter_by(
result = context.session.query(api_db_models.CellMapping).filter_by(
uuid=uuid).delete()
if not result:
raise exception.CellMappingNotFound(uuid=uuid)
@ -246,10 +246,10 @@ class CellMappingList(base.ObjectListBase, base.NovaObject):
}
@staticmethod
@db_api.api_context_manager.reader
@api_db_api.context_manager.reader
def _get_all_from_db(context):
return context.session.query(api_models.CellMapping).order_by(
expression.asc(api_models.CellMapping.id)).all()
return context.session.query(api_db_models.CellMapping).order_by(
expression.asc(api_db_models.CellMapping.id)).all()
@base.remotable_classmethod
def get_all(cls, context):
@ -257,16 +257,16 @@ class CellMappingList(base.ObjectListBase, base.NovaObject):
return base.obj_make_list(context, cls(), CellMapping, db_mappings)
@staticmethod
@db_api.api_context_manager.reader
@api_db_api.context_manager.reader
def _get_by_disabled_from_db(context, disabled):
if disabled:
return context.session.query(api_models.CellMapping)\
return context.session.query(api_db_models.CellMapping)\
.filter_by(disabled=sql.true())\
.order_by(expression.asc(api_models.CellMapping.id)).all()
.order_by(expression.asc(api_db_models.CellMapping.id)).all()
else:
return context.session.query(api_models.CellMapping)\
return context.session.query(api_db_models.CellMapping)\
.filter_by(disabled=sql.false())\
.order_by(expression.asc(api_models.CellMapping.id)).all()
.order_by(expression.asc(api_db_models.CellMapping.id)).all()
@base.remotable_classmethod
def get_by_disabled(cls, context, disabled):
@ -274,16 +274,16 @@ class CellMappingList(base.ObjectListBase, base.NovaObject):
return base.obj_make_list(context, cls(), CellMapping, db_mappings)
@staticmethod
@db_api.api_context_manager.reader
@api_db_api.context_manager.reader
def _get_by_project_id_from_db(context, project_id):
# SELECT DISTINCT cell_id FROM instance_mappings \
# WHERE project_id = $project_id;
cell_ids = context.session.query(
api_models.InstanceMapping.cell_id).filter_by(
api_db_models.InstanceMapping.cell_id).filter_by(
project_id=project_id).distinct().subquery()
# SELECT cell_mappings WHERE cell_id IN ($cell_ids);
return context.session.query(api_models.CellMapping).filter(
api_models.CellMapping.id.in_(cell_ids)).all()
return context.session.query(api_db_models.CellMapping).filter(
api_db_models.CellMapping.id.in_(cell_ids)).all()
@classmethod
def get_by_project_id(cls, context, project_id):

View File

@ -21,8 +21,9 @@ from sqlalchemy import sql
from sqlalchemy.sql import expression
import nova.conf
from nova.db.api import api as api_db_api
from nova.db.api import models as api_models
from nova.db.main import api as db_api
from nova.db import utils as db_utils
from nova import exception
from nova.notifications.objects import base as notification
from nova.notifications.objects import flavor as flavor_notification
@ -51,7 +52,7 @@ def _dict_with_extra_specs(flavor_model):
# decorators with static methods. We pull these out for now and can
# move them back into the actual staticmethods on the object when those
# issues are resolved.
@db_api.api_context_manager.reader
@api_db_api.context_manager.reader
def _get_projects_from_db(context, flavorid):
db_flavor = context.session.query(api_models.Flavors).\
filter_by(flavorid=flavorid).\
@ -62,7 +63,7 @@ def _get_projects_from_db(context, flavorid):
return [x['project_id'] for x in db_flavor['projects']]
@db_api.api_context_manager.writer
@api_db_api.context_manager.writer
def _flavor_add_project(context, flavor_id, project_id):
project = api_models.FlavorProjects()
project.update({'flavor_id': flavor_id,
@ -74,7 +75,7 @@ def _flavor_add_project(context, flavor_id, project_id):
project_id=project_id)
@db_api.api_context_manager.writer
@api_db_api.context_manager.writer
def _flavor_del_project(context, flavor_id, project_id):
result = context.session.query(api_models.FlavorProjects).\
filter_by(project_id=project_id).\
@ -85,9 +86,9 @@ def _flavor_del_project(context, flavor_id, project_id):
project_id=project_id)
@db_api.api_context_manager.writer
@api_db_api.context_manager.writer
def _flavor_extra_specs_add(context, flavor_id, specs, max_retries=10):
writer = db_api.api_context_manager.writer
writer = api_db_api.context_manager.writer
for attempt in range(max_retries):
try:
spec_refs = context.session.query(
@ -122,7 +123,7 @@ def _flavor_extra_specs_add(context, flavor_id, specs, max_retries=10):
id=flavor_id, retries=max_retries)
@db_api.api_context_manager.writer
@api_db_api.context_manager.writer
def _flavor_extra_specs_del(context, flavor_id, key):
result = context.session.query(api_models.FlavorExtraSpecs).\
filter_by(flavor_id=flavor_id).\
@ -133,7 +134,7 @@ def _flavor_extra_specs_del(context, flavor_id, key):
extra_specs_key=key, flavor_id=flavor_id)
@db_api.api_context_manager.writer
@api_db_api.context_manager.writer
def _flavor_create(context, values):
specs = values.get('extra_specs')
db_specs = []
@ -169,7 +170,7 @@ def _flavor_create(context, values):
return _dict_with_extra_specs(db_flavor)
@db_api.api_context_manager.writer
@api_db_api.context_manager.writer
def _flavor_destroy(context, flavor_id=None, flavorid=None):
query = context.session.query(api_models.Flavors)
@ -268,7 +269,7 @@ class Flavor(base.NovaPersistentObject, base.NovaObject,
return flavor
@staticmethod
@db_api.api_context_manager.reader
@api_db_api.context_manager.reader
def _flavor_get_query_from_db(context):
query = context.session.query(api_models.Flavors).\
options(orm.joinedload('extra_specs'))
@ -281,7 +282,7 @@ class Flavor(base.NovaPersistentObject, base.NovaObject,
return query
@staticmethod
@db_api.require_context
@db_utils.require_context
def _flavor_get_from_db(context, id):
"""Returns a dict describing specific flavor."""
result = Flavor._flavor_get_query_from_db(context).\
@ -292,7 +293,7 @@ class Flavor(base.NovaPersistentObject, base.NovaObject,
return _dict_with_extra_specs(result)
@staticmethod
@db_api.require_context
@db_utils.require_context
def _flavor_get_by_name_from_db(context, name):
"""Returns a dict describing specific flavor."""
result = Flavor._flavor_get_query_from_db(context).\
@ -303,7 +304,7 @@ class Flavor(base.NovaPersistentObject, base.NovaObject,
return _dict_with_extra_specs(result)
@staticmethod
@db_api.require_context
@db_utils.require_context
def _flavor_get_by_flavor_id_from_db(context, flavor_id):
"""Returns a dict describing specific flavor_id."""
result = Flavor._flavor_get_query_from_db(context).\
@ -485,7 +486,7 @@ class Flavor(base.NovaPersistentObject, base.NovaObject,
# NOTE(mriedem): This method is not remotable since we only expect the API
# to be able to make updates to a flavor.
@db_api.api_context_manager.writer
@api_db_api.context_manager.writer
def _save(self, context, values):
db_flavor = context.session.query(api_models.Flavors).\
filter_by(id=self.id).first()
@ -581,7 +582,7 @@ class Flavor(base.NovaPersistentObject, base.NovaObject,
payload=payload).emit(self._context)
@db_api.api_context_manager.reader
@api_db_api.context_manager.reader
def _flavor_get_all_from_db(context, inactive, filters, sort_key, sort_dir,
limit, marker):
"""Returns all flavors.

View File

@ -14,8 +14,8 @@ from oslo_db import exception as db_exc
from sqlalchemy import orm
from nova import context
from nova.db.api import api as api_db_api
from nova.db.api import models as api_models
from nova.db.main import api as db_api
from nova import exception
from nova.i18n import _
from nova.objects import base
@ -52,7 +52,7 @@ class HostMapping(base.NovaTimestampObject, base.NovaObject):
}
def _get_cell_mapping(self):
with db_api.api_context_manager.reader.using(self._context) as session:
with api_db_api.context_manager.reader.using(self._context) as session:
cell_map = (session.query(api_models.CellMapping)
.join(api_models.HostMapping)
.filter(api_models.HostMapping.host == self.host)
@ -87,7 +87,7 @@ class HostMapping(base.NovaTimestampObject, base.NovaObject):
return host_mapping
@staticmethod
@db_api.api_context_manager.reader
@api_db_api.context_manager.reader
def _get_by_host_from_db(context, host):
db_mapping = context.session.query(api_models.HostMapping)\
.options(orm.joinedload('cell_mapping'))\
@ -102,7 +102,7 @@ class HostMapping(base.NovaTimestampObject, base.NovaObject):
return cls._from_db_object(context, cls(), db_mapping)
@staticmethod
@db_api.api_context_manager.writer
@api_db_api.context_manager.writer
def _create_in_db(context, updates):
db_mapping = api_models.HostMapping()
return _apply_updates(context, db_mapping, updates)
@ -116,7 +116,7 @@ class HostMapping(base.NovaTimestampObject, base.NovaObject):
self._from_db_object(self._context, self, db_mapping)
@staticmethod
@db_api.api_context_manager.writer
@api_db_api.context_manager.writer
def _save_in_db(context, obj, updates):
db_mapping = context.session.query(api_models.HostMapping).filter_by(
id=obj.id).first()
@ -134,7 +134,7 @@ class HostMapping(base.NovaTimestampObject, base.NovaObject):
self.obj_reset_changes()
@staticmethod
@db_api.api_context_manager.writer
@api_db_api.context_manager.writer
def _destroy_in_db(context, host):
result = context.session.query(api_models.HostMapping).filter_by(
host=host).delete()
@ -157,7 +157,7 @@ class HostMappingList(base.ObjectListBase, base.NovaObject):
}
@staticmethod
@db_api.api_context_manager.reader
@api_db_api.context_manager.reader
def _get_from_db(context, cell_id=None):
query = (context.session.query(api_models.HostMapping)
.options(orm.joinedload('cell_mapping')))

View File

@ -22,8 +22,8 @@ from oslo_utils import versionutils
from sqlalchemy import orm
from nova.compute import utils as compute_utils
from nova.db.api import api as api_db_api
from nova.db.api import models as api_models
from nova.db.main import api as db_api
from nova import exception
from nova import objects
from nova.objects import base
@ -213,7 +213,7 @@ class InstanceGroup(base.NovaPersistentObject, base.NovaObject,
return instance_group
@staticmethod
@db_api.api_context_manager.reader
@api_db_api.context_manager.reader
def _get_from_db_by_uuid(context, uuid):
grp = _instance_group_get_query(context,
id_field=api_models.InstanceGroup.uuid,
@ -223,7 +223,7 @@ class InstanceGroup(base.NovaPersistentObject, base.NovaObject,
return grp
@staticmethod
@db_api.api_context_manager.reader
@api_db_api.context_manager.reader
def _get_from_db_by_id(context, id):
grp = _instance_group_get_query(context,
id_field=api_models.InstanceGroup.id,
@ -233,7 +233,7 @@ class InstanceGroup(base.NovaPersistentObject, base.NovaObject,
return grp
@staticmethod
@db_api.api_context_manager.reader
@api_db_api.context_manager.reader
def _get_from_db_by_name(context, name):
grp = _instance_group_get_query(context).filter_by(name=name).first()
if not grp:
@ -241,7 +241,7 @@ class InstanceGroup(base.NovaPersistentObject, base.NovaObject,
return grp
@staticmethod
@db_api.api_context_manager.reader
@api_db_api.context_manager.reader
def _get_from_db_by_instance(context, instance_uuid):
grp_member = context.session.query(api_models.InstanceGroupMember).\
filter_by(instance_uuid=instance_uuid).first()
@ -251,7 +251,7 @@ class InstanceGroup(base.NovaPersistentObject, base.NovaObject,
return grp
@staticmethod
@db_api.api_context_manager.writer
@api_db_api.context_manager.writer
def _save_in_db(context, group_uuid, values):
grp = InstanceGroup._get_from_db_by_uuid(context, group_uuid)
values_copy = copy.copy(values)
@ -265,7 +265,7 @@ class InstanceGroup(base.NovaPersistentObject, base.NovaObject,
return grp
@staticmethod
@db_api.api_context_manager.writer
@api_db_api.context_manager.writer
def _create_in_db(context, values, policies=None, members=None,
policy=None, rules=None):
try:
@ -301,7 +301,7 @@ class InstanceGroup(base.NovaPersistentObject, base.NovaObject,
return group
@staticmethod
@db_api.api_context_manager.writer
@api_db_api.context_manager.writer
def _destroy_in_db(context, group_uuid):
qry = _instance_group_get_query(context,
id_field=api_models.InstanceGroup.uuid,
@ -319,13 +319,13 @@ class InstanceGroup(base.NovaPersistentObject, base.NovaObject,
qry.delete()
@staticmethod
@db_api.api_context_manager.writer
@api_db_api.context_manager.writer
def _add_members_in_db(context, group_uuid, members):
return _instance_group_members_add_by_uuid(context, group_uuid,
members)
@staticmethod
@db_api.api_context_manager.writer
@api_db_api.context_manager.writer
def _remove_members_in_db(context, group_id, instance_uuids):
# There is no public method provided for removing members because the
# user-facing API doesn't allow removal of instance group members. We
@ -337,7 +337,7 @@ class InstanceGroup(base.NovaPersistentObject, base.NovaObject,
delete(synchronize_session=False)
@staticmethod
@db_api.api_context_manager.writer
@api_db_api.context_manager.writer
def _destroy_members_bulk_in_db(context, instance_uuids):
return context.session.query(api_models.InstanceGroupMember).filter(
api_models.InstanceGroupMember.instance_uuid.in_(instance_uuids)).\
@ -537,7 +537,7 @@ class InstanceGroupList(base.ObjectListBase, base.NovaObject):
}
@staticmethod
@db_api.api_context_manager.reader
@api_db_api.context_manager.reader
def _get_from_db(context, project_id=None):
query = _instance_group_get_query(context)
if project_id is not None:
@ -545,7 +545,7 @@ class InstanceGroupList(base.ObjectListBase, base.NovaObject):
return query.all()
@staticmethod
@db_api.api_context_manager.reader
@api_db_api.context_manager.reader
def _get_counts_from_db(context, project_id, user_id=None):
query = context.session.query(api_models.InstanceGroup.id).\
filter_by(project_id=project_id)

View File

@ -20,8 +20,8 @@ from sqlalchemy import sql
from sqlalchemy.sql import func
from nova import context as nova_context
from nova.db.api import api as api_db_api
from nova.db.api import models as api_models
from nova.db.main import api as db_api
from nova import exception
from nova.i18n import _
from nova import objects
@ -96,7 +96,7 @@ class InstanceMapping(base.NovaTimestampObject, base.NovaObject):
return instance_mapping
@staticmethod
@db_api.api_context_manager.reader
@api_db_api.context_manager.reader
def _get_by_instance_uuid_from_db(context, instance_uuid):
db_mapping = context.session.query(api_models.InstanceMapping)\
.options(orm.joinedload('cell_mapping'))\
@ -113,7 +113,7 @@ class InstanceMapping(base.NovaTimestampObject, base.NovaObject):
return cls._from_db_object(context, cls(), db_mapping)
@staticmethod
@db_api.api_context_manager.writer
@api_db_api.context_manager.writer
def _create_in_db(context, updates):
db_mapping = api_models.InstanceMapping()
db_mapping.update(updates)
@ -138,7 +138,7 @@ class InstanceMapping(base.NovaTimestampObject, base.NovaObject):
self._from_db_object(self._context, self, db_mapping)
@staticmethod
@db_api.api_context_manager.writer
@api_db_api.context_manager.writer
def _save_in_db(context, instance_uuid, updates):
db_mapping = context.session.query(
api_models.InstanceMapping).filter_by(
@ -173,7 +173,7 @@ class InstanceMapping(base.NovaTimestampObject, base.NovaObject):
self.obj_reset_changes()
@staticmethod
@db_api.api_context_manager.writer
@api_db_api.context_manager.writer
def _destroy_in_db(context, instance_uuid):
result = context.session.query(api_models.InstanceMapping).filter_by(
instance_uuid=instance_uuid).delete()
@ -185,7 +185,7 @@ class InstanceMapping(base.NovaTimestampObject, base.NovaObject):
self._destroy_in_db(self._context, self.instance_uuid)
@db_api.api_context_manager.writer
@api_db_api.context_manager.writer
def populate_queued_for_delete(context, max_count):
cells = objects.CellMappingList.get_all(context)
processed = 0
@ -229,7 +229,7 @@ def populate_queued_for_delete(context, max_count):
return processed, processed
@db_api.api_context_manager.writer
@api_db_api.context_manager.writer
def populate_user_id(context, max_count):
cells = objects.CellMappingList.get_all(context)
cms_by_id = {cell.id: cell for cell in cells}
@ -309,7 +309,7 @@ class InstanceMappingList(base.ObjectListBase, base.NovaObject):
}
@staticmethod
@db_api.api_context_manager.reader
@api_db_api.context_manager.reader
def _get_by_project_id_from_db(context, project_id):
return context.session.query(api_models.InstanceMapping)\
.options(orm.joinedload('cell_mapping'))\
@ -323,7 +323,7 @@ class InstanceMappingList(base.ObjectListBase, base.NovaObject):
db_mappings)
@staticmethod
@db_api.api_context_manager.reader
@api_db_api.context_manager.reader
def _get_by_cell_id_from_db(context, cell_id):
return context.session.query(api_models.InstanceMapping)\
.options(orm.joinedload('cell_mapping'))\
@ -336,7 +336,7 @@ class InstanceMappingList(base.ObjectListBase, base.NovaObject):
db_mappings)
@staticmethod
@db_api.api_context_manager.reader
@api_db_api.context_manager.reader
def _get_by_instance_uuids_from_db(context, uuids):
return context.session.query(api_models.InstanceMapping)\
.options(orm.joinedload('cell_mapping'))\
@ -350,7 +350,7 @@ class InstanceMappingList(base.ObjectListBase, base.NovaObject):
db_mappings)
@staticmethod
@db_api.api_context_manager.writer
@api_db_api.context_manager.writer
def _destroy_bulk_in_db(context, instance_uuids):
return context.session.query(api_models.InstanceMapping).filter(
api_models.InstanceMapping.instance_uuid.in_(instance_uuids)).\
@ -361,7 +361,7 @@ class InstanceMappingList(base.ObjectListBase, base.NovaObject):
return cls._destroy_bulk_in_db(context, instance_uuids)
@staticmethod
@db_api.api_context_manager.reader
@api_db_api.context_manager.reader
def _get_not_deleted_by_cell_and_project_from_db(context, cell_uuid,
project_id, limit):
query = context.session.query(api_models.InstanceMapping)
@ -400,7 +400,7 @@ class InstanceMappingList(base.ObjectListBase, base.NovaObject):
db_mappings)
@staticmethod
@db_api.api_context_manager.reader
@api_db_api.context_manager.reader
def _get_counts_in_db(context, project_id, user_id=None):
project_query = context.session.query(
func.count(api_models.InstanceMapping.id)).\
@ -435,7 +435,7 @@ class InstanceMappingList(base.ObjectListBase, base.NovaObject):
return cls._get_counts_in_db(context, project_id, user_id=user_id)
@staticmethod
@db_api.api_context_manager.reader
@api_db_api.context_manager.reader
def _get_count_by_uuids_and_user_in_db(context, uuids, user_id):
query = (context.session.query(