enginefacade: 'compute_node'

Use enginefacade in 'compute_node' section.

Implements: blueprint new-oslodb-enginefacade

Co-Authored-By: Sergey Nikitin <snikitin@mirantis.com>

Change-Id: I298acd647629e699e661bfbbff6dbb1f71ca57be
Author: Pavel Kholkin
Date: 2016-01-22 15:43:38 +03:00
Committed-by: Sergey Nikitin
Parent: 1ab0ee1258
Commit: ee70e7bb5e
4 changed files with 39 additions and 32 deletions
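
For context on the blueprint: oslo.db's enginefacade replaces explicit get_session()/session.begin() calls with reader/writer decorators that attach a transaction-scoped session to the request context. The sketch below is a minimal, self-contained illustration of that pattern, not nova code: the ComputeNode model, the Context class, and the sqlite connection string are stand-ins, and only the decorator usage mirrors the hunks that follow.

from oslo_db.sqlalchemy import enginefacade
from sqlalchemy import Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base

BASE = declarative_base()


class ComputeNode(BASE):
    """Stand-in for nova.db.sqlalchemy.models.ComputeNode."""
    __tablename__ = 'compute_nodes'
    id = Column(Integer, primary_key=True)
    host = Column(String(255))


@enginefacade.transaction_context_provider
class Context(object):
    """Stand-in for nova.context.RequestContext."""


main_context_manager = enginefacade.transaction_context()
main_context_manager.configure(connection='sqlite://')


@main_context_manager.writer
def compute_node_create(context, host):
    # The writer decorator opens a transaction and exposes the session as
    # context.session; no explicit get_session()/session.begin() is needed.
    node = ComputeNode(host=host)
    context.session.add(node)
    return node


@main_context_manager.reader
def compute_node_get_all_by_host(context, host):
    # Readers get their session the same way.
    return context.session.query(ComputeNode).filter_by(host=host).all()


if __name__ == '__main__':
    ctxt = Context()
    with main_context_manager.writer.using(ctxt):
        BASE.metadata.create_all(ctxt.session.get_bind())
        # Nested decorated calls join the transaction already in progress
        # for this context; a reader may nest inside a writer.
        compute_node_create(ctxt, 'host1')
        print(len(compute_node_get_all_by_host(ctxt, 'host1')))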


@@ -227,7 +227,7 @@ def compute_node_get_all(context):
     return IMPL.compute_node_get_all(context)
 
 
-def compute_node_get_all_by_host(context, host, use_slave=False):
+def compute_node_get_all_by_host(context, host):
     """Get compute nodes by host name
 
     :param context: The security context (admin)
@@ -235,7 +235,7 @@ def compute_node_get_all_by_host(context, host, use_slave=False):
 
     :returns: List of dictionaries each containing compute node properties
     """
-    return IMPL.compute_node_get_all_by_host(context, host, use_slave)
+    return IMPL.compute_node_get_all_by_host(context, host)
 
 
 def compute_node_search_by_hypervisor(context, hypervisor_match):
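
The visible effect for callers of the public DB API: the use_slave flag is gone from the signature, and replica selection moves up to the objects layer (see the ComputeNodeList hunk further down). An assumed before/after call site, for illustration only:

# before: the replica choice leaked into the DB API signature
nodes = db.compute_node_get_all_by_host(context, host, use_slave=True)

# after: only context and host; sync vs. slave reader is chosen by the
# enginefacade reader mode selected higher up the stack
nodes = db.compute_node_get_all_by_host(context, host)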


@@ -567,12 +567,9 @@ def service_update(context, service_id, values):
 ###################
 
 
+@main_context_manager.reader
 def compute_node_get(context, compute_id):
-    return _compute_node_get(context, compute_id)
-
-
-def _compute_node_get(context, compute_id, session=None):
-    result = model_query(context, models.ComputeNode, session=session).\
+    result = model_query(context, models.ComputeNode).\
             filter_by(id=compute_id).\
             first()
@@ -582,6 +579,7 @@ def _compute_node_get(context, compute_id, session=None):
     return result
 
 
+@main_context_manager.reader
 def compute_nodes_get_by_service_id(context, service_id):
     result = model_query(context, models.ComputeNode, read_deleted='no').\
                 filter_by(service_id=service_id).\
@@ -593,6 +591,7 @@ def compute_nodes_get_by_service_id(context, service_id):
     return result
 
 
+@main_context_manager.reader
 def compute_node_get_by_host_and_nodename(context, host, nodename):
     result = model_query(context, models.ComputeNode, read_deleted='no').\
                 filter_by(host=host, hypervisor_hostname=nodename).\
@@ -604,9 +603,9 @@ def compute_node_get_by_host_and_nodename(context, host, nodename):
     return result
 
 
-def compute_node_get_all_by_host(context, host, use_slave=False):
-    result = model_query(context, models.ComputeNode, read_deleted='no',
-                         use_slave=use_slave).\
+@main_context_manager.reader.allow_async
+def compute_node_get_all_by_host(context, host):
+    result = model_query(context, models.ComputeNode, read_deleted='no').\
                 filter_by(host=host).\
                 all()
@@ -616,10 +615,12 @@ def compute_node_get_all_by_host(context, host, use_slave=False):
     return result
 
 
+@main_context_manager.reader
 def compute_node_get_all(context):
     return model_query(context, models.ComputeNode, read_deleted='no').all()
 
 
+@main_context_manager.reader
 def compute_node_search_by_hypervisor(context, hypervisor_match):
     field = models.ComputeNode.hypervisor_hostname
     return model_query(context, models.ComputeNode).\
@@ -627,6 +628,7 @@ def compute_node_search_by_hypervisor(context, hypervisor_match):
                 all()
 
 
+@main_context_manager.writer
 def compute_node_create(context, values):
     """Creates a new ComputeNode and populates the capacity fields
     with the most recent data.
@@ -635,40 +637,39 @@ def compute_node_create(context, values):
     compute_node_ref = models.ComputeNode()
     compute_node_ref.update(values)
-    compute_node_ref.save()
+    compute_node_ref.save(context.session)
 
     return compute_node_ref
 
 
 @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True)
+@main_context_manager.writer
 def compute_node_update(context, compute_id, values):
     """Updates the ComputeNode record with the most recent data."""
-    session = get_session()
-    with session.begin():
-        compute_ref = _compute_node_get(context, compute_id, session=session)
-        # Always update this, even if there's going to be no other
-        # changes in data. This ensures that we invalidate the
-        # scheduler cache of compute node data in case of races.
-        values['updated_at'] = timeutils.utcnow()
-        convert_objects_related_datetimes(values)
-        compute_ref.update(values)
+    compute_ref = compute_node_get(context, compute_id)
+    # Always update this, even if there's going to be no other
+    # changes in data. This ensures that we invalidate the
+    # scheduler cache of compute node data in case of races.
+    values['updated_at'] = timeutils.utcnow()
+    convert_objects_related_datetimes(values)
+    compute_ref.update(values)
 
     return compute_ref
 
 
+@main_context_manager.writer
 def compute_node_delete(context, compute_id):
     """Delete a ComputeNode record."""
-    session = get_session()
-    with session.begin():
-        result = model_query(context, models.ComputeNode, session=session).\
-                 filter_by(id=compute_id).\
-                 soft_delete(synchronize_session=False)
+    result = model_query(context, models.ComputeNode).\
+             filter_by(id=compute_id).\
+             soft_delete(synchronize_session=False)
 
-        if not result:
-            raise exception.ComputeHostNotFound(host=compute_id)
+    if not result:
+        raise exception.ComputeHostNotFound(host=compute_id)
 
 
+@main_context_manager.reader
 def compute_node_statistics(context):
     """Compute statistics over all compute nodes."""


@@ -228,6 +228,7 @@ class ComputeNode(base.NovaPersistentObject, base.NovaObject,
             context, host, nodename)
         return cls._from_db_object(context, cls(), db_compute)
 
+    # TODO(pkholkin): Remove this method in the next major version bump
     @base.remotable_classmethod
     def get_first_node_by_host_for_old_compat(cls, context, host,
                                               use_slave=False):
@@ -365,9 +366,14 @@ class ComputeNodeList(base.ObjectListBase, base.NovaObject):
         return base.obj_make_list(context, cls(context), objects.ComputeNode,
                                   db_computes)
 
+    @staticmethod
+    @db.select_db_reader_mode
+    def _db_compute_node_get_all_by_host(context, host, use_slave=False):
+        return db.compute_node_get_all_by_host(context, host)
+
     @base.remotable_classmethod
     def get_all_by_host(cls, context, host, use_slave=False):
-        db_computes = db.compute_node_get_all_by_host(context, host,
-                                                      use_slave)
+        db_computes = cls._db_compute_node_get_all_by_host(context, host,
+                                                           use_slave=use_slave)
         return base.obj_make_list(context, cls(context), objects.ComputeNode,
                                   db_computes)
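
The new _db_compute_node_get_all_by_host helper keeps the use_slave kwarg at the objects layer, where compatibility requires it, while the DB API itself no longer takes it. db.select_db_reader_mode already exists in nova and its implementation is not part of this diff; the snippet below is only a plausible sketch of what such a selector does, mapping use_slave onto enginefacade reader modes, which is exactly what the reader.allow_async decorator in the sqlalchemy hunk is prepared to join (recent oslo.db spells the slave-routed mode async_; older releases used async).

import functools
import inspect

from oslo_db.sqlalchemy import enginefacade

# Stand-in for the module-level context manager in nova.db.sqlalchemy.api.
main_context_manager = enginefacade.transaction_context()


def select_db_reader_mode(f):
    """Hypothetical sketch of a use_slave -> reader-mode selector."""
    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        call_args = inspect.getcallargs(f, *args, **kwargs)
        context = call_args['context']
        use_slave = call_args.get('use_slave', False)

        # async_ routes the transaction to the configured slave database;
        # reader is the normal synchronous reader.  A DB API function
        # decorated with @reader.allow_async can run inside either.
        reader_mode = (main_context_manager.reader.async_ if use_slave
                       else main_context_manager.reader)
        with reader_mode.using(context):
            return f(*args, **kwargs)
    return wrapper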


@@ -7414,9 +7414,9 @@ class ComputeNodeTestCase(test.TestCase, ModelsObjectComparatorMixin):
         node = db.compute_node_create(self.ctxt, compute_node_another_host)
 
-        result = db.compute_node_get_all_by_host(self.ctxt, 'host1', False)
+        result = db.compute_node_get_all_by_host(self.ctxt, 'host1')
         self._assertEqualListsOfObjects([self.item], result)
-        result = db.compute_node_get_all_by_host(self.ctxt, 'host2', False)
+        result = db.compute_node_get_all_by_host(self.ctxt, 'host2')
         self._assertEqualListsOfObjects([node], result)
 
     def test_compute_node_get_all_by_host_with_same_host(self):
@@ -7429,7 +7429,7 @@ class ComputeNodeTestCase(test.TestCase, ModelsObjectComparatorMixin):
         expected = [self.item, node]
         result = sorted(db.compute_node_get_all_by_host(
-                        self.ctxt, 'host1', False),
+                        self.ctxt, 'host1'),
                         key=lambda n: n['hypervisor_hostname'])
 
         self._assertEqualListsOfObjects(expected, result,