enginefacade: 'compute_node'

Use enginefacade in 'compute_node' section.

Implements: blueprint new-oslodb-enginefacade

Co-Authored-By: Sergey Nikitin <snikitin@mirantis.com>

Change-Id: I298acd647629e699e661bfbbff6dbb1f71ca57be
Pavel Kholkin authored 2016-01-22 15:43:38 +03:00; committed by Sergey Nikitin
parent 1ab0ee1258, commit ee70e7bb5e
4 changed files with 39 additions and 32 deletions
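For readers new to the pattern: enginefacade replaces the explicit get_session()/session.begin() plumbing with per-function transaction decorators, and the diffs below apply that to the compute_node DB API. The following is a minimal sketch of the style being adopted, under illustrative names (DemoComputeNode and DemoContext are not Nova code; Nova keeps the real context manager as main_context_manager in its sqlalchemy API module, and its RequestContext is already enginefacade-aware, which is what makes context.session available in the diffs):

    # Minimal sketch of the enginefacade style adopted in this change.
    # Names here are illustrative; engine/table setup is omitted.
    from oslo_db.sqlalchemy import enginefacade
    from sqlalchemy import Column, Integer, String
    from sqlalchemy.ext.declarative import declarative_base

    BASE = declarative_base()


    class DemoComputeNode(BASE):
        __tablename__ = 'demo_compute_nodes'
        id = Column(Integer, primary_key=True)
        host = Column(String(255))


    # One transaction context manager per database; Nova keeps this at
    # module level as main_context_manager.
    main_context_manager = enginefacade.transaction_context()


    @enginefacade.transaction_context_provider
    class DemoContext(object):
        """Stand-in for the request context that carries context.session."""


    @main_context_manager.reader
    def demo_node_get(context, node_id):
        # The decorator opens a read-only transaction and attaches it to the
        # context, so no session argument has to be threaded through.
        return context.session.query(DemoComputeNode).\
            filter_by(id=node_id).\
            first()


    @main_context_manager.writer
    def demo_node_create(context, host):
        # Writer transactions commit when the decorated function returns and
        # roll back if it raises.
        node = DemoComputeNode(host=host)
        context.session.add(node)
        return node

The reader/writer split also lets readers be routed to an asynchronously replicated database, which is what the allow_async decorator further down relies on.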

View File

@@ -227,7 +227,7 @@ def compute_node_get_all(context):
     return IMPL.compute_node_get_all(context)
 
 
-def compute_node_get_all_by_host(context, host, use_slave=False):
+def compute_node_get_all_by_host(context, host):
     """Get compute nodes by host name
 
     :param context: The security context (admin)
@@ -235,7 +235,7 @@ def compute_node_get_all_by_host(context, host, use_slave=False):
     :returns: List of dictionaries each containing compute node properties
     """
-    return IMPL.compute_node_get_all_by_host(context, host, use_slave)
+    return IMPL.compute_node_get_all_by_host(context, host)
 
 
 def compute_node_search_by_hypervisor(context, hypervisor_match):

View File

@@ -567,12 +567,9 @@ def service_update(context, service_id, values):
 ###################
 
 
+@main_context_manager.reader
 def compute_node_get(context, compute_id):
-    return _compute_node_get(context, compute_id)
-
-
-def _compute_node_get(context, compute_id, session=None):
-    result = model_query(context, models.ComputeNode, session=session).\
+    result = model_query(context, models.ComputeNode).\
         filter_by(id=compute_id).\
         first()
@@ -582,6 +579,7 @@ def _compute_node_get(context, compute_id, session=None):
     return result
 
 
+@main_context_manager.reader
 def compute_nodes_get_by_service_id(context, service_id):
     result = model_query(context, models.ComputeNode, read_deleted='no').\
         filter_by(service_id=service_id).\
@@ -593,6 +591,7 @@ def compute_nodes_get_by_service_id(context, service_id):
     return result
 
 
+@main_context_manager.reader
 def compute_node_get_by_host_and_nodename(context, host, nodename):
     result = model_query(context, models.ComputeNode, read_deleted='no').\
         filter_by(host=host, hypervisor_hostname=nodename).\
@@ -604,9 +603,9 @@ def compute_node_get_by_host_and_nodename(context, host, nodename):
     return result
 
 
-def compute_node_get_all_by_host(context, host, use_slave=False):
-    result = model_query(context, models.ComputeNode, read_deleted='no',
-                         use_slave=use_slave).\
+@main_context_manager.reader.allow_async
+def compute_node_get_all_by_host(context, host):
+    result = model_query(context, models.ComputeNode, read_deleted='no').\
         filter_by(host=host).\
         all()
@@ -616,10 +615,12 @@ def compute_node_get_all_by_host(context, host, use_slave=False):
     return result
 
 
+@main_context_manager.reader
 def compute_node_get_all(context):
     return model_query(context, models.ComputeNode, read_deleted='no').all()
 
 
+@main_context_manager.reader
 def compute_node_search_by_hypervisor(context, hypervisor_match):
     field = models.ComputeNode.hypervisor_hostname
     return model_query(context, models.ComputeNode).\
@@ -627,6 +628,7 @@ def compute_node_search_by_hypervisor(context, hypervisor_match):
         all()
 
 
+@main_context_manager.writer
 def compute_node_create(context, values):
     """Creates a new ComputeNode and populates the capacity fields
     with the most recent data.
@@ -635,40 +637,39 @@ def compute_node_create(context, values):
     compute_node_ref = models.ComputeNode()
     compute_node_ref.update(values)
-    compute_node_ref.save()
+    compute_node_ref.save(context.session)
 
     return compute_node_ref
 
 
 @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True)
+@main_context_manager.writer
 def compute_node_update(context, compute_id, values):
     """Updates the ComputeNode record with the most recent data."""
-    session = get_session()
-    with session.begin():
-        compute_ref = _compute_node_get(context, compute_id, session=session)
-        # Always update this, even if there's going to be no other
-        # changes in data. This ensures that we invalidate the
-        # scheduler cache of compute node data in case of races.
-        values['updated_at'] = timeutils.utcnow()
-        convert_objects_related_datetimes(values)
-        compute_ref.update(values)
+    compute_ref = compute_node_get(context, compute_id)
+    # Always update this, even if there's going to be no other
+    # changes in data. This ensures that we invalidate the
+    # scheduler cache of compute node data in case of races.
+    values['updated_at'] = timeutils.utcnow()
+    convert_objects_related_datetimes(values)
+    compute_ref.update(values)
 
     return compute_ref
 
 
+@main_context_manager.writer
 def compute_node_delete(context, compute_id):
     """Delete a ComputeNode record."""
-    session = get_session()
-    with session.begin():
-        result = model_query(context, models.ComputeNode, session=session).\
-            filter_by(id=compute_id).\
-            soft_delete(synchronize_session=False)
+    result = model_query(context, models.ComputeNode).\
+        filter_by(id=compute_id).\
+        soft_delete(synchronize_session=False)
 
-        if not result:
-            raise exception.ComputeHostNotFound(host=compute_id)
+    if not result:
+        raise exception.ComputeHostNotFound(host=compute_id)
 
 
+@main_context_manager.reader
 def compute_node_statistics(context):
     """Compute statistics over all compute nodes."""
View File

@@ -228,6 +228,7 @@ class ComputeNode(base.NovaPersistentObject, base.NovaObject,
             context, host, nodename)
         return cls._from_db_object(context, cls(), db_compute)
 
+    # TODO(pkholkin): Remove this method in the next major version bump
     @base.remotable_classmethod
     def get_first_node_by_host_for_old_compat(cls, context, host,
                                               use_slave=False):
@@ -365,9 +366,14 @@ class ComputeNodeList(base.ObjectListBase, base.NovaObject):
         return base.obj_make_list(context, cls(context), objects.ComputeNode,
                                   db_computes)
 
+    @staticmethod
+    @db.select_db_reader_mode
+    def _db_compute_node_get_all_by_host(context, host, use_slave=False):
+        return db.compute_node_get_all_by_host(context, host)
+
     @base.remotable_classmethod
     def get_all_by_host(cls, context, host, use_slave=False):
-        db_computes = db.compute_node_get_all_by_host(context, host,
-                                                      use_slave)
+        db_computes = cls._db_compute_node_get_all_by_host(context, host,
+                                                           use_slave=use_slave)
         return base.obj_make_list(context, cls(context), objects.ComputeNode,
                                   db_computes)
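The object-layer shim above is what keeps the public signature stable: use_slave remains a keyword of ComputeNodeList.get_all_by_host and is consumed by the db.select_db_reader_mode wrapper instead of being passed down to the DB API. A hypothetical caller, assuming a configured Nova environment (the helper name is made up for illustration):

    from nova import objects


    def list_hypervisor_hostnames(ctxt, host, use_slave=False):
        # use_slave is honoured by the select_db_reader_mode wrapper on the
        # object layer; db.compute_node_get_all_by_host itself no longer
        # takes it.
        nodes = objects.ComputeNodeList.get_all_by_host(ctxt, host,
                                                        use_slave=use_slave)
        return [node.hypervisor_hostname for node in nodes]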

View File

@@ -7414,9 +7414,9 @@ class ComputeNodeTestCase(test.TestCase, ModelsObjectComparatorMixin):
         node = db.compute_node_create(self.ctxt, compute_node_another_host)
 
-        result = db.compute_node_get_all_by_host(self.ctxt, 'host1', False)
+        result = db.compute_node_get_all_by_host(self.ctxt, 'host1')
         self._assertEqualListsOfObjects([self.item], result)
-        result = db.compute_node_get_all_by_host(self.ctxt, 'host2', False)
+        result = db.compute_node_get_all_by_host(self.ctxt, 'host2')
         self._assertEqualListsOfObjects([node], result)
 
     def test_compute_node_get_all_by_host_with_same_host(self):
@@ -7429,7 +7429,7 @@ class ComputeNodeTestCase(test.TestCase, ModelsObjectComparatorMixin):
         expected = [self.item, node]
         result = sorted(db.compute_node_get_all_by_host(
-            self.ctxt, 'host1', False),
+            self.ctxt, 'host1'),
            key=lambda n: n['hypervisor_hostname'])
         self._assertEqualListsOfObjects(expected, result,