Merge "enginefacade: 'compute_node'"
commit b181f68e6b
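
This change moves the compute_node DB API onto oslo.db's enginefacade: reads are wrapped in @main_context_manager.reader (or reader.allow_async for queries that may go to the DB slave), writes in @main_context_manager.writer, and the hand-rolled session/use_slave plumbing is dropped. A minimal sketch of the pattern the hunks below assume (the connection URLs and the stub functions here are placeholders, not Nova's configuration code):

    # Minimal sketch, assuming oslo.db's enginefacade; URLs and stubs are
    # illustrative only. `context` is expected to be an enginefacade-aware
    # RequestContext (Nova's is).
    from oslo_db.sqlalchemy import enginefacade

    main_context_manager = enginefacade.transaction_context()
    main_context_manager.configure(
        connection='mysql+pymysql://nova:pw@db/nova',             # primary
        slave_connection='mysql+pymysql://nova:pw@replica/nova')  # async reader

    @main_context_manager.writer        # one transaction, committed on return
    def create_thing(context, values):
        ...

    @main_context_manager.reader        # read-only transaction on the primary
    def get_thing(context, thing_id):
        ...

    @main_context_manager.reader.allow_async   # may run on slave_connection
    def list_things(context):
        ...
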
@@ -224,7 +224,7 @@ def compute_node_get_all(context):
     return IMPL.compute_node_get_all(context)


-def compute_node_get_all_by_host(context, host, use_slave=False):
+def compute_node_get_all_by_host(context, host):
     """Get compute nodes by host name

     :param context: The security context (admin)
@@ -232,7 +232,7 @@ def compute_node_get_all_by_host(context, host, use_slave=False):

     :returns: List of dictionaries each containing compute node properties
     """
-    return IMPL.compute_node_get_all_by_host(context, host, use_slave)
+    return IMPL.compute_node_get_all_by_host(context, host)


 def compute_node_search_by_hypervisor(context, hypervisor_match):
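
At this layer the change is purely a signature change: nova.db.api stays a thin passthrough to IMPL, and reading from the slave is no longer a per-call flag. For a hypothetical direct caller:

    # Hypothetical caller code, for illustration only.
    # Before: nodes = db.compute_node_get_all_by_host(ctxt, 'host1',
    #                                                 use_slave=True)
    # After: the flag is gone; slave reads are selected higher up via
    # enginefacade reader modes (see the object-layer hunk below).
    nodes = db.compute_node_get_all_by_host(ctxt, 'host1')
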
@@ -562,12 +562,9 @@ def service_update(context, service_id, values):
 ###################


+@main_context_manager.reader
 def compute_node_get(context, compute_id):
     return _compute_node_get(context, compute_id)


 def _compute_node_get(context, compute_id, session=None):
-    result = model_query(context, models.ComputeNode, session=session).\
+    result = model_query(context, models.ComputeNode).\
             filter_by(id=compute_id).\
             first()

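
With the reader decorator on compute_node_get, the explicit session= plumbing can go away: the decorator attaches a session to the request context for the duration of the call, and helpers pick it up from there. A rough stand-in for that helper, illustrative only (Nova's real model_query also handles read_deleted semantics, projections, and more):

    # Illustrative only, not Nova's model_query implementation.
    def model_query(context, model, read_deleted='yes'):
        # context.session exists because an enginefacade @reader/@writer
        # decorator (or a `with ...using(context)` block) is active here.
        query = context.session.query(model)
        if read_deleted == 'no':
            # assumes a SoftDeleteMixin-style integer `deleted` column
            query = query.filter_by(deleted=0)
        return query
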
@@ -577,6 +574,7 @@ def _compute_node_get(context, compute_id, session=None):
     return result


+@main_context_manager.reader
 def compute_nodes_get_by_service_id(context, service_id):
     result = model_query(context, models.ComputeNode, read_deleted='no').\
             filter_by(service_id=service_id).\
@@ -588,6 +586,7 @@ def compute_nodes_get_by_service_id(context, service_id):
     return result


+@main_context_manager.reader
 def compute_node_get_by_host_and_nodename(context, host, nodename):
     result = model_query(context, models.ComputeNode, read_deleted='no').\
             filter_by(host=host, hypervisor_hostname=nodename).\
@@ -599,9 +598,9 @@ def compute_node_get_by_host_and_nodename(context, host, nodename):
     return result


-def compute_node_get_all_by_host(context, host, use_slave=False):
-    result = model_query(context, models.ComputeNode, read_deleted='no',
-                         use_slave=use_slave).\
+@main_context_manager.reader.allow_async
+def compute_node_get_all_by_host(context, host):
+    result = model_query(context, models.ComputeNode, read_deleted='no').\
             filter_by(host=host).\
             all()

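
Here the use_slave plumbing is replaced by reader.allow_async: the query may be served by the configured slave_connection when the caller has put the context into async-reader mode, and otherwise runs on the normal reader. Roughly, a caller could opt in like this (a sketch based on my reading of oslo.db's enginefacade modes; Nova wraps this up in the select_db_reader_mode decorator shown further down):

    # Sketch only; assumes the enginefacade setup from the earlier example.
    with main_context_manager.async_.using(ctxt):
        # allow_async functions join this async-reader transaction and so
        # may read from the slave database.
        nodes = compute_node_get_all_by_host(ctxt, 'host1')
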
@@ -611,10 +610,12 @@ def compute_node_get_all_by_host(context, host, use_slave=False):
     return result


+@main_context_manager.reader
 def compute_node_get_all(context):
     return model_query(context, models.ComputeNode, read_deleted='no').all()


+@main_context_manager.reader
 def compute_node_search_by_hypervisor(context, hypervisor_match):
     field = models.ComputeNode.hypervisor_hostname
     return model_query(context, models.ComputeNode).\
@@ -622,6 +623,7 @@ def compute_node_search_by_hypervisor(context, hypervisor_match):
             all()


+@main_context_manager.writer
 def compute_node_create(context, values):
     """Creates a new ComputeNode and populates the capacity fields
     with the most recent data.
@@ -630,40 +632,39 @@ def compute_node_create(context, values):

     compute_node_ref = models.ComputeNode()
     compute_node_ref.update(values)
-    compute_node_ref.save()
+    compute_node_ref.save(context.session)

     return compute_node_ref


 @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True)
+@main_context_manager.writer
 def compute_node_update(context, compute_id, values):
     """Updates the ComputeNode record with the most recent data."""

-    session = get_session()
-    with session.begin():
-        compute_ref = _compute_node_get(context, compute_id, session=session)
-        # Always update this, even if there's going to be no other
-        # changes in data. This ensures that we invalidate the
-        # scheduler cache of compute node data in case of races.
-        values['updated_at'] = timeutils.utcnow()
-        convert_objects_related_datetimes(values)
-        compute_ref.update(values)
+    compute_ref = compute_node_get(context, compute_id)
+    # Always update this, even if there's going to be no other
+    # changes in data. This ensures that we invalidate the
+    # scheduler cache of compute node data in case of races.
+    values['updated_at'] = timeutils.utcnow()
+    convert_objects_related_datetimes(values)
+    compute_ref.update(values)

     return compute_ref


+@main_context_manager.writer
 def compute_node_delete(context, compute_id):
     """Delete a ComputeNode record."""
-    session = get_session()
-    with session.begin():
-        result = model_query(context, models.ComputeNode, session=session).\
-            filter_by(id=compute_id).\
-            soft_delete(synchronize_session=False)
+    result = model_query(context, models.ComputeNode).\
+        filter_by(id=compute_id).\
+        soft_delete(synchronize_session=False)

-        if not result:
-            raise exception.ComputeHostNotFound(host=compute_id)
+    if not result:
+        raise exception.ComputeHostNotFound(host=compute_id)


+@main_context_manager.reader
 def compute_node_statistics(context):
     """Compute statistics over all compute nodes."""

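
On the write side, the get_session()/session.begin() blocks disappear: @main_context_manager.writer opens a single transaction for the whole call, save(context.session) flushes on the session the decorator attached to the context, and the commit happens when the function returns. A nested decorated call, such as compute_node_get inside compute_node_update above, joins that transaction instead of opening its own, and wrap_db_retry re-runs the whole function (with a fresh transaction) on deadlock. A compressed, illustrative rendering with a hypothetical function name:

    # Illustrative sketch; assumes the enginefacade setup from the earlier
    # example and Nova-style models whose save() accepts a session.
    from oslo_db import api as oslo_db_api
    from oslo_utils import timeutils

    @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True)
    @main_context_manager.writer
    def example_bump_updated_at(context, compute_id):
        node = compute_node_get(context, compute_id)  # reader joins writer txn
        node.update({'updated_at': timeutils.utcnow()})
        node.save(context.session)                    # flush; commit on return
        return node
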
@@ -228,6 +228,7 @@ class ComputeNode(base.NovaPersistentObject, base.NovaObject,
             context, host, nodename)
         return cls._from_db_object(context, cls(), db_compute)

+    # TODO(pkholkin): Remove this method in the next major version bump
     @base.remotable_classmethod
     def get_first_node_by_host_for_old_compat(cls, context, host,
                                               use_slave=False):
@@ -365,9 +366,14 @@ class ComputeNodeList(base.ObjectListBase, base.NovaObject):
         return base.obj_make_list(context, cls(context), objects.ComputeNode,
                                   db_computes)

+    @staticmethod
+    @db.select_db_reader_mode
+    def _db_compute_node_get_all_by_host(context, host, use_slave=False):
+        return db.compute_node_get_all_by_host(context, host)
+
     @base.remotable_classmethod
     def get_all_by_host(cls, context, host, use_slave=False):
-        db_computes = db.compute_node_get_all_by_host(context, host,
-                                                      use_slave)
+        db_computes = cls._db_compute_node_get_all_by_host(context, host,
+                                                           use_slave=use_slave)
         return base.obj_make_list(context, cls(context), objects.ComputeNode,
                                   db_computes)
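
The object layer keeps use_slave in its RPC-facing signature but translates it into an enginefacade reader mode through db.select_db_reader_mode instead of threading the flag down into the SQLAlchemy layer. Roughly what such a decorator does (an illustrative sketch; the real implementation lives in nova.db.api and differs in detail):

    # Illustrative sketch of a select_db_reader_mode-style decorator.
    import functools

    def select_db_reader_mode(f):
        @functools.wraps(f)
        def wrapper(context, *args, **kwargs):
            use_slave = kwargs.get('use_slave', False)
            reader_mode = (main_context_manager.async_ if use_slave
                           else main_context_manager.reader)
            # The wrapped DB API call is decorated with reader.allow_async,
            # so it joins whichever transaction is opened here.
            with reader_mode.using(context):
                return f(context, *args, **kwargs)
        return wrapper
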
@@ -7431,9 +7431,9 @@ class ComputeNodeTestCase(test.TestCase, ModelsObjectComparatorMixin):

         node = db.compute_node_create(self.ctxt, compute_node_another_host)

-        result = db.compute_node_get_all_by_host(self.ctxt, 'host1', False)
+        result = db.compute_node_get_all_by_host(self.ctxt, 'host1')
         self._assertEqualListsOfObjects([self.item], result)
-        result = db.compute_node_get_all_by_host(self.ctxt, 'host2', False)
+        result = db.compute_node_get_all_by_host(self.ctxt, 'host2')
         self._assertEqualListsOfObjects([node], result)

     def test_compute_node_get_all_by_host_with_same_host(self):
@@ -7446,7 +7446,7 @@ class ComputeNodeTestCase(test.TestCase, ModelsObjectComparatorMixin):

         expected = [self.item, node]
         result = sorted(db.compute_node_get_all_by_host(
-            self.ctxt, 'host1', False),
+            self.ctxt, 'host1'),
                         key=lambda n: n['hypervisor_hostname'])

         self._assertEqualListsOfObjects(expected, result,
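
The test changes are mechanical: direct db.* callers drop the now-removed use_slave argument, while RPC-facing callers of the object API are unaffected, e.g. (hypothetical caller, not part of this patch):

    # use_slave is still accepted here and handled inside the object layer.
    nodes = objects.ComputeNodeList.get_all_by_host(ctxt, 'host1',
                                                    use_slave=True)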