Cells: Limit instances pulled in _heal_instances
In order to avoid ballooning memory usage, we should limit the number of instances pulled at any one time while syncing them. This adds pagination to the generator used, so that it pulls instances from the database in limited batches, and an optional parameter to optimize for cases where only a small number of instances is needed. Change-Id: Ia82185af27ed11d68a8498ec4bde3da529134893 Closes-Bug: 1447732
This commit is contained in:
parent
ff412c3888
commit
a8a349c10f
|
@@ -57,6 +57,11 @@ cells_opts = [
|
|||
cfg.IntOpt('bandwidth_update_interval',
|
||||
default=600,
|
||||
help='Seconds between bandwidth updates for cells.'),
|
||||
cfg.IntOpt('instance_update_sync_database_limit',
|
||||
default=100,
|
||||
help='Number of instances to pull from the database at one '
|
||||
'time for a sync. If there are more instances to update '
|
||||
'the results will be paged through'),
|
||||
]
|
||||
|
||||
CONF = cfg.CONF
|
||||
|
|
|
@@ -19,6 +19,7 @@ Cells Utility Methods
|
|||
import random
|
||||
import sys
|
||||
|
||||
from oslo_config import cfg
|
||||
import six
|
||||
|
||||
from nova import objects
|
||||
|
@@ -35,6 +36,10 @@ BLOCK_SYNC_FLAG = '!!'
|
|||
# Separator used between cell name and item
|
||||
_CELL_ITEM_SEP = '@'
|
||||
|
||||
CONF = cfg.CONF
|
||||
CONF.import_opt('instance_update_sync_database_limit', 'nova.cells.opts',
|
||||
group='cells')
|
||||
|
||||
|
||||
class ProxyObjectSerializer(obj_base.NovaObjectSerializer):
|
||||
def __init__(self):
|
||||
|
@@ -131,6 +136,19 @@ def get_instances_to_sync(context, updated_since=None, project_id=None,
|
|||
cells services aren't self-healing the same instances in nearly
|
||||
lockstep.
|
||||
"""
|
||||
def _get_paginated_instances(context, filters, shuffle, limit, marker):
|
||||
instances = objects.InstanceList.get_by_filters(
|
||||
context, filters, sort_key='deleted', sort_dir='asc',
|
||||
limit=limit, marker=marker)
|
||||
if len(instances) > 0:
|
||||
marker = instances[-1]['uuid']
|
||||
# NOTE(melwitt/alaski): Need a list that supports assignment for
|
||||
# shuffle. And pop() on the returned result.
|
||||
instances = list(instances)
|
||||
if shuffle:
|
||||
random.shuffle(instances)
|
||||
return instances, marker
|
||||
|
||||
filters = {}
|
||||
if updated_since is not None:
|
||||
filters['changes-since'] = updated_since
|
||||
|
@@ -139,13 +157,17 @@ def get_instances_to_sync(context, updated_since=None, project_id=None,
|
|||
if not deleted:
|
||||
filters['deleted'] = False
|
||||
# Active instances first.
|
||||
instances = objects.InstanceList.get_by_filters(
|
||||
context, filters, sort_key='deleted', sort_dir='asc')
|
||||
if shuffle:
|
||||
# NOTE(melwitt): Need a list that supports assignment for shuffle.
|
||||
instances = [instance for instance in instances]
|
||||
random.shuffle(instances)
|
||||
for instance in instances:
|
||||
limit = CONF.cells.instance_update_sync_database_limit
|
||||
marker = None
|
||||
|
||||
instances = []
|
||||
while True:
|
||||
if not instances:
|
||||
instances, marker = _get_paginated_instances(context, filters,
|
||||
shuffle, limit, marker)
|
||||
if not instances:
|
||||
break
|
||||
instance = instances.pop(0)
|
||||
if uuids_only:
|
||||
yield instance.uuid
|
||||
else:
|
||||
|
|
|
@@ -23,6 +23,7 @@ from nova.cells import utils as cells_utils
|
|||
from nova import exception
|
||||
from nova import objects
|
||||
from nova import test
|
||||
from nova.tests.unit import fake_instance
|
||||
|
||||
|
||||
class CellsUtilsTestCase(test.NoDBTestCase):
|
||||
|
@@ -37,13 +38,18 @@ class CellsUtilsTestCase(test.NoDBTestCase):
|
|||
|
||||
@staticmethod
|
||||
def instance_get_all_by_filters(context, filters,
|
||||
sort_key, sort_dir):
|
||||
sort_key, sort_dir, limit, marker):
|
||||
# Pretend we return a full list the first time otherwise we loop
|
||||
# infinitely
|
||||
if marker is not None:
|
||||
return []
|
||||
self.assertEqual(fake_context, context)
|
||||
self.assertEqual('deleted', sort_key)
|
||||
self.assertEqual('asc', sort_dir)
|
||||
call_info['got_filters'] = filters
|
||||
call_info['get_all'] += 1
|
||||
return ['fake_instance1', 'fake_instance2', 'fake_instance3']
|
||||
instances = [fake_instance.fake_db_instance() for i in range(3)]
|
||||
return instances
|
||||
|
||||
self.stubs.Set(objects.InstanceList, 'get_by_filters',
|
||||
instance_get_all_by_filters)
|
||||
|
@@ -83,6 +89,58 @@ class CellsUtilsTestCase(test.NoDBTestCase):
|
|||
'project_id': 'fake-project'}, call_info['got_filters'])
|
||||
self.assertEqual(2, call_info['shuffle'])
|
||||
|
||||
@mock.patch.object(objects.InstanceList, 'get_by_filters')
|
||||
@mock.patch.object(random, 'shuffle')
|
||||
def _test_get_instances_pagination(self, mock_shuffle,
|
||||
mock_get_by_filters, shuffle=False, updated_since=None,
|
||||
project_id=None):
|
||||
fake_context = 'fake_context'
|
||||
|
||||
instances0 = objects.instance._make_instance_list(fake_context,
|
||||
objects.InstanceList(),
|
||||
[fake_instance.fake_db_instance() for i in range(3)],
|
||||
expected_attrs=None)
|
||||
marker0 = instances0[-1]['uuid']
|
||||
instances1 = objects.instance._make_instance_list(fake_context,
|
||||
objects.InstanceList(),
|
||||
[fake_instance.fake_db_instance() for i in range(3)],
|
||||
expected_attrs=None)
|
||||
marker1 = instances1[-1]['uuid']
|
||||
|
||||
mock_get_by_filters.side_effect = [instances0, instances1, []]
|
||||
|
||||
instances = cells_utils.get_instances_to_sync(fake_context,
|
||||
updated_since, project_id, shuffle=shuffle)
|
||||
self.assertEqual(len([x for x in instances]), 6)
|
||||
|
||||
filters = {}
|
||||
if updated_since is not None:
|
||||
filters['changes-since'] = updated_since
|
||||
if project_id is not None:
|
||||
filters['project_id'] = project_id
|
||||
limit = 100
|
||||
expected_calls = [mock.call(fake_context, filters, sort_key='deleted',
|
||||
sort_dir='asc', limit=limit, marker=None),
|
||||
mock.call(fake_context, filters, sort_key='deleted',
|
||||
sort_dir='asc', limit=limit, marker=marker0),
|
||||
mock.call(fake_context, filters, sort_key='deleted',
|
||||
sort_dir='asc', limit=limit, marker=marker1)]
|
||||
mock_get_by_filters.assert_has_calls(expected_calls)
|
||||
self.assertEqual(3, mock_get_by_filters.call_count)
|
||||
|
||||
def test_get_instances_to_sync_limit(self):
|
||||
self._test_get_instances_pagination()
|
||||
|
||||
def test_get_instances_to_sync_shuffle(self):
|
||||
self._test_get_instances_pagination(shuffle=True)
|
||||
|
||||
def test_get_instances_to_sync_updated_since(self):
|
||||
self._test_get_instances_pagination(updated_since='fake-updated-since')
|
||||
|
||||
def test_get_instances_to_sync_multiple_params(self):
|
||||
self._test_get_instances_pagination(project_id='fake-project',
|
||||
updated_since='fake-updated-since', shuffle=True)
|
||||
|
||||
def test_split_cell_and_item(self):
|
||||
path = 'australia', 'queensland', 'gold_coast'
|
||||
cell = cells_utils.PATH_CELL_SEP.join(path)
|
||||
|
|
Loading…
Reference in New Issue