Route extend_volume calls to scheduler

When extending a volume in a thin-provisioned pool, Cinder does not
check whether the newly allocated space can actually be provisioned by
the backend. This allows a tenant with enough quota to extend a volume
to a size far beyond the provisionable space the backend reports.

Routing the extend request through the scheduler ensures that the host
can actually provision the additional space before the volume service
is asked to do the resize.

Closes-bug: #1600295
Closes-bug: #1539112
Change-Id: I4a4ee8acac258d773b99e2767f1c75faa909813b
Erlon R. Cruz 2016-11-29 09:20:42 -02:00
parent 0e47f3ce3a
commit c5afe768cc
7 changed files with 267 additions and 56 deletions
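
For context on what the new scheduler check evaluates, the sketch below condenses the
capacity filter's thin-provisioning arithmetic into a standalone function. It is not
Cinder code; the function name and the sample numbers are made up for illustration, and
it assumes a thin pool with a configured max_over_subscription_ratio.

    import math


    def host_can_provision_extend(free_capacity_gb, total_capacity_gb,
                                  reserved_percentage, provisioned_capacity_gb,
                                  max_over_subscription_ratio,
                                  current_size_gb, new_size_gb):
        """Rough approximation of the scheduler's capacity check for an extend."""
        # Only the delta between the new and the current size has to be provisioned.
        requested_size = int(new_size_gb) - int(current_size_gb)

        # Free space once the reserved percentage has been set aside.
        reserved = float(reserved_percentage) / 100
        free = free_capacity_gb - math.floor(total_capacity_gb * reserved)

        # Thin pool: the backend may be over-subscribed up to the configured ratio.
        provisioned_ratio = ((provisioned_capacity_gb + requested_size) /
                             float(total_capacity_gb))
        if provisioned_ratio > max_over_subscription_ratio:
            return False
        adjusted_free_virtual = free * max_over_subscription_ratio
        return adjusted_free_virtual >= requested_size


    # Extending a 50 GB volume to 100 GB only asks the pool for 50 GB more.
    print(host_can_provision_extend(free_capacity_gb=200, total_capacity_gb=500,
                                    reserved_percentage=5,
                                    provisioned_capacity_gb=400,
                                    max_over_subscription_ratio=2.0,
                                    current_size_gb=50, new_size_gb=100))  # True

Before this change, the equivalent check only ran at volume creation; the diffs below
make the same filter handle the extend delta as well.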

cinder/scheduler/filters/capacity_filter.py

@@ -39,7 +39,23 @@ class CapacityFilter(filters.BaseHostFilter):
         if host_state.host == filter_properties.get('vol_exists_on'):
             return True
 
-        volume_size = filter_properties.get('size')
+        spec = filter_properties.get('request_spec')
+        if spec:
+            volid = spec.get('volume_id')
+
+        if filter_properties.get('new_size'):
+            # If new_size is passed, we are allocating space to extend a volume
+            requested_size = (int(filter_properties.get('new_size')) -
+                              int(filter_properties.get('size')))
+            LOG.debug('Checking if host %(host)s can extend the volume %(id)s'
+                      'in %(size)s GB', {'host': host_state.host, 'id': volid,
+                                         'size': requested_size})
+        else:
+            requested_size = filter_properties.get('size')
+            LOG.debug('Checking if host %(host)s can create a %(size)s GB '
+                      'volume (%(id)s)',
+                      {'host': host_state.host, 'id': volid,
+                       'size': requested_size})
 
         if host_state.free_capacity_gb is None:
             # Fail Safe
@@ -78,7 +94,7 @@ class CapacityFilter(filters.BaseHostFilter):
         free = free_space - math.floor(total * reserved)
 
         msg_args = {"host": host_state.host,
-                    "requested": volume_size,
+                    "requested": requested_size,
                     "available": free}
 
         # NOTE(xyang): If 'provisioning:type' is 'thick' in extra_specs,
@@ -99,7 +115,7 @@ class CapacityFilter(filters.BaseHostFilter):
         if (thin and host_state.thin_provisioning_support and
                 host_state.max_over_subscription_ratio >= 1):
             provisioned_ratio = ((host_state.provisioned_capacity_gb +
-                                  volume_size) / total)
+                                  requested_size) / total)
             if provisioned_ratio > host_state.max_over_subscription_ratio:
                 LOG.warning(_LW(
                     "Insufficient free space for thin provisioning. "
@@ -120,7 +136,7 @@ class CapacityFilter(filters.BaseHostFilter):
             # of reserved space) which we can over-subscribe.
             adjusted_free_virtual = (
                 free * host_state.max_over_subscription_ratio)
-            return adjusted_free_virtual >= volume_size
+            return adjusted_free_virtual >= requested_size
         elif thin and host_state.thin_provisioning_support:
             LOG.warning(_LW("Filtering out host %(host)s with an invalid "
                             "maximum over subscription ratio of "
@@ -131,7 +147,7 @@ class CapacityFilter(filters.BaseHostFilter):
                          "host": host_state.host})
             return False
 
-        if free < volume_size:
+        if free < requested_size:
             LOG.warning(_LW("Insufficient free space for volume creation "
                             "on host %(host)s (requested / avail): "
                             "%(requested)s/%(available)s"), msg_args)

cinder/scheduler/manager.py

@@ -293,6 +293,28 @@ class SchedulerManager(manager.Manager):
         """
         return self.driver.get_pools(context, filters)
 
+    def extend_volume(self, context, volume, new_size, reservations,
+                      request_spec=None, filter_properties=None):
+
+        def _extend_volume_set_error(self, context, ex, request_spec):
+            volume_state = {'volume_state': {'status': 'available'}}
+            self._set_volume_state_and_notify('extend_volume', volume_state,
+                                              context, ex, request_spec)
+
+        if not filter_properties:
+            filter_properties = {}
+
+        filter_properties['new_size'] = new_size
+        try:
+            self.driver.host_passes_filters(context, volume.host,
+                                            request_spec, filter_properties)
+            volume_rpcapi.VolumeAPI().extend_volume(context, volume, new_size,
+                                                    reservations)
+        except exception.NoValidHost as ex:
+            QUOTAS.rollback(context, reservations,
+                            project_id=volume.project_id)
+            _extend_volume_set_error(self, context, ex, request_spec)
+
     def _set_volume_state_and_notify(self, method, updates, context, ex,
                                      request_spec, msg=None):
         # TODO(harlowja): move into a task that just does this later.

cinder/scheduler/rpcapi.py

@@ -61,9 +61,10 @@ class SchedulerAPI(rpc.RPCAPI):
         3.0 - Remove 2.x compatibility
         3.1 - Adds notify_service_capabilities()
+        3.2 - Adds extend_volume()
     """
 
-    RPC_API_VERSION = '3.1'
+    RPC_API_VERSION = '3.2'
     RPC_DEFAULT_VERSION = '3.0'
     TOPIC = constants.SCHEDULER_TOPIC
     BINARY = 'cinder-scheduler'
@@ -132,6 +133,25 @@ class SchedulerAPI(rpc.RPCAPI):
         }
         return cctxt.cast(ctxt, 'manage_existing', **msg_args)
 
+    def extend_volume(self, ctxt, volume, new_size, reservations,
+                      request_spec, filter_properties=None):
+        cctxt = self._get_cctxt()
+        if not cctxt.can_send_version('3.2'):
+            msg = _('extend_volume requires cinder-scheduler '
+                    'RPC API version >= 3.2.')
+            raise exception.ServiceTooOld(msg)
+
+        request_spec_p = jsonutils.to_primitive(request_spec)
+        msg_args = {
+            'volume': volume,
+            'new_size': new_size,
+            'reservations': reservations,
+            'request_spec': request_spec_p,
+            'filter_properties': filter_properties,
+        }
+        return cctxt.cast(ctxt, 'extend_volume', **msg_args)
+
     def get_pools(self, ctxt, filters=None):
         cctxt = self._get_cctxt()
         return cctxt.call(ctxt, 'get_pools', filters=filters)

cinder/tests/unit/scheduler/test_host_filters.py

@@ -61,7 +61,8 @@ class CapacityFilterTestCase(HostFiltersTestCase):
     def test_filter_passes(self, _mock_serv_is_up):
         _mock_serv_is_up.return_value = True
         filt_cls = self.class_map['CapacityFilter']()
-        filter_properties = {'size': 100}
+        filter_properties = {'size': 100,
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
         service = {'disabled': False}
         host = fakes.FakeHostState('host1',
                                    {'total_capacity_gb': 500,
@@ -73,7 +74,8 @@ class CapacityFilterTestCase(HostFiltersTestCase):
     def test_filter_current_host_passes(self, _mock_serv_is_up):
         _mock_serv_is_up.return_value = True
         filt_cls = self.class_map['CapacityFilter']()
-        filter_properties = {'size': 100, 'vol_exists_on': 'host1'}
+        filter_properties = {'size': 100, 'vol_exists_on': 'host1',
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
         service = {'disabled': False}
         host = fakes.FakeHostState('host1',
                                    {'total_capacity_gb': 100,
@@ -85,7 +87,8 @@ class CapacityFilterTestCase(HostFiltersTestCase):
     def test_filter_fails(self, _mock_serv_is_up):
         _mock_serv_is_up.return_value = True
         filt_cls = self.class_map['CapacityFilter']()
-        filter_properties = {'size': 100}
+        filter_properties = {'size': 100,
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
         service = {'disabled': False}
         host = fakes.FakeHostState('host1',
                                    {'total_capacity_gb': 200,
@@ -98,7 +101,8 @@ class CapacityFilterTestCase(HostFiltersTestCase):
     def test_filter_fails_free_capacity_None(self, _mock_serv_is_up):
         _mock_serv_is_up.return_value = True
         filt_cls = self.class_map['CapacityFilter']()
-        filter_properties = {'size': 100}
+        filter_properties = {'size': 100,
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
         service = {'disabled': False}
         host = fakes.FakeHostState('host1',
                                    {'free_capacity_gb': None,
@@ -109,7 +113,8 @@ class CapacityFilterTestCase(HostFiltersTestCase):
     def test_filter_passes_infinite(self, _mock_serv_is_up):
         _mock_serv_is_up.return_value = True
         filt_cls = self.class_map['CapacityFilter']()
-        filter_properties = {'size': 100}
+        filter_properties = {'size': 100,
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
         service = {'disabled': False}
         host = fakes.FakeHostState('host1',
                                    {'free_capacity_gb': 'infinite',
@@ -117,10 +122,37 @@ class CapacityFilterTestCase(HostFiltersTestCase):
                                     'service': service})
         self.assertTrue(filt_cls.host_passes(host, filter_properties))
 
+    @mock.patch('cinder.utils.service_is_up')
+    def test_filter_extend_request(self, _mock_serv_is_up):
+        _mock_serv_is_up.return_value = True
+        filt_cls = self.class_map['CapacityFilter']()
+        filter_properties = {'new_size': 100, 'size': 50,
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
+        service = {'disabled': False}
+        host = fakes.FakeHostState('host1',
+                                   {'free_capacity_gb': 200,
+                                    'updated_at': None,
+                                    'total_capacity_gb': 500,
+                                    'service': service})
+        self.assertTrue(filt_cls.host_passes(host, filter_properties))
+
+    @mock.patch('cinder.utils.service_is_up')
+    def test_filter_extend_request_negative(self, _mock_serv_is_up):
+        _mock_serv_is_up.return_value = True
+        filt_cls = self.class_map['CapacityFilter']()
+        filter_properties = {'size': 50,
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
+        service = {'disabled': False}
+        host = fakes.FakeHostState('host1',
+                                   {'free_capacity_gb': 49,
+                                    'updated_at': None,
+                                    'total_capacity_gb': 500,
+                                    'service': service})
+        self.assertFalse(filt_cls.host_passes(host, filter_properties))
+
     def test_filter_passes_unknown(self, _mock_serv_is_up):
         _mock_serv_is_up.return_value = True
         filt_cls = self.class_map['CapacityFilter']()
-        filter_properties = {'size': 100}
+        filter_properties = {'size': 100,
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
         service = {'disabled': False}
         host = fakes.FakeHostState('host1',
                                    {'free_capacity_gb': 'unknown',
@@ -131,7 +163,8 @@ class CapacityFilterTestCase(HostFiltersTestCase):
     def test_filter_passes_total_infinite(self, _mock_serv_is_up):
         _mock_serv_is_up.return_value = True
         filt_cls = self.class_map['CapacityFilter']()
-        filter_properties = {'size': 100}
+        filter_properties = {'size': 100,
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
         service = {'disabled': False}
         host = fakes.FakeHostState('host1',
                                    {'free_capacity_gb': 'infinite',
@@ -144,7 +177,8 @@ class CapacityFilterTestCase(HostFiltersTestCase):
     def test_filter_passes_total_unknown(self, _mock_serv_is_up):
         _mock_serv_is_up.return_value = True
         filt_cls = self.class_map['CapacityFilter']()
-        filter_properties = {'size': 100}
+        filter_properties = {'size': 100,
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
         service = {'disabled': False}
         host = fakes.FakeHostState('host1',
                                    {'free_capacity_gb': 'unknown',
@@ -157,7 +191,8 @@ class CapacityFilterTestCase(HostFiltersTestCase):
     def test_filter_fails_total_infinite(self, _mock_serv_is_up):
         _mock_serv_is_up.return_value = True
         filt_cls = self.class_map['CapacityFilter']()
-        filter_properties = {'size': 100}
+        filter_properties = {'size': 100,
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
         service = {'disabled': False}
         host = fakes.FakeHostState('host1',
                                    {'total_capacity_gb': 'infinite',
@@ -169,7 +204,8 @@ class CapacityFilterTestCase(HostFiltersTestCase):
     def test_filter_fails_total_unknown(self, _mock_serv_is_up):
         _mock_serv_is_up.return_value = True
         filt_cls = self.class_map['CapacityFilter']()
-        filter_properties = {'size': 100}
+        filter_properties = {'size': 100,
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
        service = {'disabled': False}
         host = fakes.FakeHostState('host1',
                                    {'total_capacity_gb': 'unknown',
@@ -181,7 +217,8 @@ class CapacityFilterTestCase(HostFiltersTestCase):
     def test_filter_fails_total_zero(self, _mock_serv_is_up):
         _mock_serv_is_up.return_value = True
         filt_cls = self.class_map['CapacityFilter']()
-        filter_properties = {'size': 100}
+        filter_properties = {'size': 100,
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
         service = {'disabled': False}
         host = fakes.FakeHostState('host1',
                                    {'total_capacity_gb': 0,
@@ -197,7 +234,8 @@ class CapacityFilterTestCase(HostFiltersTestCase):
                              'capabilities:thin_provisioning_support':
                                  '<is> True',
                              'capabilities:thick_provisioning_support':
-                                 '<is> False'}
+                                 '<is> False',
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
         service = {'disabled': False}
         host = fakes.FakeHostState('host1',
                                    {'total_capacity_gb': 500,
@@ -218,7 +256,8 @@ class CapacityFilterTestCase(HostFiltersTestCase):
                              'capabilities:thin_provisioning_support':
                                  '<is> True',
                              'capabilities:thick_provisioning_support':
-                                 '<is> False'}
+                                 '<is> False',
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
         service = {'disabled': False}
         host = fakes.FakeHostState('host1',
                                    {'total_capacity_gb': 500,
@@ -239,7 +278,8 @@ class CapacityFilterTestCase(HostFiltersTestCase):
                              'capabilities:thin_provisioning_support':
                                  '<is> False',
                              'capabilities:thick_provisioning_support':
-                                 '<is> True'}
+                                 '<is> True',
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
         service = {'disabled': False}
         # If "thin_provisioning_support" is False,
         # "max_over_subscription_ratio" will be ignored.
@@ -262,7 +302,8 @@ class CapacityFilterTestCase(HostFiltersTestCase):
                              'capabilities:thin_provisioning_support':
                                  '<is> True',
                              'capabilities:thick_provisioning_support':
-                                 '<is> False'}
+                                 '<is> False',
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
         service = {'disabled': False}
         host = fakes.FakeHostState('host1',
                                    {'total_capacity_gb': 500,
@@ -283,7 +324,8 @@ class CapacityFilterTestCase(HostFiltersTestCase):
                              'capabilities:thin_provisioning_support':
                                  '<is> True',
                              'capabilities:thick_provisioning_support':
-                                 '<is> False'}
+                                 '<is> False',
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
         service = {'disabled': False}
         host = fakes.FakeHostState('host1',
                                    {'total_capacity_gb': 500,
@@ -304,7 +346,8 @@ class CapacityFilterTestCase(HostFiltersTestCase):
                              'capabilities:thin_provisioning_support':
                                  '<is> True',
                              'capabilities:thick_provisioning_support':
-                                 '<is> False'}
+                                 '<is> False',
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
         service = {'disabled': False}
         host = fakes.FakeHostState('host1',
                                    {'total_capacity_gb': 500,
@@ -325,7 +368,8 @@ class CapacityFilterTestCase(HostFiltersTestCase):
                              'capabilities:thin_provisioning_support':
                                  '<is> True',
                              'capabilities:thick_provisioning_support':
-                                 '<is> False'}
+                                 '<is> False',
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
         service = {'disabled': False}
         host = fakes.FakeHostState('host1',
                                    {'total_capacity_gb': 500,
@@ -346,7 +390,8 @@ class CapacityFilterTestCase(HostFiltersTestCase):
                              'capabilities:thin_provisioning_support':
                                  '<is> True',
                              'capabilities:thick_provisioning_support':
-                                 '<is> False'}
+                                 '<is> False',
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
         service = {'disabled': False}
         host = fakes.FakeHostState('host1',
                                    {'total_capacity_gb': 500,
@@ -367,7 +412,8 @@ class CapacityFilterTestCase(HostFiltersTestCase):
                              'capabilities:thin_provisioning_support':
                                  '<is> False',
                              'capabilities:thick_provisioning_support':
-                                 '<is> True'}
+                                 '<is> True',
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
         service = {'disabled': False}
         # If "thin_provisioning_support" is False,
         # "max_over_subscription_ratio" will be ignored.
@@ -390,7 +436,8 @@ class CapacityFilterTestCase(HostFiltersTestCase):
                              'capabilities:thin_provisioning_support':
                                  '<is> True',
                              'capabilities:thick_provisioning_support':
-                                 '<is> True'}
+                                 '<is> True',
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
         service = {'disabled': False}
         host = fakes.FakeHostState('host1',
                                    {'total_capacity_gb': 500,
@@ -411,7 +458,8 @@ class CapacityFilterTestCase(HostFiltersTestCase):
                              'capabilities:thin_provisioning_support':
                                  '<is> True',
                              'capabilities:thick_provisioning_support':
-                                 '<is> True'}
+                                 '<is> True',
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
         service = {'disabled': False}
         host = fakes.FakeHostState('host1',
                                    {'total_capacity_gb': 500,
@@ -432,7 +480,8 @@ class CapacityFilterTestCase(HostFiltersTestCase):
                              'capabilities:thin_provisioning_support':
                                  '<is> True',
                              'capabilities:thick_provisioning_support':
-                                 '<is> False'}
+                                 '<is> False',
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
         service = {'disabled': False}
         host = fakes.FakeHostState('host1',
                                    {'total_capacity_gb': 500,
@@ -453,7 +502,8 @@ class CapacityFilterTestCase(HostFiltersTestCase):
                              'capabilities:thin_provisioning_support':
                                  '<is> True',
                              'capabilities:thick_provisioning_support':
-                                 '<is> True'}
+                                 '<is> True',
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
         service = {'disabled': False}
         host = fakes.FakeHostState('host1',
                                    {'total_capacity_gb': 500,
@@ -474,7 +524,8 @@ class CapacityFilterTestCase(HostFiltersTestCase):
                              'capabilities:thin_provisioning_support':
                                  '<is> True',
                              'capabilities:thick_provisioning_support':
-                                 '<is> True'}
+                                 '<is> True',
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
         service = {'disabled': False}
         host = fakes.FakeHostState('host1',
                                    {'total_capacity_gb': 500,
@@ -500,7 +551,8 @@ class CapacityFilterTestCase(HostFiltersTestCase):
         _mock_serv_is_up.return_value = True
         filt_cls = self.class_map['CapacityFilter']()
         filter_properties = {'size': 100,
-                             'volume_type': volume_type}
+                             'volume_type': volume_type,
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
         service = {'disabled': False}
         host = fakes.FakeHostState('host1',
                                    {'total_capacity_gb': 500,
@@ -530,8 +582,8 @@ class AffinityFilterTestCase(HostFiltersTestCase):
         vol_id = volume.id
 
         filter_properties = {'context': self.context.elevated(),
-                             'scheduler_hints': {
-                                 'different_host': [vol_id], }}
+                             'scheduler_hints': {'different_host': [vol_id], },
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
 
         self.assertTrue(filt_cls.host_passes(host, filter_properties))
@@ -550,8 +602,8 @@ class AffinityFilterTestCase(HostFiltersTestCase):
         vol_id = volume.id
 
         filter_properties = {'context': self.context.elevated(),
-                             'scheduler_hints': {
-                                 'different_host': [vol_id], }}
+                             'scheduler_hints': {'different_host': [vol_id], },
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
 
         self.assertTrue(filt_cls.host_passes(host, filter_properties))
@@ -574,8 +626,8 @@ class AffinityFilterTestCase(HostFiltersTestCase):
         vol_id = volume.id
 
         filter_properties = {'context': self.context.elevated(),
-                             'scheduler_hints': {
-                                 'different_host': [vol_id], }}
+                             'scheduler_hints': {'different_host': [vol_id], },
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
 
         self.assertFalse(filt_cls.host_passes(host, filter_properties))
@@ -584,7 +636,8 @@ class AffinityFilterTestCase(HostFiltersTestCase):
         host = fakes.FakeHostState('host1', {})
 
         filter_properties = {'context': self.context.elevated(),
-                             'scheduler_hints': None}
+                             'scheduler_hints': None,
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
 
         self.assertTrue(filt_cls.host_passes(host, filter_properties))
@@ -944,7 +997,8 @@ class InstanceLocalityFilterTestCase(HostFiltersTestCase):
         uuid = nova.novaclient().servers.create('host1')
 
         filter_properties = {'context': self.context,
-                             'scheduler_hints': {'local_to_instance': uuid}}
+                             'scheduler_hints': {'local_to_instance': uuid},
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
         self.assertTrue(filt_cls.host_passes(host, filter_properties))
 
     @mock.patch('novaclient.client.discover_extensions')
@@ -958,7 +1012,8 @@ class InstanceLocalityFilterTestCase(HostFiltersTestCase):
         uuid = nova.novaclient().servers.create('host2')
 
         filter_properties = {'context': self.context,
-                             'scheduler_hints': {'local_to_instance': uuid}}
+                             'scheduler_hints': {'local_to_instance': uuid},
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
         self.assertFalse(filt_cls.host_passes(host, filter_properties))
 
     def test_handles_none(self):
@@ -966,7 +1021,8 @@ class InstanceLocalityFilterTestCase(HostFiltersTestCase):
         host = fakes.FakeHostState('host1', {})
 
         filter_properties = {'context': self.context,
-                             'scheduler_hints': None}
+                             'scheduler_hints': None,
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
         self.assertTrue(filt_cls.host_passes(host, filter_properties))
 
     def test_invalid_uuid(self):
@@ -975,7 +1031,8 @@ class InstanceLocalityFilterTestCase(HostFiltersTestCase):
         filter_properties = {'context': self.context,
                              'scheduler_hints':
-                                 {'local_to_instance': 'e29b11d4-not-valid-a716'}}
+                                 {'local_to_instance': 'e29b11d4-not-valid-a716'},
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
         self.assertRaises(exception.InvalidUUID,
                           filt_cls.host_passes, host, filter_properties)
@@ -988,7 +1045,8 @@ class InstanceLocalityFilterTestCase(HostFiltersTestCase):
         uuid = nova.novaclient().servers.create('host1')
 
         filter_properties = {'context': self.context,
-                             'scheduler_hints': {'local_to_instance': uuid}}
+                             'scheduler_hints': {'local_to_instance': uuid},
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
         self.assertRaises(exception.CinderException,
                           filt_cls.host_passes, host, filter_properties)
@@ -1000,7 +1058,8 @@ class InstanceLocalityFilterTestCase(HostFiltersTestCase):
         filt_cls = self.class_map['InstanceLocalityFilter']()
         host = fakes.FakeHostState('host1', {})
 
-        filter_properties = {'context': self.context, 'size': 100}
+        filter_properties = {'context': self.context, 'size': 100,
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
         self.assertTrue(filt_cls.host_passes(host, filter_properties))
 
     @mock.patch('cinder.compute.nova.novaclient')
@@ -1014,7 +1073,8 @@ class InstanceLocalityFilterTestCase(HostFiltersTestCase):
         filter_properties = \
             {'context': self.context, 'scheduler_hints':
-                {'local_to_instance': 'e29b11d4-15ef-34a9-a716-598a6f0b5467'}}
+                {'local_to_instance': 'e29b11d4-15ef-34a9-a716-598a6f0b5467'},
+             'request_spec': {'volume_id': fake.VOLUME_ID}}
         self.assertRaises(exception.APITimeout,
                           filt_cls.host_passes, host, filter_properties)
@@ -1279,7 +1339,8 @@ class BasicFiltersTestCase(HostFiltersTestCase):
         capabilities.update(ecaps)
         service = {'disabled': False}
         filter_properties = {'resource_type': {'name': 'fake_type',
-                                               'extra_specs': especs}}
+                                               'extra_specs': especs},
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
         host = fakes.FakeHostState('host1',
                                    {'free_capacity_gb': 1024,
                                     'capabilities': capabilities,
@@ -1436,7 +1497,8 @@ class BasicFiltersTestCase(HostFiltersTestCase):
         filter_properties = {'resource_type': {'memory_mb': 1024,
                                                'root_gb': 200,
                                                'ephemeral_gb': 0},
-                             'scheduler_hints': {'query': self.json_query}}
+                             'scheduler_hints': {'query': self.json_query},
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
         capabilities = {'enabled': True}
         host = fakes.FakeHostState('host1',
                                    {'free_ram_mb': 1024,
@@ -1448,7 +1510,8 @@ class BasicFiltersTestCase(HostFiltersTestCase):
         filt_cls = self.class_map['JsonFilter']()
         filter_properties = {'resource_type': {'memory_mb': 1024,
                                                'root_gb': 200,
-                                               'ephemeral_gb': 0}}
+                                               'ephemeral_gb': 0},
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
         capabilities = {'enabled': True}
         host = fakes.FakeHostState('host1',
                                    {'free_ram_mb': 0,
@@ -1461,7 +1524,8 @@ class BasicFiltersTestCase(HostFiltersTestCase):
         filter_properties = {'resource_type': {'memory_mb': 1024,
                                                'root_gb': 200,
                                                'ephemeral_gb': 0},
-                             'scheduler_hints': {'query': self.json_query}}
+                             'scheduler_hints': {'query': self.json_query},
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
         capabilities = {'enabled': True}
         host = fakes.FakeHostState('host1',
                                    {'free_ram_mb': 1023,
@@ -1474,7 +1538,8 @@ class BasicFiltersTestCase(HostFiltersTestCase):
         filter_properties = {'resource_type': {'memory_mb': 1024,
                                                'root_gb': 200,
                                                'ephemeral_gb': 0},
-                             'scheduler_hints': {'query': self.json_query}}
+                             'scheduler_hints': {'query': self.json_query},
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
         capabilities = {'enabled': True}
         host = fakes.FakeHostState('host1',
                                    {'free_ram_mb': 1024,
@@ -1491,7 +1556,8 @@ class BasicFiltersTestCase(HostFiltersTestCase):
         filter_properties = {'resource_type': {'memory_mb': 1024,
                                                'root_gb': 200,
                                                'ephemeral_gb': 0},
-                             'scheduler_hints': {'query': json_query}}
+                             'scheduler_hints': {'query': json_query},
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
         capabilities = {'enabled': False}
         host = fakes.FakeHostState('host1',
                                    {'free_ram_mb': 1024,
@@ -1507,7 +1573,8 @@ class BasicFiltersTestCase(HostFiltersTestCase):
                                 ['not', '$service.disabled']])
         filter_properties = {'resource_type': {'memory_mb': 1024,
                                                'local_gb': 200},
-                             'scheduler_hints': {'query': json_query}}
+                             'scheduler_hints': {'query': json_query},
+                             'request_spec': {'volume_id': fake.VOLUME_ID}}
         capabilities = {'enabled': True}
         host = fakes.FakeHostState('host1',
                                    {'free_ram_mb': 1024,
@@ -1532,6 +1599,7 @@ class BasicFiltersTestCase(HostFiltersTestCase):
             'scheduler_hints': {
                 'query': jsonutils.dumps(raw),
             },
+            'request_spec': {'volume_id': fake.VOLUME_ID}
         }
 
         # Passes
@@ -1633,6 +1701,7 @@ class BasicFiltersTestCase(HostFiltersTestCase):
             'scheduler_hints': {
                 'query': jsonutils.dumps(raw),
             },
+            'request_spec': {'volume_id': fake.VOLUME_ID}
         }
         self.assertEqual(expected,
                          filt_cls.host_passes(host, filter_properties))

cinder/tests/unit/scheduler/test_rpcapi.py

@@ -170,6 +170,37 @@ class SchedulerRpcAPITestCase(test.TestCase):
                                  version='3.0')
         create_worker_mock.assert_not_called()
 
+    @mock.patch('oslo_messaging.RPCClient.can_send_version',
+                return_value=False)
+    def test_extend_volume_capped(self, can_send_version_mock):
+        new_size = 4
+        volume = fake_volume.fake_volume_obj(self.context)
+        self.assertRaises(exception.ServiceTooOld,
+                          self._test_scheduler_api,
+                          'extend_volume',
+                          rpc_method='cast',
+                          request_spec='fake_request_spec',
+                          filter_properties='filter_properties',
+                          volume=volume,
+                          new_size=new_size,
+                          reservations=['RESERVATIONS'],
+                          version='3.0')
+
+    @mock.patch('oslo_messaging.RPCClient.can_send_version',
+                return_value=True)
+    def test_extend_volume(self, can_send_version_mock):
+        new_size = 4
+        volume = fake_volume.fake_volume_obj(self.context)
+        create_worker_mock = self.mock_object(volume, 'create_worker')
+        self._test_scheduler_api('extend_volume',
+                                 rpc_method='cast',
+                                 request_spec='fake_request_spec',
+                                 filter_properties='filter_properties',
+                                 volume=volume,
+                                 new_size=new_size,
+                                 reservations=['RESERVATIONS'],
+                                 version='3.0')
+        create_worker_mock.assert_not_called()
+
     def test_get_pools(self):
         self._test_scheduler_api('get_pools',
                                  rpc_method='call',

cinder/tests/unit/test_volume.py

@@ -1071,7 +1071,6 @@ class VolumeTestCase(base.BaseVolumeTestCase):
     def test_get_all_limit_bad_value(self):
         """Test value of 'limit' is numeric and >= 0"""
         volume_api = cinder.volume.api.API()
-
         self.assertRaises(exception.InvalidInput,
                           volume_api.get_all,
                           self.context,
@@ -3956,6 +3955,36 @@ class VolumeTestCase(base.BaseVolumeTestCase):
                           volume_api.extend, self.context,
                           volume, 3)
 
+        # Test scheduler path
+        limit_check.side_effect = None
+        reserve.side_effect = None
+        db.volume_update(self.context, volume.id, {'status': 'available'})
+        volume_api.scheduler_rpcapi = mock.MagicMock()
+        volume_api.scheduler_rpcapi.extend_volume = mock.MagicMock()
+        volume_api.extend(self.context, volume, 3)
+        request_spec = {
+            'volume_properties': volume,
+            'volume_type': {},
+            'volume_id': volume.id
+        }
+        volume_api.scheduler_rpcapi.extend_volume.assert_called_once_with(
+            self.context, volume, 3, ["RESERVATION"], request_spec)
+
+        # Test direct volume path
+        limit_check.side_effect = None
+        reserve.side_effect = None
+        db.volume_update(self.context, volume.id, {'status': 'available'})
+        ext_mock = mock.MagicMock(side_effect=exception.ServiceTooOld)
+        volume_api.volume_rpcapi.extend_volume = mock.MagicMock()
+        volume_api.scheduler_rpcapi.extend_volume = ext_mock
+        volume_api.extend(self.context, volume, 3)
+        volume_api.volume_rpcapi.extend_volume.assert_called_once_with(
+            self.context, volume, 3, ["RESERVATION"])
+
         # clean up
         self.volume.delete_volume(self.context, volume)

cinder/volume/api.py

@@ -1314,8 +1314,32 @@ class API(base.Base):
         if reservations is None:
             _roll_back_status()
 
-        self.volume_rpcapi.extend_volume(context, volume, new_size,
-                                         reservations)
+        volume_type = {}
+        if volume.volume_type_id:
+            volume_type = volume_types.get_volume_type(context.elevated(),
+                                                       volume.volume_type_id)
+
+        request_spec = {
+            'volume_properties': volume,
+            'volume_type': volume_type,
+            'volume_id': volume.id
+        }
+
+        try:
+            self.scheduler_rpcapi.extend_volume(context, volume, new_size,
+                                                reservations, request_spec)
+        except exception.ServiceTooOld as e:
+            # NOTE(erlon): During rolling upgrades scheduler and volume can
+            # have different versions. This check makes sure that a new
+            # version of the volume service won't break.
+            msg = _LW("Failed to send extend volume request to scheduler. "
+                      "Falling back to old behaviour. This is normal during a "
+                      "live-upgrade. Error: %(e)s")
+            LOG.warning(msg, {'e': e})
+            # TODO(erlon): Remove in Pike
+            self.volume_rpcapi.extend_volume(context, volume, new_size,
+                                             reservations)
+
         LOG.info(_LI("Extend volume request issued successfully."),
                  resource=volume)