Allow updates to export locations

Update the init_host() method of the share manager to allow the
driver to return a list of export locations from its ensure_share()
method. Save the export locations to the database in exactly the
same order in which the driver provides them.

Partially implements bp multiple-export-locations

Change-Id: If4291faa6aa2216703642a8e0279c1dcb064d9cb
Igor Malinovskiy 2015-03-17 17:52:48 +02:00
parent 77416dfbe5
commit 0c14bc36c6
5 changed files with 134 additions and 57 deletions
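
Illustrative usage (editor's sketch, not part of the commit): assuming a configured manila database and session, as in the unit tests added below, the updated DB API persists export locations and returns them in the caller-supplied order. The module path manila.db.sqlalchemy.api and the admin context helper are assumptions based on the files touched here.

    # Editor's sketch only -- mirrors the new unit test below; assumes a working
    # manila DB session (e.g. inside the test suite), not a standalone script.
    from manila import context
    from manila.db.sqlalchemy import api

    ctxt = context.get_admin_context()
    share = api.share_create(ctxt, {'host': 'foobar'})

    api.share_export_locations_update(ctxt, share['id'],
                                      ['fake1/1/', 'fake2/2', 'fake3/3'], False)
    api.share_export_locations_update(ctxt, share['id'],
                                      ['fake4/4', 'fake2/2', 'fake3/3'], True)

    # Returns ['fake4/4', 'fake2/2', 'fake3/3'] -- the exact order of the last
    # update call, because rows are ordered by their indexed 'updated_at' values.
    print(api.share_export_locations_get(ctxt, share['id']))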


@@ -18,6 +18,7 @@
 """Implementation of SQLAlchemy backend."""

+import datetime
 import sys
 import uuid
 import warnings
@@ -1676,9 +1677,13 @@ def _share_export_locations_get(context, share_id, session=None):
     if not session:
         session = get_session()

-    return model_query(context, models.ShareExportLocations,
-                       session=session).\
-        filter_by(share_id=share_id).all()
+    return (
+        model_query(context, models.ShareExportLocations,
+                    session=session, read_deleted="no").
+        filter_by(share_id=share_id).
+        order_by("updated_at").
+        all()
+    )


 @require_context
@@ -1696,29 +1701,54 @@ def share_export_locations_update(context, share_id, export_locations, delete):
         location_rows = _share_export_locations_get(
             context, share_id, session=session)

-        current_locations = set([l['path'] for l in location_rows])
+        def get_path_list_from_rows(rows):
+            return set([l['path'] for l in rows])

-        new_locations = set(export_locations)
-        add_locations = new_locations.difference(current_locations)
+        current_locations = get_path_list_from_rows(location_rows)

-        # Set existing export location to deleted if delete argument is True
-        if delete:
-            delete_locations = current_locations.difference(new_locations)
+        def create_indexed_time_dict(key_list):
+            base = timeutils.utcnow()
+            return {
+                # NOTE(u_glide): Incrementing timestamp by microseconds to make
+                # timestamp order match index order.
+                key: base + datetime.timedelta(microseconds=index)
+                for index, key in enumerate(key_list)
+            }

-            for location in location_rows:
-                if location['path'] in delete_locations:
-                    location.update({'deleted': True})
-                    location.save(session=session)
-        else:
-            export_locations = list(current_locations.union(new_locations))
+        indexed_update_time = create_indexed_time_dict(export_locations)
+
+        for location in location_rows:
+            if delete and location['path'] not in export_locations:
+                location.update({'deleted': True})
+            else:
+                updated_at = indexed_update_time[location['path']]
+                location.update({
+                    'updated_at': updated_at,
+                    'deleted': False,
+                })
+
+            location.save(session=session)

         # Now add new export locations
-        for path in add_locations:
+        for path in export_locations:
+            if path in current_locations:
+                # Already updated
+                continue
+
             location_ref = models.ShareExportLocations()
-            location_ref.update({'path': path, 'share_id': share_id})
+            location_ref.update({
+                'path': path,
+                'share_id': share_id,
+                'updated_at': indexed_update_time[path],
+                'deleted': False,
+            })
+
             location_ref.save(session=session)

-    return export_locations
+    if delete:
+        return export_locations
+
+    return get_path_list_from_rows(_share_export_locations_get(
+        context, share_id, session=session))


 #################################
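
The NOTE(u_glide) trick above is the heart of the ordering guarantee: each path gets a synthetic updated_at offset by its list index in microseconds, so ordering rows by updated_at reproduces the driver-supplied order. A standalone editor's sketch of the same idea follows; plain datetime.datetime.utcnow() stands in for oslo's timeutils.utcnow(), and the paths are the same fake values used in the new test.

    import datetime


    def create_indexed_time_dict(key_list):
        # Same scheme as above: base timestamp plus the list index in microseconds.
        base = datetime.datetime.utcnow()
        return {key: base + datetime.timedelta(microseconds=index)
                for index, key in enumerate(key_list)}


    paths = ['fake4/4', 'fake2/2', 'fake3/3']
    stamps = create_indexed_time_dict(paths)

    # Sorting by the synthetic timestamps recovers the original order, which is
    # exactly what order_by("updated_at") does for the export location rows.
    assert sorted(paths, key=stamps.get) == paths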


@@ -255,7 +255,14 @@ class ShareDriver(object):
         """

     def ensure_share(self, context, share, share_server=None):
-        """Invoked to sure that share is exported."""
+        """Invoked to ensure that share is exported.
+
+        Driver can use this method to update the list of export locations of
+        the share if it changes. To do that, you should return list with
+        export locations.
+
+        :return None or list with export locations
+        """
         raise NotImplementedError()

     def allow_access(self, context, share, access, share_server=None):
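
A hypothetical driver using the new contract might look like the sketch below; ExampleShareDriver and its export paths are illustrative only and not part of the commit. Returning None instead keeps the previous behaviour of leaving the stored export locations untouched.

    from manila.share import driver


    class ExampleShareDriver(driver.ShareDriver):
        """Illustrative driver sketch; not part of this change."""

        def ensure_share(self, context, share, share_server=None):
            # A real driver would ask its backend for the share's current export
            # paths; the manager persists the returned list in this exact order.
            return ['10.0.0.1:/shares/%s' % share['id'],
                    '10.0.0.2:/shares/%s' % share['id']]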


@@ -125,47 +125,53 @@ class ShareManager(manager.SchedulerDependentManager):
         shares = self.db.share_get_all_by_host(ctxt, self.host)
         LOG.debug("Re-exporting %s shares", len(shares))
         for share in shares:
-            if share['status'] == 'available':
-                self._ensure_share_has_pool(ctxt, share)
-                share_server = self._get_share_server(ctxt, share)
-                try:
-                    self.driver.ensure_share(
-                        ctxt, share, share_server=share_server)
-                except Exception as e:
-                    LOG.error(
-                        _LE("Caught exception trying ensure share '%(s_id)s'. "
-                            "Exception: \n%(e)s."),
-                        {'s_id': share['id'], 'e': six.text_type(e)},
-                    )
-                    continue
-
-                rules = self.db.share_access_get_all_for_share(ctxt,
-                                                               share['id'])
-                for access_ref in rules:
-                    if access_ref['state'] == access_ref.STATE_ACTIVE:
-                        try:
-                            self.driver.allow_access(ctxt, share,
-                                                     access_ref,
-                                                     share_server=share_server)
-                        except exception.ShareAccessExists:
-                            pass
-                        except Exception as e:
-                            LOG.error(
-                                _LE("Unexpected exception during share access"
-                                    " allow operation. Share id is '%(s_id)s'"
-                                    ", access rule type is '%(ar_type)s', "
-                                    "access rule id is '%(ar_id)s', exception"
-                                    " is '%(e)s'."),
-                                {'s_id': share['id'],
-                                 'ar_type': access_ref['access_type'],
-                                 'ar_id': access_ref['id'],
-                                 'e': six.text_type(e)},
-                            )
-            else:
+            if share['status'] != 'available':
                 LOG.info(
                     _LI("Share %(name)s: skipping export, because it has "
                         "'%(status)s' status."),
                     {'name': share['name'], 'status': share['status']},
                 )
+                continue
+
+            self._ensure_share_has_pool(ctxt, share)
+            share_server = self._get_share_server(ctxt, share)
+
+            try:
+                export_locations = self.driver.ensure_share(
+                    ctxt, share, share_server=share_server)
+            except Exception as e:
+                LOG.error(
+                    _LE("Caught exception trying ensure share '%(s_id)s'. "
+                        "Exception: \n%(e)s."),
+                    {'s_id': share['id'], 'e': six.text_type(e)},
+                )
+                continue
+
+            if export_locations:
+                self.db.share_export_locations_update(
+                    ctxt, share['id'], export_locations)
+
+            rules = self.db.share_access_get_all_for_share(ctxt, share['id'])
+
+            for access_ref in rules:
+                if access_ref['state'] != access_ref.STATE_ACTIVE:
+                    continue
+
+                try:
+                    self.driver.allow_access(ctxt, share, access_ref,
+                                             share_server=share_server)
+                except exception.ShareAccessExists:
+                    pass
+                except Exception as e:
+                    LOG.error(
+                        _LE("Unexpected exception during share access"
+                            " allow operation. Share id is '%(s_id)s'"
+                            ", access rule type is '%(ar_type)s', "
+                            "access rule id is '%(ar_id)s', exception"
+                            " is '%(e)s'."),
+                        {'s_id': share['id'],
+                         'ar_type': access_ref['access_type'],
+                         'ar_id': access_ref['id'],
+                         'e': six.text_type(e)},
+                    )

         self.publish_service_capabilities(ctxt)


@@ -50,3 +50,29 @@ class SQLAlchemyAPIShareTestCase(test.TestCase):
                              ignored_keys=['share_type',
                                            'share_type_id',
                                            'export_locations'])
+
+    def test_share_export_locations_update_valid_order(self):
+        share = api.share_create(self.ctxt, {'host': 'foobar'})
+        initial_locations = ['fake1/1/', 'fake2/2', 'fake3/3']
+        update_locations = ['fake4/4', 'fake2/2', 'fake3/3']
+
+        # add initial locations
+        api.share_export_locations_update(self.ctxt, share['id'],
+                                          initial_locations, False)
+        # update locations
+        api.share_export_locations_update(self.ctxt, share['id'],
+                                          update_locations, True)
+        actual_result = api.share_export_locations_get(self.ctxt, share['id'])
+
+        # actual result should contain locations in exact same order
+        self.assertTrue(actual_result == update_locations)
+
+    def test_share_export_locations_update_string(self):
+        share = api.share_create(self.ctxt, {'host': 'foobar'})
+        initial_location = 'fake1/1/'
+
+        api.share_export_locations_update(self.ctxt, share['id'],
+                                          initial_location, False)
+        actual_result = api.share_export_locations_get(self.ctxt, share['id'])
+
+        self.assertTrue(actual_result == [initial_location])


@@ -167,11 +167,15 @@ class ShareManagerTestCase(test.TestCase):
             FakeAccessRule(state='active'),
             FakeAccessRule(state='error'),
         ]
+        fake_export_locations = ['fake/path/1', 'fake/path']
         share_server = 'fake_share_server_type_does_not_matter'
         self.mock_object(self.share_manager.db,
                          'share_get_all_by_host',
                          mock.Mock(return_value=shares))
-        self.mock_object(self.share_manager.driver, 'ensure_share')
+        self.mock_object(self.share_manager.db,
+                         'share_export_locations_update')
+        self.mock_object(self.share_manager.driver, 'ensure_share',
+                         mock.Mock(return_value=fake_export_locations))
         self.mock_object(self.share_manager, '_ensure_share_has_pool')
         self.mock_object(self.share_manager, '_get_share_server',
                          mock.Mock(return_value=share_server))
@@ -189,6 +193,9 @@
         # verification of call
         self.share_manager.db.share_get_all_by_host.assert_called_once_with(
             utils.IsAMatcher(context.RequestContext), self.share_manager.host)
+        exports_update = self.share_manager.db.share_export_locations_update
+        exports_update.assert_called_once_with(
+            mock.ANY, 'fake_id_1', fake_export_locations)
         self.share_manager.driver.do_setup.assert_called_once_with(
             utils.IsAMatcher(context.RequestContext))
         self.share_manager.driver.check_for_setup_error.\
@@ -281,7 +288,8 @@
         self.mock_object(self.share_manager.db,
                          'share_get_all_by_host',
                          mock.Mock(return_value=shares))
-        self.mock_object(self.share_manager.driver, 'ensure_share')
+        self.mock_object(self.share_manager.driver, 'ensure_share',
+                         mock.Mock(return_value=None))
        self.mock_object(self.share_manager, '_ensure_share_has_pool')
        self.mock_object(self.share_manager, '_get_share_server',
                         mock.Mock(return_value=share_server))