Implement share backup

Add share backup feature in Data Copy Service and
Share Service. It allows the user to create, restore
and delete backups, as well as list backups and show
the details of a specific backup.

APIImpact
DOCImpact

Change-Id: I7d10cf47864cd21932315375d84dc728ff738f23
Implement: blueprint share-backup
This commit is contained in:
zhongjun 2016-07-19 17:46:35 +08:00 committed by Kiran Pawar
parent 364000c140
commit 0b99fdaa9a
43 changed files with 3481 additions and 236 deletions

View File

@ -197,6 +197,7 @@ REST_API_VERSION_HISTORY = """
* 2.78 - Added Share Network Subnet Metadata to Metadata API. * 2.78 - Added Share Network Subnet Metadata to Metadata API.
* 2.79 - Added ``with_count`` in share snapshot list API to get total * 2.79 - Added ``with_count`` in share snapshot list API to get total
count info. count info.
* 2.80 - Added share backup APIs.
""" """
@ -204,7 +205,7 @@ REST_API_VERSION_HISTORY = """
# The default api version request is defined to be the # The default api version request is defined to be the
# minimum version of the API supported. # minimum version of the API supported.
_MIN_API_VERSION = "2.0" _MIN_API_VERSION = "2.0"
_MAX_API_VERSION = "2.79" _MAX_API_VERSION = "2.80"
DEFAULT_API_VERSION = _MIN_API_VERSION DEFAULT_API_VERSION = _MIN_API_VERSION

View File

@ -399,7 +399,6 @@ ____
2.72 2.72
---- ----
Added 'share_network' option to share replica create API. Added 'share_network' option to share replica create API.
2.73 (Maximum in Zed) 2.73 (Maximum in Zed)
@ -429,5 +428,9 @@ ____
to Share Network Subnets. to Share Network Subnets.
2.79 2.79
------------------------ ----
Added ``with_count`` in share snapshot list API to get total count info. Added ``with_count`` in share snapshot list API to get total count info.
2.80
----
Added share backup APIs.

View File

@ -149,7 +149,9 @@ class QuotaSetsMixin(object):
body.get('share_group_snapshots') is None and body.get('share_group_snapshots') is None and
body.get('share_replicas') is None and body.get('share_replicas') is None and
body.get('replica_gigabytes') is None and body.get('replica_gigabytes') is None and
body.get('per_share_gigabytes') is None): body.get('per_share_gigabytes') is None and
body.get('backups') is None and
body.get('backup_gigabytes') is None):
msg = _("Must supply at least one quota field to update.") msg = _("Must supply at least one quota field to update.")
raise webob.exc.HTTPBadRequest(explanation=msg) raise webob.exc.HTTPBadRequest(explanation=msg)
@ -346,6 +348,9 @@ class QuotaSetsController(QuotaSetsMixin, wsgi.Controller):
elif req.api_version_request < api_version.APIVersionRequest("2.62"): elif req.api_version_request < api_version.APIVersionRequest("2.62"):
self._ensure_specific_microversion_args_are_absent( self._ensure_specific_microversion_args_are_absent(
body, ['per_share_gigabytes'], "2.62") body, ['per_share_gigabytes'], "2.62")
elif req.api_version_request < api_version.APIVersionRequest("2.80"):
self._ensure_specific_microversion_args_are_absent(
body, ['backups', 'backup_gigabytes'], "2.80")
return self._update(req, id, body) return self._update(req, id, body)
@wsgi.Controller.api_version('2.7') @wsgi.Controller.api_version('2.7')

View File

@ -35,6 +35,7 @@ from manila.api.v2 import quota_sets
from manila.api.v2 import services from manila.api.v2 import services
from manila.api.v2 import share_access_metadata from manila.api.v2 import share_access_metadata
from manila.api.v2 import share_accesses from manila.api.v2 import share_accesses
from manila.api.v2 import share_backups
from manila.api.v2 import share_export_locations from manila.api.v2 import share_export_locations
from manila.api.v2 import share_group_snapshots from manila.api.v2 import share_group_snapshots
from manila.api.v2 import share_group_type_specs from manila.api.v2 import share_group_type_specs
@ -643,3 +644,10 @@ class APIRouter(manila.api.openstack.APIRouter):
controller=access_metadata_controller, controller=access_metadata_controller,
action="delete", action="delete",
conditions={"method": ["DELETE"]}) conditions={"method": ["DELETE"]})
self.resources['share-backups'] = share_backups.create_resource()
mapper.resource("share-backup",
"share-backups",
controller=self.resources['share-backups'],
collection={'detail': 'GET'},
member={'action': 'POST'})

View File

@ -0,0 +1,222 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""The Share Backups API."""
import webob
from webob import exc
from manila.api import common
from manila.api.openstack import wsgi
from manila.api.views import share_backups as backup_view
from manila import db
from manila import exception
from manila.i18n import _
from manila import share
MIN_SUPPORTED_API_VERSION = '2.80'
class ShareBackupController(wsgi.Controller, wsgi.AdminActionsMixin):
    """The Share Backup API controller for the OpenStack API.

    Exposes create, list (summary and detail), show, delete, update and
    restore operations for share backups. Every handler requires API
    microversion 2.80 or later and is flagged experimental; authorization
    is enforced per-action through the ``share_backup`` policy rules.
    """
    resource_name = 'share_backup'
    _view_builder_class = backup_view.BackupViewBuilder

    def __init__(self):
        super(ShareBackupController, self).__init__()
        self.share_api = share.API()

    # _update/_get back the generic admin actions supplied by
    # wsgi.AdminActionsMixin (e.g. status reset), which persist and fetch
    # backups through the DB API.
    def _update(self, *args, **kwargs):
        db.share_backup_update(*args, **kwargs)

    def _get(self, *args, **kwargs):
        return db.share_backup_get(*args, **kwargs)

    @wsgi.Controller.api_version(MIN_SUPPORTED_API_VERSION, experimental=True)
    def index(self, req):
        """Return a summary list of backups."""
        return self._get_backups(req)

    @wsgi.Controller.api_version(MIN_SUPPORTED_API_VERSION, experimental=True)
    def detail(self, req):
        """Returns a detailed list of backups."""
        return self._get_backups(req, is_detail=True)

    @wsgi.Controller.authorize('get_all')
    def _get_backups(self, req, is_detail=False):
        """Returns list of backups.

        Supports pagination (``limit``/``offset``), sorting
        (``sort_key``/``sort_dir``) and filtering by ``share_id``; any
        remaining query parameters are passed through to the DB layer as
        filters.
        """
        context = req.environ['manila.context']
        search_opts = {}
        search_opts.update(req.GET)

        params = common.get_pagination_params(req)
        limit, offset = [params.get('limit'), params.get('offset')]
        # Pagination keys were consumed above; drop them so they do not
        # reach the DB layer as filters.
        search_opts.pop('limit', None)
        search_opts.pop('offset', None)
        sort_key, sort_dir = common.get_sort_params(search_opts)
        # Map the user-facing sort keys onto their DB column names.
        key_dict = {"name": "display_name",
                    "description": "display_description"}
        for key in key_dict:
            if sort_key == key:
                sort_key = key_dict[key]

        # When filtering by share, validate that the share exists before
        # querying for its backups.
        share_id = req.params.get('share_id')
        if share_id:
            try:
                self.share_api.get(context, share_id)
                search_opts.update({'share_id': share_id})
            except exception.NotFound:
                msg = _("No share exists with ID %s.")
                raise exc.HTTPBadRequest(explanation=msg % share_id)

        backups = db.share_backups_get_all(context,
                                           filters=search_opts,
                                           limit=limit,
                                           offset=offset,
                                           sort_key=sort_key,
                                           sort_dir=sort_dir)

        if is_detail:
            backups = self._view_builder.detail_list(req, backups)
        else:
            backups = self._view_builder.summary_list(req, backups)
        return backups

    @wsgi.Controller.api_version(MIN_SUPPORTED_API_VERSION, experimental=True)
    @wsgi.Controller.authorize('get')
    def show(self, req, id):
        """Return data about the given backup."""
        context = req.environ['manila.context']
        try:
            backup = db.share_backup_get(context, id)
        except exception.ShareBackupNotFound:
            msg = _("No backup exists with ID %s.")
            raise exc.HTTPNotFound(explanation=msg % id)
        return self._view_builder.detail(req, backup)

    @wsgi.Controller.api_version(MIN_SUPPORTED_API_VERSION, experimental=True)
    @wsgi.Controller.authorize
    @wsgi.response(202)
    def create(self, req, body):
        """Add a backup to an existing share.

        The request body must contain a ``share_backup`` object with a
        ``share_id``; soft-deleted shares are rejected.
        """
        context = req.environ['manila.context']
        if not self.is_valid_body(body, 'share_backup'):
            msg = _("Body does not contain 'share_backup' information.")
            raise exc.HTTPUnprocessableEntity(explanation=msg)

        backup = body.get('share_backup')
        share_id = backup.get('share_id')
        if not share_id:
            msg = _("'share_id' is missing from the request body.")
            raise exc.HTTPBadRequest(explanation=msg)

        try:
            # NOTE(review): this local 'share' shadows the imported
            # manila.share module for the rest of this method.
            share = self.share_api.get(context, share_id)
        except exception.NotFound:
            msg = _("No share exists with ID %s.")
            raise exc.HTTPBadRequest(explanation=msg % share_id)
        if share.get('is_soft_deleted'):
            msg = _("Backup can not be created for share '%s' "
                    "since it has been soft deleted.") % share_id
            raise exc.HTTPForbidden(explanation=msg)

        try:
            backup = self.share_api.create_share_backup(context, share, backup)
        except (exception.InvalidBackup,
                exception.InvalidShare) as e:
            raise exc.HTTPBadRequest(explanation=e.msg)
        except exception.ShareBusyException as e:
            raise exc.HTTPConflict(explanation=e.msg)
        return self._view_builder.detail(req, backup)

    @wsgi.Controller.api_version(MIN_SUPPORTED_API_VERSION, experimental=True)
    @wsgi.Controller.authorize
    def delete(self, req, id):
        """Delete a backup."""
        context = req.environ['manila.context']
        try:
            backup = db.share_backup_get(context, id)
        except exception.ShareBackupNotFound:
            msg = _("No backup exists with ID %s.")
            raise exc.HTTPNotFound(explanation=msg % id)

        try:
            self.share_api.delete_share_backup(context, backup)
        except exception.InvalidBackup as e:
            raise exc.HTTPBadRequest(explanation=e.msg)
        # Deletion is asynchronous; 202 signals the request was accepted.
        return webob.Response(status_int=202)

    @wsgi.Controller.api_version(MIN_SUPPORTED_API_VERSION, experimental=True)
    @wsgi.action('restore')
    @wsgi.Controller.authorize
    @wsgi.response(202)
    def restore(self, req, id, body):
        """Restore an existing backup to a share."""
        context = req.environ['manila.context']
        try:
            backup = db.share_backup_get(context, id)
        except exception.ShareBackupNotFound:
            msg = _("No backup exists with ID %s.")
            raise exc.HTTPNotFound(explanation=msg % id)

        try:
            restored = self.share_api.restore_share_backup(context, backup)
        except (exception.InvalidShare,
                exception.InvalidBackup) as e:
            raise exc.HTTPBadRequest(explanation=e.msg)
        retval = self._view_builder.restore_summary(req, restored)
        return retval

    @wsgi.Controller.api_version(MIN_SUPPORTED_API_VERSION, experimental=True)
    @wsgi.Controller.authorize
    @wsgi.response(202)
    def update(self, req, id, body):
        """Update a backup.

        Only ``name`` and ``description`` are recognized; they are mapped
        to the DB columns ``display_name``/``display_description``.
        """
        context = req.environ['manila.context']
        if not self.is_valid_body(body, 'share_backup'):
            msg = _("Body does not contain 'share_backup' information.")
            raise exc.HTTPUnprocessableEntity(explanation=msg)

        try:
            backup = db.share_backup_get(context, id)
        except exception.ShareBackupNotFound:
            msg = _("No backup exists with ID %s.")
            raise exc.HTTPNotFound(explanation=msg % id)

        backup_update = body.get('share_backup')
        update_dict = {}
        # NOTE: pop() mutates the parsed request body in place.
        if 'name' in backup_update:
            update_dict['display_name'] = backup_update.pop('name')
        if 'description' in backup_update:
            update_dict['display_description'] = (
                backup_update.pop('description'))

        backup = self.share_api.update_share_backup(context, backup,
                                                    update_dict)
        return self._view_builder.detail(req, backup)
def create_resource():
    """Build the WSGI resource wrapping the share backup controller."""
    controller = ShareBackupController()
    return wsgi.Resource(controller)

View File

@ -25,7 +25,8 @@ class ViewBuilder(common.ViewBuilder):
_collection_name = "limits" _collection_name = "limits"
_detail_version_modifiers = [ _detail_version_modifiers = [
"add_share_replica_quotas", "add_share_replica_quotas",
"add_share_group_quotas" "add_share_group_quotas",
"add_share_backup_quotas",
] ]
def build(self, request, rate_limits, absolute_limits): def build(self, request, rate_limits, absolute_limits):
@ -128,3 +129,12 @@ class ViewBuilder(common.ViewBuilder):
limit_names["in_use"]["share_replicas"] = ["totalShareReplicasUsed"] limit_names["in_use"]["share_replicas"] = ["totalShareReplicasUsed"]
limit_names["in_use"]["replica_gigabytes"] = ( limit_names["in_use"]["replica_gigabytes"] = (
["totalReplicaGigabytesUsed"]) ["totalReplicaGigabytesUsed"])
@common.ViewBuilder.versioned_method("2.80")
def add_share_backup_quotas(self, request, limit_names, absolute_limits):
limit_names["limit"]["backups"] = ["maxTotalShareBackups"]
limit_names["limit"]["backup_gigabytes"] = (
["maxTotalBackupGigabytes"])
limit_names["in_use"]["backups"] = ["totalShareBackupsUsed"]
limit_names["in_use"]["backup_gigabytes"] = (
["totalBackupGigabytesUsed"])

View File

@ -23,6 +23,7 @@ class ViewBuilder(common.ViewBuilder):
"add_share_group_quotas", "add_share_group_quotas",
"add_share_replica_quotas", "add_share_replica_quotas",
"add_per_share_gigabytes_quotas", "add_per_share_gigabytes_quotas",
"add_share_backup_quotas",
] ]
def detail_list(self, request, quota_class_set, quota_class=None): def detail_list(self, request, quota_class_set, quota_class=None):
@ -58,3 +59,8 @@ class ViewBuilder(common.ViewBuilder):
def add_per_share_gigabytes_quotas(self, context, view, quota_class_set): def add_per_share_gigabytes_quotas(self, context, view, quota_class_set):
view['per_share_gigabytes'] = quota_class_set.get( view['per_share_gigabytes'] = quota_class_set.get(
'per_share_gigabytes') 'per_share_gigabytes')
@common.ViewBuilder.versioned_method("2.80")
def add_share_backup_quotas(self, context, view, quota_class_set):
view['backups'] = quota_class_set.get('backups')
view['backup_gigabytes'] = quota_class_set.get('backup_gigabytes')

View File

@ -23,6 +23,7 @@ class ViewBuilder(common.ViewBuilder):
"add_share_group_quotas", "add_share_group_quotas",
"add_share_replica_quotas", "add_share_replica_quotas",
"add_per_share_gigabytes_quotas", "add_per_share_gigabytes_quotas",
"add_share_backup_quotas",
] ]
def detail_list(self, request, quota_set, project_id=None, def detail_list(self, request, quota_set, project_id=None,
@ -64,3 +65,8 @@ class ViewBuilder(common.ViewBuilder):
@common.ViewBuilder.versioned_method("2.62") @common.ViewBuilder.versioned_method("2.62")
def add_per_share_gigabytes_quotas(self, context, view, quota_set): def add_per_share_gigabytes_quotas(self, context, view, quota_set):
view['per_share_gigabytes'] = quota_set.get('per_share_gigabytes') view['per_share_gigabytes'] = quota_set.get('per_share_gigabytes')
@common.ViewBuilder.versioned_method("2.80")
def add_share_backup_quotas(self, context, view, quota_set):
view['backups'] = quota_set.get('backups')
view['backup_gigabytes'] = quota_set.get('backup_gigabytes')

View File

@ -0,0 +1,87 @@
# Copyright 2023 Cloudification GmbH.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from manila.api import common
from manila import policy
class BackupViewBuilder(common.ViewBuilder):
    """Model a server API response as a python dictionary."""

    _collection_name = 'share_backups'
    _collection_links = 'share_backup_links'

    def summary_list(self, request, backups):
        """Summary view of a list of backups."""
        return self._list_view(self.summary, request, backups)

    def detail_list(self, request, backups):
        """Detailed view of a list of backups."""
        return self._list_view(self.detail, request, backups)

    def summary(self, request, backup):
        """Generic, non-detailed view of a share backup."""
        return {
            'share_backup': {
                'id': backup.get('id'),
                'share_id': backup.get('share_id'),
                'backup_state': backup.get('status'),
            }
        }

    def restore_summary(self, request, restore):
        """Generic, non-detailed view of a restore."""
        restore_dict = {
            'backup_id': restore['backup_id'],
            'share_id': restore['share_id'],
        }
        return {'restore': restore_dict}

    def detail(self, request, backup):
        """Detailed view of a single backup."""
        context = request.environ['manila.context']
        # Map response keys onto the backing model fields, preserving the
        # documented key order of the API response.
        field_map = (
            ('id', 'id'),
            ('size', 'size'),
            ('share_id', 'share_id'),
            ('availability_zone', 'availability_zone'),
            ('created_at', 'created_at'),
            ('updated_at', 'updated_at'),
            ('backup_state', 'status'),
            ('name', 'display_name'),
            ('description', 'display_description'),
        )
        backup_dict = {key: backup.get(attr) for key, attr in field_map}
        # Host-level details are only exposed to host administrators.
        if policy.check_is_host_admin(context):
            backup_dict['host'] = backup.get('host')
            backup_dict['topic'] = backup.get('topic')
        return {'share_backup': backup_dict}

    def _list_view(self, func, request, backups):
        """Provide a view for a list of backups."""
        views = [func(request, item)['share_backup'] for item in backups]
        result = {self._collection_name: views}
        links = self._get_collection_links(
            request, backups, self._collection_name)
        if links:
            result[self._collection_links] = links
        return result

View File

@ -48,6 +48,9 @@ STATUS_RESTORING = 'restoring'
STATUS_REVERTING = 'reverting' STATUS_REVERTING = 'reverting'
STATUS_REVERTING_ERROR = 'reverting_error' STATUS_REVERTING_ERROR = 'reverting_error'
STATUS_AWAITING_TRANSFER = 'awaiting_transfer' STATUS_AWAITING_TRANSFER = 'awaiting_transfer'
STATUS_BACKUP_CREATING = 'backup_creating'
STATUS_BACKUP_RESTORING = 'backup_restoring'
STATUS_BACKUP_RESTORING_ERROR = 'backup_restoring_error'
# Transfer resource type # Transfer resource type
SHARE_RESOURCE_TYPE = 'share' SHARE_RESOURCE_TYPE = 'share'
@ -136,6 +139,7 @@ TRANSITIONAL_STATUSES = (
STATUS_MIGRATING, STATUS_MIGRATING_TO, STATUS_MIGRATING, STATUS_MIGRATING_TO,
STATUS_RESTORING, STATUS_REVERTING, STATUS_RESTORING, STATUS_REVERTING,
STATUS_SERVER_MIGRATING, STATUS_SERVER_MIGRATING_TO, STATUS_SERVER_MIGRATING, STATUS_SERVER_MIGRATING_TO,
STATUS_BACKUP_RESTORING, STATUS_BACKUP_CREATING,
) )
INVALID_SHARE_INSTANCE_STATUSES_FOR_ACCESS_RULE_UPDATES = ( INVALID_SHARE_INSTANCE_STATUSES_FOR_ACCESS_RULE_UPDATES = (

View File

@ -0,0 +1,42 @@
# Copyright 2023 Cloudification GmbH.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Base class for all backup drivers."""
class BackupDriver(object):
    """Base class defining the interface for share backup drivers."""

    def __init__(self):
        super(BackupDriver, self).__init__()
        # When True, the driver delegates the actual backup/restore/delete
        # data transfer to the data manager instead of implementing it
        # itself.
        self.use_data_manager = True

    def backup(self, backup, share):
        """Start a backup of a specified share."""
        return None

    def restore(self, backup, share):
        """Restore a saved backup."""
        return None

    def delete(self, backup):
        """Delete a saved backup."""
        return None

    def get_backup_info(self, backup):
        """Get backup capabilities information of driver."""
        return None

View File

View File

@ -0,0 +1,74 @@
# Copyright 2023 Cloudification GmbH.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Implementation of a backup service that uses NFS storage as the backend."""
from oslo_config import cfg
from manila.data import backup_driver
# Configuration options controlling how the NFS backup repository is
# mounted and unmounted by the data service.
nfsbackup_service_opts = [
    cfg.StrOpt('backup_mount_template',
               default='mount -vt %(proto)s %(options)s %(export)s %(path)s',
               help='The template for mounting NFS shares.'),
    cfg.StrOpt('backup_unmount_template',
               default='umount -v %(path)s',
               help='The template for unmounting NFS shares.'),
    cfg.StrOpt('backup_mount_export',
               help='NFS backup export location in hostname:path, '
                    'ipv4addr:path, or "[ipv6addr]:path" format.'),
    cfg.StrOpt('backup_mount_proto',
               default='nfs',
               help='Mount Protocol for mounting NFS shares'),
    cfg.StrOpt('backup_mount_options',
               default='',
               help='Mount options passed to the NFS client. See NFS '
                    'man page for details.'),
]

CONF = cfg.CONF
CONF.register_opts(nfsbackup_service_opts)
class NFSBackupDriver(backup_driver.BackupDriver):
    """Provides backup, restore and delete using NFS supplied repository."""

    def __init__(self):
        # Snapshot the relevant configuration at construction time.
        self.backup_mount_export = CONF.backup_mount_export
        self.backup_mount_template = CONF.backup_mount_template
        self.backup_unmount_template = CONF.backup_unmount_template
        self.backup_mount_options = CONF.backup_mount_options
        self.backup_mount_proto = CONF.backup_mount_proto
        super(NFSBackupDriver, self).__init__()

    def get_backup_info(self, backup):
        """Get backup info of a specified backup."""
        template_args = {
            'proto': self.backup_mount_proto,
            'options': self.backup_mount_options,
            'export': self.backup_mount_export,
            # Keep the path placeholder unresolved so the caller can
            # substitute the actual mount path later.
            'path': '%(path)s',
        }
        return {
            'mount': self.backup_mount_template % template_args,
            'unmount': self.backup_unmount_template,
        }

View File

@ -79,17 +79,16 @@ class DataServiceHelper(object):
# NOTE(ganso): Cleanup methods do not throw exceptions, since the # NOTE(ganso): Cleanup methods do not throw exceptions, since the
# exceptions that should be thrown are the ones that call the cleanup # exceptions that should be thrown are the ones that call the cleanup
def cleanup_data_access(self, access_ref_list, share_instance_id): def cleanup_data_access(self, access_ref_list, share_instance):
try: try:
self.deny_access_to_data_service( self.deny_access_to_data_service(
access_ref_list, share_instance_id) access_ref_list, share_instance)
except Exception: except Exception:
LOG.warning("Could not cleanup access rule of share %s.", LOG.warning("Could not cleanup access rule of share %s.",
self.share['id']) self.share['id'])
def cleanup_temp_folder(self, instance_id, mount_path): def cleanup_temp_folder(self, mount_path, instance_id):
try: try:
path = os.path.join(mount_path, instance_id) path = os.path.join(mount_path, instance_id)
if os.path.exists(path): if os.path.exists(path):
@ -102,12 +101,10 @@ class DataServiceHelper(object):
'instance_id': instance_id, 'instance_id': instance_id,
'share_id': self.share['id']}) 'share_id': self.share['id']})
def cleanup_unmount_temp_folder(self, unmount_template, mount_path, def cleanup_unmount_temp_folder(self, unmount_info, mount_path):
share_instance_id): share_instance_id = unmount_info.get('share_instance_id')
try: try:
self.unmount_share_instance(unmount_template, mount_path, self.unmount_share_instance_or_backup(unmount_info, mount_path)
share_instance_id)
except Exception: except Exception:
LOG.warning("Could not unmount folder of instance" LOG.warning("Could not unmount folder of instance"
" %(instance_id)s for data copy of " " %(instance_id)s for data copy of "
@ -251,16 +248,32 @@ class DataServiceHelper(object):
if os.path.exists(path): if os.path.exists(path):
raise exception.Found("Folder %s was found." % path) raise exception.Found("Folder %s was found." % path)
def mount_share_instance(self, mount_template, mount_path, def mount_share_instance_or_backup(self, mount_info, mount_path):
share_instance): mount_point = mount_info.get('mount_point')
mount_template = mount_info.get('mount')
share_instance_id = mount_info.get('share_instance_id')
backup = mount_info.get('backup')
restore = mount_info.get('restore')
backup_id = mount_info.get('backup_id')
path = os.path.join(mount_path, share_instance['id']) if share_instance_id:
path = os.path.join(mount_path, share_instance_id)
else:
path = ''
options = CONF.data_node_mount_options # overwrite path in case different mount point is explicitly provided
options = {k.lower(): v for k, v in options.items()} if mount_point and mount_point != path:
proto_options = options.get(share_instance['share_proto'].lower()) path = mount_point
if not proto_options: if share_instance_id:
share_instance = self.db.share_instance_get(
self.context, share_instance_id, with_share_data=True)
options = CONF.data_node_mount_options
options = {k.lower(): v for k, v in options.items()}
proto_options = options.get(
share_instance['share_proto'].lower(), '')
else:
# For backup proto_options are included in mount_template
proto_options = '' proto_options = ''
if not os.path.exists(path): if not os.path.exists(path):
@ -269,16 +282,36 @@ class DataServiceHelper(object):
mount_command = mount_template % {'path': path, mount_command = mount_template % {'path': path,
'options': proto_options} 'options': proto_options}
utils.execute(*(mount_command.split()), run_as_root=True) utils.execute(*(mount_command.split()), run_as_root=True)
if backup:
# we create new folder, which named with backup_id. To distinguish
# different backup data at mount points
backup_folder = os.path.join(path, backup_id)
if not os.path.exists(backup_folder):
os.makedirs(backup_folder)
self._check_dir_exists(backup_folder)
if restore:
# backup_folder should exist after mount, else backup is
# already deleted
backup_folder = os.path.join(path, backup_id)
if not os.path.exists(backup_folder):
raise exception.ShareBackupNotFound(backup_id=backup_id)
def unmount_share_instance(self, unmount_template, mount_path, def unmount_share_instance_or_backup(self, unmount_info, mount_path):
share_instance_id): mount_point = unmount_info.get('mount_point')
unmount_template = unmount_info.get('unmount')
share_instance_id = unmount_info.get('share_instance_id')
path = os.path.join(mount_path, share_instance_id) if share_instance_id:
path = os.path.join(mount_path, share_instance_id)
else:
path = ''
# overwrite path in case different mount point is explicitly provided
if mount_point and mount_point != path:
path = mount_point
unmount_command = unmount_template % {'path': path} unmount_command = unmount_template % {'path': path}
utils.execute(*(unmount_command.split()), run_as_root=True) utils.execute(*(unmount_command.split()), run_as_root=True)
try: try:

View File

@ -17,9 +17,13 @@ Data Service
""" """
import os import os
import shutil
from oslo_config import cfg from oslo_config import cfg
from oslo_log import log from oslo_log import log
from oslo_service import periodic_task
from oslo_utils import excutils
from oslo_utils import importutils
from manila.common import constants from manila.common import constants
from manila import context from manila import context
@ -27,36 +31,93 @@ from manila.data import helper
from manila.data import utils as data_utils from manila.data import utils as data_utils
from manila import exception from manila import exception
from manila import manager from manila import manager
from manila import quota
from manila.share import rpcapi as share_rpc from manila.share import rpcapi as share_rpc
from manila import utils
QUOTAS = quota.QUOTAS
from manila.i18n import _ from manila.i18n import _
LOG = log.getLogger(__name__) LOG = log.getLogger(__name__)
backup_opts = [
cfg.StrOpt(
'backup_driver',
default='manila.data.drivers.nfs.NFSBackupDriver',
help='Driver to use for backups.'),
cfg.StrOpt(
'backup_share_mount_template',
default='mount -vt %(proto)s %(options)s %(export)s %(path)s',
help="The template for mounting shares during backup. Must specify "
"the executable with all necessary parameters for the protocol "
"supported. 'proto' template element may not be required if "
"included in the command. 'export' and 'path' template elements "
"are required. It is advisable to separate different commands "
"per backend."),
cfg.StrOpt(
'backup_share_unmount_template',
default='umount -v %(path)s',
help="The template for unmounting shares during backup. Must "
"specify the executable with all necessary parameters for the "
"protocol supported. 'path' template element is required. It is "
"advisable to separate different commands per backend."),
cfg.ListOpt(
'backup_ignore_files',
default=['lost+found'],
help="List of files and folders to be ignored when backing up "
"shares. Items should be names (not including any path)."),
cfg.DictOpt(
'backup_protocol_access_mapping',
default={'ip': ['nfs']},
help="Protocol access mapping for backup. Should be a "
"dictionary comprised of "
"{'access_type1': ['share_proto1', 'share_proto2'],"
" 'access_type2': ['share_proto2', 'share_proto3']}."),
]
data_opts = [ data_opts = [
cfg.StrOpt( cfg.StrOpt(
'mount_tmp_location', 'mount_tmp_location',
default='/tmp/', default='/tmp/',
help="Temporary path to create and mount shares during migration."), help="Temporary path to create and mount shares during migration."),
cfg.StrOpt(
'backup_mount_tmp_location',
default='/tmp/',
help="Temporary path to create and mount backup during share backup."),
cfg.BoolOpt( cfg.BoolOpt(
'check_hash', 'check_hash',
default=False, default=False,
help="Chooses whether hash of each file should be checked on data " help="Chooses whether hash of each file should be checked on data "
"copying."), "copying."),
cfg.IntOpt(
'backup_continue_update_interval',
default=10,
help='This value, specified in seconds, determines how often '
'the data manager will poll to perform the next steps of '
'backup such as fetch the progress of backup.'),
cfg.IntOpt(
'restore_continue_update_interval',
default=10,
help='This value, specified in seconds, determines how often '
'the data manager will poll to perform the next steps of '
'restore such as fetch the progress of restore.')
] ]
CONF = cfg.CONF CONF = cfg.CONF
CONF.register_opts(data_opts) CONF.register_opts(data_opts)
CONF.register_opts(backup_opts)
class DataManager(manager.Manager): class DataManager(manager.Manager):
"""Receives requests to handle data and sends responses.""" """Receives requests to handle data and sends responses."""
RPC_API_VERSION = '1.0' RPC_API_VERSION = '1.1'
def __init__(self, service_name=None, *args, **kwargs): def __init__(self, service_name=None, *args, **kwargs):
super(DataManager, self).__init__(*args, **kwargs) super(DataManager, self).__init__(*args, **kwargs)
self.backup_driver = importutils.import_object(CONF.backup_driver)
self.busy_tasks_shares = {} self.busy_tasks_shares = {}
self.service_id = None self.service_id = None
@ -94,10 +155,29 @@ class DataManager(manager.Manager):
os.path.join(mount_path, dest_share_instance_id), os.path.join(mount_path, dest_share_instance_id),
ignore_list, CONF.check_hash) ignore_list, CONF.check_hash)
self._copy_share_data( info_src = {
context, copy, share_ref, share_instance_id, 'share_id': share_ref['id'],
dest_share_instance_id, connection_info_src, 'share_instance_id': share_instance_id,
connection_info_dest) 'mount': connection_info_src['mount'],
'unmount': connection_info_src['unmount'],
'access_mapping': connection_info_src.get(
'access_mapping', {}),
'mount_point': os.path.join(mount_path,
share_instance_id),
}
info_dest = {
'share_id': None,
'share_instance_id': dest_share_instance_id,
'mount': connection_info_dest['mount'],
'unmount': connection_info_dest['unmount'],
'access_mapping': connection_info_dest.get(
'access_mapping', {}),
'mount_point': os.path.join(mount_path,
dest_share_instance_id),
}
self._copy_share_data(context, copy, info_src, info_dest)
except exception.ShareDataCopyCancelled: except exception.ShareDataCopyCancelled:
share_rpcapi.migration_complete( share_rpcapi.migration_complete(
context, share_instance_ref, dest_share_instance_id) context, share_instance_ref, dest_share_instance_id)
@ -151,146 +231,501 @@ class DataManager(manager.Manager):
LOG.error(msg) LOG.error(msg)
raise exception.InvalidShare(reason=msg) raise exception.InvalidShare(reason=msg)
def _copy_share_data( def _copy_share_data(self, context, copy, info_src, info_dest):
self, context, copy, src_share, share_instance_id, """Copy share data between source and destination.
dest_share_instance_id, connection_info_src, connection_info_dest):
copied = False e.g. During migration source and destination both are shares
and during backup create, destination is backup location
while during backup restore, source is backup location.
1. Mount source and destination. Create access rules.
2. Perform copy
3. Unmount source and destination. Cleanup access rules.
"""
mount_path = CONF.mount_tmp_location mount_path = CONF.mount_tmp_location
share_instance = self.db.share_instance_get( if info_src.get('share_id'):
context, share_instance_id, with_share_data=True) share_id = info_src['share_id']
dest_share_instance = self.db.share_instance_get( elif info_dest.get('share_id'):
context, dest_share_instance_id, with_share_data=True) share_id = info_dest['share_id']
else:
msg = _("Share data copy failed because of undefined share.")
LOG.exception(msg)
raise exception.ShareDataCopyFailed(reason=msg)
share_instance_src = None
share_instance_dest = None
if info_src['share_instance_id']:
share_instance_src = self.db.share_instance_get(
context, info_src['share_instance_id'], with_share_data=True)
if info_dest['share_instance_id']:
share_instance_dest = self.db.share_instance_get(
context, info_dest['share_instance_id'], with_share_data=True)
share = self.db.share_get(context, share_id)
self.db.share_update( self.db.share_update(
context, src_share['id'], context, share['id'],
{'task_state': constants.TASK_STATE_DATA_COPYING_STARTING}) {'task_state': constants.TASK_STATE_DATA_COPYING_STARTING})
helper_src = helper.DataServiceHelper(context, self.db, src_share) helper_src = helper.DataServiceHelper(context, self.db, share)
helper_dest = helper_src helper_dest = helper_src
access_ref_list_src = helper_src.allow_access_to_data_service( if share_instance_src:
share_instance, connection_info_src, dest_share_instance, access_ref_src = helper_src.allow_access_to_data_service(
connection_info_dest) share_instance_src, info_src, share_instance_dest, info_dest)
access_ref_list_dest = access_ref_list_src access_ref_dest = access_ref_src
elif share_instance_dest:
access_ref_src = helper_src.allow_access_to_data_service(
share_instance_dest, info_dest, share_instance_src, info_src)
access_ref_dest = access_ref_src
def _call_cleanups(items): def _call_cleanups(items):
for item in items: for item in items:
if 'unmount_src' == item: if 'unmount_src' == item:
helper_src.cleanup_unmount_temp_folder( helper_src.cleanup_unmount_temp_folder(
connection_info_src['unmount'], mount_path, info_src, mount_path)
share_instance_id)
elif 'temp_folder_src' == item: elif 'temp_folder_src' == item:
helper_src.cleanup_temp_folder(share_instance_id, helper_src.cleanup_temp_folder(
mount_path) mount_path, info_src['share_instance_id'])
elif 'temp_folder_dest' == item: elif 'temp_folder_dest' == item:
helper_dest.cleanup_temp_folder(dest_share_instance_id, helper_dest.cleanup_temp_folder(
mount_path) mount_path, info_dest['share_instance_id'])
elif 'access_src' == item: elif 'access_src' == item and share_instance_src:
helper_src.cleanup_data_access(access_ref_list_src, helper_src.cleanup_data_access(
share_instance_id) access_ref_src, share_instance_src)
elif 'access_dest' == item: elif 'access_dest' == item and share_instance_dest:
helper_dest.cleanup_data_access(access_ref_list_dest, helper_dest.cleanup_data_access(
dest_share_instance_id) access_ref_dest, share_instance_dest)
try: try:
helper_src.mount_share_instance( helper_src.mount_share_instance_or_backup(info_src, mount_path)
connection_info_src['mount'], mount_path, share_instance)
except Exception: except Exception:
msg = _("Data copy failed attempting to mount " msg = _("Share data copy failed attempting to mount source "
"share instance %s.") % share_instance_id "at %s.") % info_src['mount_point']
LOG.exception(msg) LOG.exception(msg)
_call_cleanups(['temp_folder_src', 'access_dest', 'access_src']) _call_cleanups(['temp_folder_src', 'access_dest', 'access_src'])
raise exception.ShareDataCopyFailed(reason=msg) raise exception.ShareDataCopyFailed(reason=msg)
try: try:
helper_dest.mount_share_instance( helper_dest.mount_share_instance_or_backup(info_dest, mount_path)
connection_info_dest['mount'], mount_path,
dest_share_instance)
except Exception: except Exception:
msg = _("Data copy failed attempting to mount " msg = _("Share data copy failed attempting to mount destination "
"share instance %s.") % dest_share_instance_id "at %s.") % info_dest['mount_point']
LOG.exception(msg) LOG.exception(msg)
_call_cleanups(['temp_folder_dest', 'unmount_src', _call_cleanups(['temp_folder_dest', 'unmount_src',
'temp_folder_src', 'access_dest', 'access_src']) 'temp_folder_src', 'access_dest', 'access_src'])
raise exception.ShareDataCopyFailed(reason=msg) raise exception.ShareDataCopyFailed(reason=msg)
self.busy_tasks_shares[src_share['id']] = copy self.busy_tasks_shares[share['id']] = copy
self.db.share_update( self.db.share_update(
context, src_share['id'], context, share['id'],
{'task_state': constants.TASK_STATE_DATA_COPYING_IN_PROGRESS}) {'task_state': constants.TASK_STATE_DATA_COPYING_IN_PROGRESS})
copied = False
try: try:
copy.run() copy.run()
self.db.share_update( self.db.share_update(
context, src_share['id'], context, share['id'],
{'task_state': constants.TASK_STATE_DATA_COPYING_COMPLETING}) {'task_state': constants.TASK_STATE_DATA_COPYING_COMPLETING})
if copy.get_progress()['total_progress'] == 100: if copy.get_progress()['total_progress'] == 100:
copied = True copied = True
except Exception: except Exception:
LOG.exception("Failed to copy data from share instance " LOG.exception("Failed to copy data from source to destination "
"%(share_instance_id)s to " "%(src)s to %(dest)s.",
"%(dest_share_instance_id)s.", {'src': info_src['mount_point'],
{'share_instance_id': share_instance_id, 'dest': info_dest['mount_point']})
'dest_share_instance_id': dest_share_instance_id})
try: try:
helper_src.unmount_share_instance(connection_info_src['unmount'], helper_src.unmount_share_instance_or_backup(info_src,
mount_path, share_instance_id) mount_path)
except Exception: except Exception:
LOG.exception("Could not unmount folder of instance" LOG.exception("Could not unmount src %s after its data copy.",
" %s after its data copy.", share_instance_id) info_src['mount_point'])
try: try:
helper_dest.unmount_share_instance( helper_dest.unmount_share_instance_or_backup(info_dest,
connection_info_dest['unmount'], mount_path, mount_path)
dest_share_instance_id)
except Exception: except Exception:
LOG.exception("Could not unmount folder of instance" LOG.exception("Could not unmount dest %s after its data copy.",
" %s after its data copy.", dest_share_instance_id) info_dest['mount_point'])
try: try:
helper_src.deny_access_to_data_service( if info_src['share_instance_id']:
access_ref_list_src, share_instance) helper_src.deny_access_to_data_service(access_ref_src,
share_instance_src)
except Exception: except Exception:
LOG.exception("Could not deny access to instance" LOG.exception("Could not deny access to src instance %s after "
" %s after its data copy.", share_instance_id) "its data copy.", info_src['share_instance_id'])
try: try:
helper_dest.deny_access_to_data_service( if info_dest['share_instance_id']:
access_ref_list_dest, dest_share_instance) helper_dest.deny_access_to_data_service(access_ref_dest,
share_instance_dest)
except Exception: except Exception:
LOG.exception("Could not deny access to instance" LOG.exception("Could not deny access to dest instance %s after "
" %s after its data copy.", dest_share_instance_id) "its data copy.", info_dest['share_instance_id'])
if copy and copy.cancelled: if copy and copy.cancelled:
self.db.share_update( self.db.share_update(
context, src_share['id'], context, share['id'],
{'task_state': constants.TASK_STATE_DATA_COPYING_CANCELLED}) {'task_state': constants.TASK_STATE_DATA_COPYING_CANCELLED})
LOG.warning("Copy of data from share instance " LOG.warning("Copy of data from source "
"%(src_instance)s to share instance " "%(src)s to destination %(dest)s was cancelled.",
"%(dest_instance)s was cancelled.", {'src': info_src['mount_point'],
{'src_instance': share_instance_id, 'dest': info_dest['mount_point']})
'dest_instance': dest_share_instance_id}) raise exception.ShareDataCopyCancelled()
raise exception.ShareDataCopyCancelled(
src_instance=share_instance_id,
dest_instance=dest_share_instance_id)
elif not copied: elif not copied:
msg = _("Copying data from share instance %(instance_id)s " msg = _("Copying data from source %(src)s "
"to %(dest_instance_id)s did not succeed.") % ( "to destination %(dest)s did not succeed.") % (
{'instance_id': share_instance_id, {'src': info_src['mount_point'],
'dest_instance_id': dest_share_instance_id}) 'dest': info_dest['mount_point']})
raise exception.ShareDataCopyFailed(reason=msg) raise exception.ShareDataCopyFailed(reason=msg)
self.db.share_update( self.db.share_update(
context, src_share['id'], context, share['id'],
{'task_state': constants.TASK_STATE_DATA_COPYING_COMPLETED}) {'task_state': constants.TASK_STATE_DATA_COPYING_COMPLETED})
LOG.debug("Copy of data from share instance %(src_instance)s to " LOG.debug("Copy of data from source %(src)s to destination "
"share instance %(dest_instance)s was successful.", "%(dest)s was successful.", {
{'src_instance': share_instance_id, 'src': info_src['mount_point'],
'dest_instance': dest_share_instance_id}) 'dest': info_dest['mount_point']})
def create_backup(self, context, backup):
    """Create a backup of the given share's data.

    Refreshes the share and backup records from the DB, then performs
    the data copy via ``_run_backup``.  On failure the share is put
    back to 'available', the backup is flagged 'error' with the
    failure reason, and the original exception is re-raised.

    :param backup: backup dict from the RPC payload (may be stale).
    """
    share_id = backup['share_id']
    backup_id = backup['id']
    share = self.db.share_get(context, share_id)
    # Re-read the backup; the RPC payload may not reflect later updates.
    backup = self.db.share_backup_get(context, backup_id)

    LOG.info('Create backup started, backup: %(backup_id)s '
             'share: %(share_id)s.',
             {'backup_id': backup_id, 'share_id': share_id})
    try:
        self._run_backup(context, backup, share)
    except Exception as err:
        with excutils.save_and_reraise_exception():
            LOG.error("Failed to create share backup %s by data driver.",
                      backup_id)
            self.db.share_update(
                context, share_id,
                {'status': constants.STATUS_AVAILABLE})
            # Persist a string, not the exception object, into the
            # fail_reason column.
            self.db.share_backup_update(
                context, backup_id,
                {'status': constants.STATUS_ERROR,
                 'fail_reason': str(err)})
@periodic_task.periodic_task(
    spacing=CONF.backup_continue_update_interval)
def create_backup_continue(self, context):
    """Periodically poll progress of in-flight backup data copies.

    For every backup in 'creating' state owned by this host, query the
    copy progress and mirror it onto the backup record.  At 100% both
    the share and the backup become 'available'; on any error the
    share is released and the backup is marked 'error'.
    """
    filters = {'status': constants.STATUS_CREATING,
               'host': self.host,
               'topic': CONF.data_topic}
    backups = self.db.share_backups_get_all(context, filters)

    for backup in backups:
        backup_id = backup['id']
        share_id = backup['share_id']
        result = {}
        try:
            result = self.data_copy_get_progress(context, share_id)
            progress = result.get('total_progress', '0')
            self.db.share_backup_update(context, backup_id,
                                        {'progress': progress})
            if progress == '100':
                self.db.share_update(
                    context, share_id,
                    {'status': constants.STATUS_AVAILABLE})
                self.db.share_backup_update(
                    context, backup_id,
                    {'status': constants.STATUS_AVAILABLE})
                LOG.info("Created share backup %s successfully.",
                         backup_id)
        except Exception:
            # BUG FIX: '%(backup)' was missing its 's' conversion,
            # which made this log call itself fail at format time.
            LOG.warning("Failed to get progress of share %(share)s "
                        "backing up in share_backup %(backup)s.",
                        {'share': share_id, 'backup': backup_id})
            self.db.share_update(
                context, share_id,
                {'status': constants.STATUS_AVAILABLE})
            self.db.share_backup_update(
                context, backup_id,
                {'status': constants.STATUS_ERROR, 'progress': '0'})
def _get_share_mount_info(self, share_instance):
    """Build mount/unmount command templates for a share instance.

    Prefers an admin-only export location when one exists, otherwise
    falls back to the instance's first export location.  The returned
    'mount' template still contains '%(path)s'/'%(options)s'
    placeholders for the caller to fill in.
    """
    admin_paths = [loc['path']
                   for loc in share_instance['export_locations']
                   if loc['is_admin_only']]
    if admin_paths:
        export_path = admin_paths[0]
    else:
        export_path = share_instance['export_locations'][0]['path']

    template_args = {
        'proto': share_instance['share_proto'].lower(),
        'export': export_path,
        'path': '%(path)s',
        'options': '%(options)s',
    }
    return {
        'mount': CONF.backup_share_mount_template % template_args,
        'unmount': CONF.backup_share_unmount_template,
    }
def _get_backup_access_mapping(self, share):
    """Return {access_type: [share_proto]} entries matching the share.

    Filters CONF.backup_protocol_access_mapping down to the access
    types whose protocol list contains this share's protocol
    (case-insensitive).
    """
    share_proto = share['share_proto'].lower()
    result = {}
    for access_type, protocols in (
            CONF.backup_protocol_access_mapping.items()):
        lowered = [proto.lower() for proto in protocols]
        if share_proto in lowered:
            result.setdefault(access_type, []).append(share_proto)
    return result
def _run_backup(self, context, backup, share):
    """Copy the share's data into the backup location.

    Builds the source (share instance) and destination (backup driver
    location) descriptors and hands them to ``_copy_share_data``.
    Clears the share's task_state on success.

    :raises: ShareDataCopyFailed if the copy fails; the share's
        task_state is set to 'data_copying_error' first.
    """
    share_instance_id = share.instance.get('id')
    share_instance = self.db.share_instance_get(
        context, share_instance_id, with_share_data=True)
    access_mapping = self._get_backup_access_mapping(share)
    ignore_list = CONF.backup_ignore_files
    mount_path = CONF.mount_tmp_location
    backup_mount_path = CONF.backup_mount_tmp_location
    mount_info = self._get_share_mount_info(share_instance)
    # The backup driver supplies the mount/unmount templates for the
    # backup destination.
    dest_backup_info = self.backup_driver.get_backup_info(backup)
    dest_backup_mount_point = os.path.join(backup_mount_path, backup['id'])
    # Data is copied into a subfolder named after the backup id.
    backup_folder = os.path.join(dest_backup_mount_point, backup['id'])
    try:
        copy = data_utils.Copy(
            os.path.join(mount_path, share_instance_id),
            backup_folder,
            ignore_list)
        info_src = {
            'share_id': share['id'],
            'share_instance_id': share_instance_id,
            'mount': mount_info['mount'],
            'unmount': mount_info['unmount'],
            'mount_point': os.path.join(mount_path, share_instance_id),
            'access_mapping': access_mapping
        }
        # Destination is a backup location, not a share instance.
        info_dest = {
            'share_id': None,
            'share_instance_id': None,
            'backup': True,
            'backup_id': backup['id'],
            'mount': dest_backup_info['mount'],
            'unmount': dest_backup_info['unmount'],
            'mount_point': dest_backup_mount_point,
            'access_mapping': access_mapping
        }
        self._copy_share_data(context, copy, info_src, info_dest)
        self.db.share_update(context, share['id'], {'task_state': None})
    except Exception:
        self.db.share_update(
            context, share['id'],
            {'task_state': constants.TASK_STATE_DATA_COPYING_ERROR})
        msg = _("Failed to copy contents from share %(src)s to "
                "backup %(dest)s.") % (
            {'src': share_instance_id, 'dest': backup['id']})
        LOG.exception(msg)
        raise exception.ShareDataCopyFailed(reason=msg)
    finally:
        # Drop the busy marker so cancel/progress queries stop matching.
        self.busy_tasks_shares.pop(share['id'], None)
def delete_backup(self, context, backup):
    """Delete a backup's data and its database record.

    Mounts the backup location, removes the backup folder's contents,
    unmounts, releases quota (best effort), and finally soft-deletes
    the DB record.  On a data-deletion failure the backup is flagged
    'error_deleting' and the exception is re-raised.
    """
    backup_id = backup['id']
    LOG.info('Delete backup started, backup: %s.', backup_id)
    backup = self.db.share_backup_get(context, backup_id)

    try:
        dest_backup_info = self.backup_driver.get_backup_info(backup)
        backup_mount_path = CONF.backup_mount_tmp_location
        mount_point = os.path.join(backup_mount_path, backup['id'])
        backup_folder = os.path.join(mount_point, backup['id'])
        if not os.path.exists(backup_folder):
            os.makedirs(backup_folder)
        if not os.path.exists(backup_folder):
            raise exception.NotFound("Path %s could not be "
                                     "found." % backup_folder)

        mount_template = dest_backup_info['mount']
        unmount_template = dest_backup_info['unmount']
        mount_command = mount_template % {'path': mount_point}
        unmount_command = unmount_template % {'path': mount_point}
        utils.execute(*(mount_command.split()), run_as_root=True)

        # backup_folder should exist after mount, else backup is
        # already deleted
        if os.path.exists(backup_folder):
            for filename in os.listdir(backup_folder):
                if filename in CONF.backup_ignore_files:
                    continue
                file_path = os.path.join(backup_folder, filename)
                try:
                    if (os.path.isfile(file_path) or
                            os.path.islink(file_path)):
                        os.unlink(file_path)
                    elif os.path.isdir(file_path):
                        shutil.rmtree(file_path)
                except Exception as e:
                    # Best effort per entry; rmtree below will surface
                    # anything that truly cannot be removed.
                    LOG.debug("Failed to delete %(file_path)s. Reason: "
                              "%(err)s", {'file_path': file_path,
                                          'err': e})
            shutil.rmtree(backup_folder)
        utils.execute(*(unmount_command.split()), run_as_root=True)
    except Exception:
        with excutils.save_and_reraise_exception():
            LOG.error("Failed to delete share backup %s.", backup['id'])
            self.db.share_backup_update(
                context, backup['id'],
                {'status': constants.STATUS_ERROR_DELETING})

    try:
        reserve_opts = {
            'backups': -1,
            'backup_gigabytes': -backup['size'],
        }
        reservations = QUOTAS.reserve(
            context, project_id=backup['project_id'], **reserve_opts)
    except Exception as e:
        # BUG FIX: a 'raise' here made 'reservations = None' and the
        # commit below unreachable, and left the backup record behind
        # after its data was already removed.  Quota maintenance is
        # best effort: warn and continue.
        reservations = None
        LOG.warning("Failed to update backup quota for %(pid)s: %(err)s.",
                    {'pid': backup['project_id'], 'err': e})

    if reservations:
        QUOTAS.commit(context, reservations,
                      project_id=backup['project_id'])

    self.db.share_backup_delete(context, backup_id)
    LOG.info("Share backup %s deleted successfully.", backup_id)
def restore_backup(self, context, backup, share_id):
    """Copy a backup's contents back onto the share *share_id*.

    On failure, marks the share 'backup_restoring_error' and the
    backup 'error', then re-raises the original exception.
    """
    backup_id = backup['id']
    LOG.info('Restore backup started, backup: %(backup_id)s '
             'share: %(share_id)s.',
             {'backup_id': backup_id, 'share_id': share_id})

    # Refresh both records from the DB before starting the copy.
    share_ref = self.db.share_get(context, share_id)
    backup_ref = self.db.share_backup_get(context, backup_id)
    try:
        self._run_restore(context, backup_ref, share_ref)
    except Exception:
        with excutils.save_and_reraise_exception():
            LOG.error("Failed to restore backup %(backup)s to share "
                      "%(share)s by data driver.",
                      {'backup': backup_ref['id'], 'share': share_id})
            self.db.share_update(
                context, share_id,
                {'status': constants.STATUS_BACKUP_RESTORING_ERROR})
            self.db.share_backup_update(
                context, backup_id,
                {'status': constants.STATUS_ERROR})
@periodic_task.periodic_task(
    spacing=CONF.restore_continue_update_interval)
def restore_backup_continue(self, context):
    """Periodically poll progress of in-flight restore data copies.

    For every backup in 'restoring' state owned by this host, find the
    shares being restored from it and mirror the copy progress onto
    the backup record.  At 100% the share becomes 'available' and the
    backup returns to 'available'; on error the share is flagged
    'backup_restoring_error' and the backup's restore progress reset.
    """
    filters = {'status': constants.STATUS_RESTORING,
               'host': self.host,
               'topic': CONF.data_topic}
    backups = self.db.share_backups_get_all(context, filters)
    for backup in backups:
        backup_id = backup['id']
        try:
            filters = {'source_backup_id': backup_id}
            shares = self.db.share_get_all(context, filters)
        except Exception:
            LOG.warning('Failed to get shares for backup %s', backup_id)
            continue
        for share in shares:
            if share['status'] != constants.STATUS_BACKUP_RESTORING:
                continue
            share_id = share['id']
            result = {}
            try:
                result = self.data_copy_get_progress(context, share_id)
                progress = result.get('total_progress', '0')
                self.db.share_backup_update(
                    context, backup_id,
                    {'restore_progress': progress})
                if progress == '100':
                    self.db.share_update(
                        context, share_id,
                        {'status': constants.STATUS_AVAILABLE})
                    self.db.share_backup_update(
                        context, backup_id,
                        {'status': constants.STATUS_AVAILABLE})
                    LOG.info("Share backup %s restored successfully.",
                             backup_id)
            except Exception:
                # BUG FIX: '%(share)' was missing its 's' conversion,
                # which made this log call itself fail at format time.
                LOG.warning("Failed to get progress of share_backup "
                            "%(backup)s restoring in share %(share)s.",
                            {'share': share_id, 'backup': backup_id})
                self.db.share_update(
                    context, share_id,
                    {'status': constants.STATUS_BACKUP_RESTORING_ERROR})
                self.db.share_backup_update(
                    context, backup_id,
                    {'status': constants.STATUS_AVAILABLE,
                     'restore_progress': '0'})
def _run_restore(self, context, backup, share):
    """Copy data from the backup location back onto the share.

    Mirror of ``_run_backup`` with source and destination swapped:
    the backup driver's location is mounted as the source and the
    share instance as the destination.

    :raises: ShareDataCopyFailed if the copy fails; the share's
        task_state is set to 'data_copying_error' first.
    """
    share_instance_id = share.instance.get('id')
    share_instance = self.db.share_instance_get(
        context, share_instance_id, with_share_data=True)
    access_mapping = self._get_backup_access_mapping(share)
    mount_path = CONF.mount_tmp_location
    backup_mount_path = CONF.backup_mount_tmp_location
    ignore_list = CONF.backup_ignore_files
    mount_info = self._get_share_mount_info(share_instance)
    # The backup driver supplies the mount/unmount templates for the
    # backup source.
    src_backup_info = self.backup_driver.get_backup_info(backup)
    src_backup_mount_point = os.path.join(backup_mount_path, backup['id'])
    # Backup data lives in a subfolder named after the backup id.
    backup_folder = os.path.join(src_backup_mount_point, backup['id'])
    try:
        copy = data_utils.Copy(
            backup_folder,
            os.path.join(mount_path, share_instance_id),
            ignore_list)
        # Source is a backup location, not a share instance.
        info_src = {
            'share_id': None,
            'share_instance_id': None,
            'restore': True,
            'backup_id': backup['id'],
            'mount': src_backup_info['mount'],
            'unmount': src_backup_info['unmount'],
            'mount_point': src_backup_mount_point,
            'access_mapping': access_mapping
        }
        info_dest = {
            'share_id': share['id'],
            'share_instance_id': share_instance_id,
            'mount': mount_info['mount'],
            'unmount': mount_info['unmount'],
            'mount_point': os.path.join(mount_path, share_instance_id),
            'access_mapping': access_mapping
        }
        self._copy_share_data(context, copy, info_src, info_dest)
        self.db.share_update(context, share['id'], {'task_state': None})
    except Exception:
        self.db.share_update(
            context, share['id'],
            {'task_state': constants.TASK_STATE_DATA_COPYING_ERROR})
        msg = _("Failed to copy/restore contents from backup %(src)s "
                "to share %(dest)s.") % (
            {'src': backup['id'], 'dest': share_instance_id})
        LOG.exception(msg)
        raise exception.ShareDataCopyFailed(reason=msg)
    finally:
        # Drop the busy marker so cancel/progress queries stop matching.
        self.busy_tasks_shares.pop(share['id'], None)

View File

@ -33,6 +33,10 @@ class DataAPI(object):
Add migration_start(), Add migration_start(),
data_copy_cancel(), data_copy_cancel(),
data_copy_get_progress() data_copy_get_progress()
1.1 - create_backup(),
delete_backup(),
restore_backup()
""" """
BASE_RPC_API_VERSION = '1.0' BASE_RPC_API_VERSION = '1.0'
@ -41,7 +45,7 @@ class DataAPI(object):
super(DataAPI, self).__init__() super(DataAPI, self).__init__()
target = messaging.Target(topic=CONF.data_topic, target = messaging.Target(topic=CONF.data_topic,
version=self.BASE_RPC_API_VERSION) version=self.BASE_RPC_API_VERSION)
self.client = rpc.get_client(target, version_cap='1.0') self.client = rpc.get_client(target, version_cap='1.1')
def migration_start(self, context, share_id, ignore_list, def migration_start(self, context, share_id, ignore_list,
share_instance_id, dest_share_instance_id, share_instance_id, dest_share_instance_id,
@ -65,3 +69,16 @@ class DataAPI(object):
call_context = self.client.prepare(version='1.0') call_context = self.client.prepare(version='1.0')
return call_context.call(context, 'data_copy_get_progress', return call_context.call(context, 'data_copy_get_progress',
share_id=share_id) share_id=share_id)
def create_backup(self, context, backup):
    """Cast an asynchronous create_backup request (RPC v1.1)."""
    cctxt = self.client.prepare(version='1.1')
    cctxt.cast(context, 'create_backup', backup=backup)
def delete_backup(self, context, backup):
    """Cast an asynchronous delete_backup request (RPC v1.1)."""
    self.client.prepare(version='1.1').cast(
        context, 'delete_backup', backup=backup)
def restore_backup(self, context, backup, share_id):
    """Cast an asynchronous restore_backup request (RPC v1.1)."""
    cctxt = self.client.prepare(version='1.1')
    cctxt.cast(context, 'restore_backup',
               backup=backup, share_id=share_id)

View File

@ -57,6 +57,9 @@ db_opts = [
default='share-snapshot-%s', default='share-snapshot-%s',
help='Template string to be used to generate share snapshot ' help='Template string to be used to generate share snapshot '
'names.'), 'names.'),
cfg.StrOpt('share_backup_name_template',
default='share-backup-%s',
help='Template string to be used to generate backup names.'),
] ]
CONF = cfg.CONF CONF = cfg.CONF
@ -1789,3 +1792,33 @@ def async_operation_data_update(context, entity_id, details,
def async_operation_data_delete(context, entity_id, key=None): def async_operation_data_delete(context, entity_id, key=None):
"""Remove one, list or all key-value pairs for given entity_id.""" """Remove one, list or all key-value pairs for given entity_id."""
return IMPL.async_operation_data_delete(context, entity_id, key) return IMPL.async_operation_data_delete(context, entity_id, key)
####################
def share_backup_create(context, share_id, values):
    """Create new share backup with specified values.

    :param share_id: ID of the share being backed up.
    :param values: dict of ShareBackup column values; an 'id' is
        generated when absent.
    :returns: the created backup model.
    """
    return IMPL.share_backup_create(context, share_id, values)
def share_backup_update(context, backup_id, values):
    """Updates a share backup with given values.

    :returns: the updated backup model.
    :raises: ShareBackupNotFound if the backup does not exist.
    """
    return IMPL.share_backup_update(context, backup_id, values)
def share_backup_get(context, backup_id):
    """Get share backup by id.

    :raises: ShareBackupNotFound if no non-deleted backup matches.
    """
    return IMPL.share_backup_get(context, backup_id)
def share_backups_get_all(context, filters=None, limit=None, offset=None,
                          sort_key=None, sort_dir=None):
    """Get all backups.

    :param filters: optional dict; a 'project_id' key scopes results,
        remaining keys are matched against backup fields.
    :param limit: maximum number of rows to return.
    :param offset: number of rows to skip.
    :param sort_key: attribute to sort by (default 'created_at').
    :param sort_dir: sort direction (default 'desc').
    """
    return IMPL.share_backups_get_all(
        context, filters=filters, limit=limit, offset=offset,
        sort_key=sort_key, sort_dir=sort_dir)
def share_backup_delete(context, backup_id):
    """Deletes backup with the specified ID.

    Soft-deletes the row; raises ShareBackupNotFound if absent.
    """
    return IMPL.share_backup_delete(context, backup_id)

View File

@ -0,0 +1,91 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""add backup
Revision ID: 9afbe2df4945
Revises: aebe2a413e13
Create Date: 2022-04-21 23:06:59.144695
"""
# revision identifiers, used by Alembic.
revision = '9afbe2df4945'
down_revision = 'aebe2a413e13'
from alembic import op
from oslo_log import log
import sqlalchemy as sa
LOG = log.getLogger(__name__)
share_backups_table_name = 'share_backups'
def upgrade():
    """Add backup attributes.

    Creates the 'share_backups' table and adds the
    'shares.source_backup_id' column linking restored shares to the
    backup they came from.
    """
    try:
        op.create_table(
            share_backups_table_name,
            sa.Column('id', sa.String(length=36),
                      primary_key=True, nullable=False),
            sa.Column('created_at', sa.DateTime),
            sa.Column('updated_at', sa.DateTime),
            sa.Column('deleted_at', sa.DateTime),
            sa.Column('deleted', sa.String(length=36), default='False'),
            sa.Column('user_id', sa.String(255)),
            sa.Column('project_id', sa.String(255)),
            sa.Column('availability_zone', sa.String(255)),
            # BUG FIX: widened from String(255) to String(1023) to
            # match models.ShareBackup.fail_reason; the narrower DDL
            # silently diverged from the model definition.
            sa.Column('fail_reason', sa.String(1023)),
            sa.Column('display_name', sa.String(255)),
            sa.Column('display_description', sa.String(255)),
            sa.Column('host', sa.String(255)),
            sa.Column('topic', sa.String(255)),
            sa.Column('status', sa.String(255)),
            sa.Column('progress', sa.String(32)),
            sa.Column('restore_progress', sa.String(32)),
            sa.Column('size', sa.Integer),
            sa.Column('share_id', sa.String(36),
                      sa.ForeignKey('shares.id',
                                    name="fk_backups_share_id_shares")),
            mysql_engine='InnoDB',
            mysql_charset='utf8'
        )
    except Exception:
        LOG.error("Table |%s| not created!",
                  share_backups_table_name)
        raise

    try:
        op.add_column(
            'shares',
            sa.Column('source_backup_id', sa.String(36), nullable=True))
    except Exception:
        LOG.error("Column can not be added for 'shares' table!")
        raise
def downgrade():
    """Remove share backup attributes and table share_backups."""
    # Drop the share_backups table first: it holds the FK that
    # references shares.id.
    try:
        op.drop_table(share_backups_table_name)
    except Exception:
        LOG.error("%s table not dropped.", share_backups_table_name)
        raise
    # Then remove the backup link column added to 'shares'.
    try:
        op.drop_column('shares', 'source_backup_id')
    except Exception:
        LOG.error("Column can not be dropped for 'shares' table!")
        raise

View File

@ -447,6 +447,16 @@ def _sync_share_groups(context, project_id, user_id, share_type_id=None):
return {'share_groups': share_groups_count} return {'share_groups': share_groups_count}
def _sync_backups(context, project_id, user_id, share_type_id=None):
    """Quota sync: number of share backups for the project/user."""
    count = _backup_data_get_for_project(context, project_id, user_id)[0]
    return {'backups': count}
def _sync_backup_gigabytes(context, project_id, user_id, share_type_id=None):
    """Quota sync: total backup gigabytes for the project/user."""
    gigs = _backup_data_get_for_project(context, project_id, user_id)[1]
    return {'backup_gigabytes': gigs}
def _sync_share_group_snapshots( def _sync_share_group_snapshots(
context, project_id, user_id, share_type_id=None, context, project_id, user_id, share_type_id=None,
): ):
@ -480,6 +490,8 @@ QUOTA_SYNC_FUNCTIONS = {
'_sync_share_group_snapshots': _sync_share_group_snapshots, '_sync_share_group_snapshots': _sync_share_group_snapshots,
'_sync_share_replicas': _sync_share_replicas, '_sync_share_replicas': _sync_share_replicas,
'_sync_replica_gigabytes': _sync_replica_gigabytes, '_sync_replica_gigabytes': _sync_replica_gigabytes,
'_sync_backups': _sync_backups,
'_sync_backup_gigabytes': _sync_backup_gigabytes,
} }
@ -2113,7 +2125,8 @@ def _process_share_filters(query, filters, project_id=None, is_public=False):
if filters is None: if filters is None:
filters = {} filters = {}
share_filter_keys = ['share_group_id', 'snapshot_id', 'is_soft_deleted'] share_filter_keys = ['share_group_id', 'snapshot_id',
'is_soft_deleted', 'source_backup_id']
instance_filter_keys = ['share_server_id', 'status', 'share_type_id', instance_filter_keys = ['share_server_id', 'status', 'share_type_id',
'host', 'share_network_id'] 'host', 'share_network_id']
share_filters = {} share_filters = {}
@ -7045,3 +7058,126 @@ def async_operation_data_update(
def async_operation_data_delete(context, entity_id, key=None): def async_operation_data_delete(context, entity_id, key=None):
query = _async_operation_data_query(context, entity_id, key) query = _async_operation_data_query(context, entity_id, key)
query.update({"deleted": 1, "deleted_at": timeutils.utcnow()}) query.update({"deleted": 1, "deleted_at": timeutils.utcnow()})
@require_context
def share_backup_create(context, share_id, values):
    """Create a share backup row; delegates to the writer helper."""
    return _share_backup_create(context, share_id, values)
@require_context
@context_manager.writer
def _share_backup_create(context, share_id, values):
    """Insert a new ShareBackup row and return the refreshed model."""
    # Generate an id unless the caller supplied one.
    if not values.get('id'):
        values['id'] = uuidutils.generate_uuid()
    values['share_id'] = share_id

    backup_ref = models.ShareBackup()
    backup_ref.update(values)
    backup_ref.save(session=context.session)
    # Re-read through the standard getter so project scoping and
    # soft-delete filtering apply.
    return share_backup_get(context, backup_ref['id'])
@require_context
@context_manager.reader
def share_backup_get(context, share_backup_id):
    """Return a non-deleted backup by ID, scoped to the project.

    :raises: ShareBackupNotFound if no matching row exists.
    """
    query = model_query(
        context, models.ShareBackup, project_only=True, read_deleted="no")
    backup_ref = query.filter_by(id=share_backup_id).first()
    if backup_ref is None:
        raise exception.ShareBackupNotFound(backup_id=share_backup_id)
    return backup_ref
@require_context
@context_manager.reader
def share_backups_get_all(context, filters=None,
                          limit=None, offset=None,
                          sort_key=None, sort_dir=None):
    """List backups matching *filters*, sorted and paginated.

    A 'project_id' entry in *filters* scopes results to that project.
    """
    if filters:
        project_id = filters.pop('project_id', None)
    else:
        project_id = None
    return _share_backups_get_with_filters(
        context,
        project_id=project_id,
        filters=filters, limit=limit, offset=offset,
        sort_key=sort_key, sort_dir=sort_dir)
def _share_backups_get_with_filters(context, project_id=None, filters=None,
                                    limit=None, offset=None,
                                    sort_key=None, sort_dir=None):
    """Retrieve backups with optional filtering, sorting and paging.

    When no sort parameters are supplied, results are sorted by
    'created_at' in descending order.

    :param context: context to query under
    :param project_id: restrict results to this project when given
    :param filters: dictionary of filters
    :param limit: maximum number of items to return
    :param offset: number of items to skip
    :param sort_key: attribute by which results should be sorted
    :param sort_dir: direction in which results should be sorted
    :returns: list of matching backups
    """
    if not sort_key:
        sort_key = 'created_at'
    if not sort_dir:
        sort_dir = 'desc'
    # Deep-copy so exact_filter cannot mutate the caller's dict.
    active_filters = copy.deepcopy(filters) if filters else {}

    backup_query = model_query(context, models.ShareBackup)
    if project_id:
        backup_query = backup_query.filter_by(project_id=project_id)

    legal_filter_keys = ('display_name', 'display_name~',
                         'display_description', 'display_description~',
                         'id', 'share_id', 'host', 'topic', 'status')
    backup_query = exact_filter(backup_query, models.ShareBackup,
                                active_filters, legal_filter_keys)
    backup_query = apply_sorting(models.ShareBackup, backup_query,
                                 sort_key, sort_dir)

    if limit is not None:
        backup_query = backup_query.limit(limit)
    if offset:
        backup_query = backup_query.offset(offset)

    return backup_query.all()
@require_admin_context
@context_manager.reader
def _backup_data_get_for_project(context, project_id, user_id):
    """Return (backup_count, total_backup_size) for a project.

    When *user_id* is given, restrict the totals to that user.
    """
    query = model_query(context, models.ShareBackup,
                        func.count(models.ShareBackup.id),
                        func.sum(models.ShareBackup.size),
                        read_deleted="no")
    query = query.filter_by(project_id=project_id)
    if user_id:
        query = query.filter_by(user_id=user_id)
    row = query.first()
    # Either aggregate is None when no rows match; normalize to 0.
    return (row[0] or 0, row[1] or 0)
@require_context
@oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True)
@context_manager.writer
def share_backup_update(context, backup_id, values):
    """Apply *values* to the backup row and return the updated model.

    :raises: ShareBackupNotFound if the backup does not exist.
    """
    backup = share_backup_get(context, backup_id)
    backup.update(values)
    backup.save(session=context.session)
    return backup
@require_context
@context_manager.writer
def share_backup_delete(context, backup_id):
    """Soft-delete the backup row (also updates its status).

    :raises: ShareBackupNotFound if the backup does not exist.
    """
    backup = share_backup_get(context, backup_id)
    backup.soft_delete(session=context.session, update_status=True)

View File

@ -305,6 +305,7 @@ class Share(BASE, ManilaBase):
display_name = Column(String(255)) display_name = Column(String(255))
display_description = Column(String(255)) display_description = Column(String(255))
snapshot_id = Column(String(36)) snapshot_id = Column(String(36))
source_backup_id = Column(String(36))
snapshot_support = Column(Boolean, default=True) snapshot_support = Column(Boolean, default=True)
create_share_from_snapshot_support = Column(Boolean, default=True) create_share_from_snapshot_support = Column(Boolean, default=True)
revert_to_snapshot_support = Column(Boolean, default=False) revert_to_snapshot_support = Column(Boolean, default=False)
@ -1469,6 +1470,32 @@ class AsynchronousOperationData(BASE, ManilaBase):
value = Column(String(1023), nullable=False) value = Column(String(1023), nullable=False)
class ShareBackup(BASE, ManilaBase):
    """Represents a backup of a share."""
    __tablename__ = 'share_backups'
    id = Column(String(36), primary_key=True)

    @property
    def name(self):
        """Human-readable name rendered from the configured template."""
        return CONF.share_backup_name_template % self.id

    # Soft-delete marker (project convention: 'False' while live).
    deleted = Column(String(36), default='False')
    user_id = Column(String(255), nullable=False)
    project_id = Column(String(255), nullable=False)
    share_id = Column(String(36), ForeignKey('shares.id'))
    # Backup size in GiB, copied from the source share at creation time.
    size = Column(Integer)
    # Backend host that owns the backup, and the RPC topic (share vs data
    # service) that processes its operations.
    host = Column(String(255))
    topic = Column(String(255))
    availability_zone = Column(String(255))
    display_name = Column(String(255))
    display_description = Column(String(255))
    # Progress values are stored as string percentages, e.g. '0'..'100'.
    progress = Column(String(32))
    restore_progress = Column(String(32))
    status = Column(String(255))
    fail_reason = Column(String(1023))
def register_models(): def register_models():
"""Register Models and create metadata. """Register Models and create metadata.

View File

@ -284,8 +284,7 @@ class ShareDataCopyFailed(ManilaException):
class ShareDataCopyCancelled(ManilaException): class ShareDataCopyCancelled(ManilaException):
message = _("Copy of contents from share instance %(src_instance)s " message = _("Copy of contents from source to destination was cancelled.")
"to share instance %(dest_instance)s was cancelled.")
class ServiceIPNotFound(ManilaException): class ServiceIPNotFound(ManilaException):
@ -1147,3 +1146,26 @@ class ZadaraServerNotFound(NotFound):
# Macrosan Storage driver # Macrosan Storage driver
class MacrosanBackendExeption(ShareBackendException): class MacrosanBackendExeption(ShareBackendException):
message = _("Macrosan backend exception: %(reason)s") message = _("Macrosan backend exception: %(reason)s")
# Backup
class BackupException(ManilaException):
    """Generic failure while performing a share backup action."""
    message = _("Unable to perform a backup action: %(reason)s.")


class InvalidBackup(Invalid):
    """The backup is in a state that does not allow the requested action."""
    message = _("Invalid backup: %(reason)s.")


class BackupLimitExceeded(QuotaError):
    """The per-project 'backups' count quota would be exceeded."""
    message = _("Maximum number of backups allowed (%(allowed)d) exceeded.")


class ShareBackupNotFound(NotFound):
    """No share backup exists with the given id."""
    message = _("Backup %(backup_id)s could not be found.")


class ShareBackupSizeExceedsAvailableQuota(QuotaError):
    """The per-project 'backup_gigabytes' quota would be exceeded."""
    message = _("Requested backup exceeds allowed Backup gigabytes "
                "quota. Requested %(requested)sG, quota is %(quota)sG and "
                "%(consumed)sG has been consumed.")

View File

@ -26,6 +26,7 @@ from manila.policies import security_service
from manila.policies import service from manila.policies import service
from manila.policies import share_access from manila.policies import share_access
from manila.policies import share_access_metadata from manila.policies import share_access_metadata
from manila.policies import share_backup
from manila.policies import share_export_location from manila.policies import share_export_location
from manila.policies import share_group from manila.policies import share_group
from manila.policies import share_group_snapshot from manila.policies import share_group_snapshot
@ -80,4 +81,5 @@ def list_rules():
share_access.list_rules(), share_access.list_rules(),
share_access_metadata.list_rules(), share_access_metadata.list_rules(),
share_transfer.list_rules(), share_transfer.list_rules(),
share_backup.list_rules(),
) )

View File

@ -0,0 +1,154 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from manila.policies import base
# All share backup policies share this name prefix, e.g. 'share_backup:create'.
BASE_POLICY_NAME = 'share_backup:%s'

DEPRECATED_REASON = """
The share backup API now supports system scope and default roles.
"""

# Legacy rules kept so deployments overriding the old check strings keep
# working while the new scoped defaults are phased in.
deprecated_backup_create = policy.DeprecatedRule(
    name=BASE_POLICY_NAME % 'create',
    check_str=base.RULE_DEFAULT,
    deprecated_reason=DEPRECATED_REASON,
    deprecated_since='2023.2/Bobcat'
)
deprecated_backup_get = policy.DeprecatedRule(
    name=BASE_POLICY_NAME % 'get',
    check_str=base.RULE_DEFAULT,
    deprecated_reason=DEPRECATED_REASON,
    deprecated_since='2023.2/Bobcat',
)
deprecated_backup_get_all = policy.DeprecatedRule(
    name=BASE_POLICY_NAME % 'get_all',
    check_str=base.RULE_DEFAULT,
    deprecated_reason=DEPRECATED_REASON,
    deprecated_since='2023.2/Bobcat',
)
deprecated_backup_restore = policy.DeprecatedRule(
    name=BASE_POLICY_NAME % 'restore',
    check_str=base.RULE_ADMIN_OR_OWNER,
    deprecated_reason=DEPRECATED_REASON,
    deprecated_since='2023.2/Bobcat',
)
deprecated_backup_update = policy.DeprecatedRule(
    name=BASE_POLICY_NAME % 'update',
    check_str=base.RULE_ADMIN_OR_OWNER,
    deprecated_reason=DEPRECATED_REASON,
    deprecated_since='2023.2/Bobcat',
)
deprecated_backup_delete = policy.DeprecatedRule(
    name=BASE_POLICY_NAME % 'delete',
    check_str=base.RULE_ADMIN_OR_OWNER,
    deprecated_reason=DEPRECATED_REASON,
    deprecated_since='2023.2/Bobcat',
)
# Documented policy defaults for the share backup API. Each rule carries its
# deprecated predecessor so legacy overrides continue to apply.
share_backup_policies = [
    policy.DocumentedRuleDefault(
        name=BASE_POLICY_NAME % 'create',
        check_str=base.ADMIN_OR_PROJECT_MEMBER,
        scope_types=['project'],
        description="Create share backup.",
        operations=[
            {
                'method': 'POST',
                'path': '/share-backups'
            }
        ],
        deprecated_rule=deprecated_backup_create,
    ),
    policy.DocumentedRuleDefault(
        name=BASE_POLICY_NAME % 'get',
        check_str=base.ADMIN_OR_PROJECT_READER,
        scope_types=['project'],
        description="Get share backup.",
        operations=[
            {
                'method': 'GET',
                'path': '/share-backups/{backup_id}'
            }
        ],
        deprecated_rule=deprecated_backup_get,
    ),
    policy.DocumentedRuleDefault(
        name=BASE_POLICY_NAME % 'get_all',
        check_str=base.ADMIN_OR_PROJECT_READER,
        scope_types=['project'],
        description="Get all share backups.",
        operations=[
            {
                'method': 'GET',
                'path': '/share-backups'
            },
            {
                'method': 'GET',
                'path': '/share-backups/detail'
            },
            {
                # Fixed: path template previously read '(share_id}' with a
                # mismatched brace.
                'method': 'GET',
                'path': '/share-backups/detail?share_id={share_id}',
            },
        ],
        deprecated_rule=deprecated_backup_get_all,
    ),
    policy.DocumentedRuleDefault(
        name=BASE_POLICY_NAME % 'restore',
        check_str=base.ADMIN_OR_PROJECT_MEMBER,
        scope_types=['project'],
        description="Restore a share backup.",
        operations=[
            {
                'method': 'POST',
                'path': '/share-backups/{backup_id}/action'
            }
        ],
        deprecated_rule=deprecated_backup_restore,
    ),
    policy.DocumentedRuleDefault(
        name=BASE_POLICY_NAME % 'update',
        check_str=base.ADMIN_OR_PROJECT_MEMBER,
        scope_types=['project'],
        description="Update a share backup.",
        operations=[
            {
                'method': 'PUT',
                'path': '/share-backups/{backup_id}',
            }
        ],
        deprecated_rule=deprecated_backup_update,
    ),
    policy.DocumentedRuleDefault(
        name=BASE_POLICY_NAME % 'delete',
        check_str=base.ADMIN_OR_PROJECT_MEMBER,
        scope_types=['project'],
        # Fixed: this endpoint is a plain delete, not a force-delete.
        description="Delete a share backup.",
        operations=[
            {
                'method': 'DELETE',
                'path': '/share-backups/{backup_id}'
            }
        ],
        deprecated_rule=deprecated_backup_delete,
    ),
]
def list_rules():
    """Return the share backup policy rules for enforcement registration."""
    return share_backup_policies

View File

@ -97,7 +97,18 @@ quota_opts = [
default='manila.quota.DbQuotaDriver', default='manila.quota.DbQuotaDriver',
help='Default driver to use for quota checks.', help='Default driver to use for quota checks.',
deprecated_group='DEFAULT', deprecated_group='DEFAULT',
deprecated_name='quota_driver'), ] deprecated_name='quota_driver'),
cfg.IntOpt('backups',
default=10,
help='Number of share backups allowed per project.',
deprecated_group='DEFAULT',
deprecated_name='quota_backups'),
cfg.IntOpt('backup_gigabytes',
default=1000,
help='Total amount of storage, in gigabytes, allowed '
'for backups per project.',
deprecated_group='DEFAULT',
deprecated_name='quota_backup_gigabytes'), ]
CONF = cfg.CONF CONF = cfg.CONF
CONF.register_opts(quota_opts, QUOTA_GROUP) CONF.register_opts(quota_opts, QUOTA_GROUP)
@ -1173,6 +1184,9 @@ resources = [
'share_replicas'), 'share_replicas'),
ReservableResource('replica_gigabytes', '_sync_replica_gigabytes', ReservableResource('replica_gigabytes', '_sync_replica_gigabytes',
'replica_gigabytes'), 'replica_gigabytes'),
ReservableResource('backups', '_sync_backups', 'backups'),
ReservableResource('backup_gigabytes', '_sync_backup_gigabytes',
'backup_gigabytes')
] ]

View File

@ -1266,6 +1266,12 @@ class API(base.Base):
msg = _("Share still has %d dependent snapshots.") % len(snapshots) msg = _("Share still has %d dependent snapshots.") % len(snapshots)
raise exception.InvalidShare(reason=msg) raise exception.InvalidShare(reason=msg)
filters = dict(share_id=share_id)
backups = self.db.share_backups_get_all(context, filters=filters)
if len(backups):
msg = _("Share still has %d dependent backups.") % len(backups)
raise exception.InvalidShare(reason=msg)
share_group_snapshot_members_count = ( share_group_snapshot_members_count = (
self.db.count_share_group_snapshot_members_in_share( self.db.count_share_group_snapshot_members_in_share(
context, share_id)) context, share_id))
@ -1308,6 +1314,12 @@ class API(base.Base):
msg = _("Share still has %d dependent snapshots.") % len(snapshots) msg = _("Share still has %d dependent snapshots.") % len(snapshots)
raise exception.InvalidShare(reason=msg) raise exception.InvalidShare(reason=msg)
filters = dict(share_id=share_id)
backups = self.db.share_backups_get_all(context, filters=filters)
if len(backups):
msg = _("Share still has %d dependent backups.") % len(backups)
raise exception.InvalidShare(reason=msg)
share_group_snapshot_members_count = ( share_group_snapshot_members_count = (
self.db.count_share_group_snapshot_members_in_share( self.db.count_share_group_snapshot_members_in_share(
context, share_id)) context, share_id))
@ -3746,3 +3758,170 @@ class API(base.Base):
'subnet_id': new_share_network_subnet_db['id'], 'subnet_id': new_share_network_subnet_db['id'],
}) })
return new_share_network_subnet_db return new_share_network_subnet_db
    def create_share_backup(self, context, share, backup):
        """Validate, reserve quota and kick off creation of a share backup.

        :param context: request context
        :param share: share model the backup is taken from
        :param backup: dict with user-supplied fields ('name',
            'description', optional 'backup_options')
        :returns: the created share backup model
        :raises: InvalidShare, ShareBackupSizeExceedsAvailableQuota,
            BackupLimitExceeded
        """
        share_id = share['id']
        self._check_is_share_busy(share)
        if share['status'] != constants.STATUS_AVAILABLE:
            msg_args = {'share_id': share_id, 'state': share['status']}
            msg = (_("Share %(share_id)s is in '%(state)s' state, but it must "
                     "be in 'available' state to create a backup.") % msg_args)
            raise exception.InvalidShare(message=msg)
        # Backups are not allowed while snapshots or replicas exist.
        snapshots = self.db.share_snapshot_get_all_for_share(context, share_id)
        if snapshots:
            msg = _("Cannot backup share %s while it has snapshots.")
            raise exception.InvalidShare(message=msg % share_id)
        if share.has_replicas:
            msg = _("Cannot backup share %s while it has replicas.")
            raise exception.InvalidShare(message=msg % share_id)
        # Reserve a quota before setting share status and backup status
        try:
            reservations = QUOTAS.reserve(
                context, backups=1, backup_gigabytes=share['size'])
        except exception.OverQuota as e:
            overs = e.kwargs['overs']
            usages = e.kwargs['usages']
            quotas = e.kwargs['quotas']

            def _consumed(resource_name):
                # Total usage = already reserved + currently in use.
                return (usages[resource_name]['reserved'] +
                        usages[resource_name]['in_use'])

            for over in overs:
                if 'backup_gigabytes' in over:
                    msg = ("Quota exceeded for %(s_pid)s, tried to create "
                           "%(s_size)sG backup, but (%(d_consumed)dG of "
                           "%(d_quota)dG already consumed.)")
                    LOG.warning(msg, {'s_pid': context.project_id,
                                      's_size': share['size'],
                                      'd_consumed': _consumed(over),
                                      'd_quota': quotas[over]})
                    raise exception.ShareBackupSizeExceedsAvailableQuota(
                        requested=share['size'],
                        consumed=_consumed('backup_gigabytes'),
                        quota=quotas['backup_gigabytes'])
                elif 'backups' in over:
                    msg = ("Quota exceeded for %(s_pid)s, tried to create "
                           "backup, but (%(d_consumed)d of %(d_quota)d "
                           "backups already consumed.)")
                    LOG.warning(msg, {'s_pid': context.project_id,
                                      'd_consumed': _consumed(over),
                                      'd_quota': quotas[over]})
                    raise exception.BackupLimitExceeded(
                        allowed=quotas[over])
        # Commit the reservation only once the DB record exists; roll back
        # on any failure so quota is not leaked.
        try:
            backup_ref = self.db.share_backup_create(
                context, share['id'],
                {
                    'user_id': context.user_id,
                    'project_id': context.project_id,
                    'progress': '0',
                    'restore_progress': '0',
                    'status': constants.STATUS_CREATING,
                    'display_description': backup.get('description'),
                    'display_name': backup.get('name'),
                    'size': share['size'],
                }
            )
            QUOTAS.commit(context, reservations)
        except Exception:
            with excutils.save_and_reraise_exception():
                QUOTAS.rollback(context, reservations)
        self.db.share_update(
            context, share_id,
            {'status': constants.STATUS_BACKUP_CREATING})
        # Driver-assisted backups (backup_options supplied) are handled by
        # the share service; otherwise the data copy service does the work.
        backup_ref['backup_options'] = backup.get('backup_options', {})
        if backup_ref['backup_options']:
            topic = CONF.share_topic
        else:
            topic = CONF.data_topic
        backup_ref['host'] = share['host']
        self.db.share_backup_update(
            context, backup_ref['id'],
            {'host': backup_ref['host'], 'topic': topic})
        if topic == CONF.share_topic:
            self.share_rpcapi.create_backup(context, backup_ref)
        elif topic == CONF.data_topic:
            data_rpc = data_rpcapi.DataAPI()
            data_rpc.create_backup(context, backup_ref)
        return backup_ref
    def delete_share_backup(self, context, backup):
        """Make the RPC call to delete a share backup.

        Marks the backup 'deleting' and casts the delete to either the
        share service or the data copy service, depending on which topic
        was recorded when the backup was created.

        :param context: request context
        :param backup: the model of backup that is retrieved from DB.
        :raises: InvalidBackup if the backup is not in a deletable state
        """
        if backup.status not in [constants.STATUS_AVAILABLE,
                                 constants.STATUS_ERROR]:
            msg = (_('Backup %s status must be available or error.')
                   % backup['id'])
            raise exception.InvalidBackup(reason=msg)
        self.db.share_backup_update(
            context, backup['id'], {'status': constants.STATUS_DELETING})
        if backup['topic'] == CONF.share_topic:
            self.share_rpcapi.delete_backup(context, backup)
        elif backup['topic'] == CONF.data_topic:
            data_rpc = data_rpcapi.DataAPI()
            data_rpc.delete_backup(context, backup)
    def restore_share_backup(self, context, backup):
        """Make the RPC call to restore a backup.

        The backup is restored onto its own source share, overwriting the
        share's current contents.

        :param context: request context
        :param backup: backup model retrieved from the DB
        :returns: dict with 'backup_id' and 'share_id' of the restore
        :raises: InvalidBackup if the backup is not 'available'
        :raises: InvalidShare if the share is unavailable or too small
        """
        backup_id = backup['id']
        if backup['status'] != constants.STATUS_AVAILABLE:
            msg = (_('Backup %s status must be available.') % backup['id'])
            raise exception.InvalidBackup(reason=msg)
        share = self.get(context, backup['share_id'])
        share_id = share['id']
        if share['status'] != constants.STATUS_AVAILABLE:
            msg = _('Share to be restored to must be available.')
            raise exception.InvalidShare(reason=msg)
        # The target share must be at least as large as the backup.
        backup_size = backup['size']
        LOG.debug('Checking backup size %(backup_size)s against share size '
                  '%(share_size)s.', {'backup_size': backup_size,
                                      'share_size': share['size']})
        if backup_size > share['size']:
            msg = (_('Share size %(share_size)d is too small to restore '
                     'backup of size %(size)d.') %
                   {'share_size': share['size'], 'size': backup_size})
            raise exception.InvalidShare(reason=msg)
        LOG.info("Overwriting share %(share_id)s with restore of "
                 "backup %(backup_id)s.",
                 {'share_id': share_id, 'backup_id': backup_id})
        self.db.share_backup_update(
            context, backup_id,
            {'status': constants.STATUS_RESTORING})
        # source_backup_id lets the share manager's periodic task find the
        # share being restored from this backup.
        self.db.share_update(
            context, share_id,
            {'status': constants.STATUS_BACKUP_RESTORING,
             'source_backup_id': backup_id})
        # Route to the service (share vs data copy) that owns this backup.
        if backup['topic'] == CONF.share_topic:
            self.share_rpcapi.restore_backup(context, backup, share_id)
        elif backup['topic'] == CONF.data_topic:
            data_rpc = data_rpcapi.DataAPI()
            data_rpc.restore_backup(context, backup, share_id)
        restore_info = {'backup_id': backup_id, 'share_id': share_id}
        return restore_info
    def update_share_backup(self, context, backup, fields):
        """Update the given fields of a backup and return the updated model."""
        return self.db.share_backup_update(context, backup['id'], fields)

View File

@ -3633,3 +3633,61 @@ class ShareDriver(object):
""" """
raise NotImplementedError() raise NotImplementedError()
    def create_backup(self, context, share_instance, backup):
        """Starts backup of a given share_instance into backup.

        Driver should implement this method if willing to perform backup of
        share_instance. This method should start the backup procedure in the
        backend and end. Following steps should be done in
        'create_backup_continue'.

        :param context: The 'context.RequestContext' object for the request.
        :param share_instance: Reference to the original share instance.
        :param backup: Share backup model.
        :raises: NotImplementedError when the driver has no backup support.
        """
        raise NotImplementedError()
    def create_backup_continue(self, context, share_instance, backup):
        """Continue backup of a given share_instance into backup.

        Driver must implement this method if it supports 'create_backup'
        method. This method should continue the remaining backup procedure
        in the backend and report the progress of backup.

        :param context: The 'context.RequestContext' object for the request.
        :param share_instance: Reference to the original share instance.
        :param backup: Share backup model.
        :returns: the share manager reads a 'total_progress' key (string
            percentage, '100' meaning complete) from the returned dict.
        :raises: NotImplementedError when the driver has no backup support.
        """
        raise NotImplementedError()
    def delete_backup(self, context, backup):
        """Is called to remove backup.

        :param context: The 'context.RequestContext' object for the request.
        :param backup: Share backup model to remove from the backend.
        :raises: NotImplementedError when the driver has no backup support.
        """
        raise NotImplementedError()
    def restore_backup(self, context, backup, share_instance):
        """Starts restoring backup into a given share_instance.

        Driver should implement this method if willing to perform restore of
        backup into a share_instance. This method should start the backup
        restore procedure in the backend and end. Following steps should be
        done in 'restore_backup_continue'.

        :param context: The 'context.RequestContext' object for the request.
        :param share_instance: Reference to the original share instance.
        :param backup: Share backup model.
        :raises: NotImplementedError when the driver has no backup support.
        """
        raise NotImplementedError()
    def restore_backup_continue(self, context, backup, share_instance):
        """Continue restore of a given backup into share_instance.

        Driver must implement this method if it supports 'restore_backup'
        method. This method should continue the remaining restore procedure
        in the backend and report the progress of backup restore.

        :param context: The 'context.RequestContext' object for the request.
        :param share_instance: Reference to the original share instance.
        :param backup: Share backup model.
        :returns: the share manager reads a 'total_progress' key (string
            percentage, '100' meaning complete) from the returned dict.
        :raises: NotImplementedError when the driver has no backup support.
        """
        raise NotImplementedError()

View File

@ -142,6 +142,16 @@ share_manager_opts = [
help='This value, specified in seconds, determines how often ' help='This value, specified in seconds, determines how often '
'the share manager will check for expired transfers and ' 'the share manager will check for expired transfers and '
'destroy them and roll back share state.'), 'destroy them and roll back share state.'),
cfg.IntOpt('driver_backup_continue_update_interval',
default=60,
help='This value, specified in seconds, determines how often '
'the share manager will poll to perform the next steps '
'of backup such as fetch the progress of backup.'),
cfg.IntOpt('driver_restore_continue_update_interval',
default=60,
help='This value, specified in seconds, determines how often '
'the share manager will poll to perform the next steps '
'of restore such as fetch the progress of restore.')
] ]
CONF = cfg.CONF CONF = cfg.CONF
@ -249,7 +259,7 @@ def add_hooks(f):
class ShareManager(manager.SchedulerDependentManager): class ShareManager(manager.SchedulerDependentManager):
"""Manages NAS storages.""" """Manages NAS storages."""
RPC_API_VERSION = '1.25' RPC_API_VERSION = '1.26'
def __init__(self, share_driver=None, service_name=None, *args, **kwargs): def __init__(self, share_driver=None, service_name=None, *args, **kwargs):
"""Load the driver from args, or from flags.""" """Load the driver from args, or from flags."""
@ -5077,6 +5087,177 @@ class ShareManager(manager.SchedulerDependentManager):
context, share, share_instance, event_suffix, context, share, share_instance, event_suffix,
extra_usage_info=extra_usage_info, host=self.host) extra_usage_info=extra_usage_info, host=self.host)
    @utils.require_driver_initialized
    def create_backup(self, context, backup):
        """Start a driver-assisted backup of the backup's source share.

        On driver failure the share is returned to 'available' and the
        backup is marked 'error' before the exception is re-raised.

        :param context: request context
        :param backup: share backup model (provides 'share_id' and 'id')
        """
        share_id = backup['share_id']
        backup_id = backup['id']
        share = self.db.share_get(context, share_id)
        share_instance = self._get_share_instance(context, share)
        LOG.info('Create backup started, backup: %(backup)s share: '
                 '%(share)s.', {'backup': backup_id, 'share': share_id})
        try:
            self.driver.create_backup(context, share_instance, backup)
        except Exception as err:
            with excutils.save_and_reraise_exception():
                LOG.error("Failed to create share backup %s by driver.",
                          backup_id)
                self.db.share_update(
                    context, share_id,
                    {'status': constants.STATUS_AVAILABLE})
                self.db.share_backup_update(
                    context, backup_id,
                    {'status': constants.STATUS_ERROR, 'fail_reason': err})
@periodic_task.periodic_task(
spacing=CONF.driver_backup_continue_update_interval)
@utils.require_driver_initialized
def create_backup_continue(self, context):
"""Invokes driver to continue backup of share."""
filters = {'status': constants.STATUS_CREATING,
'host': self.host,
'topic': CONF.share_topic}
backups = self.db.share_backups_get_all(context, filters)
for backup in backups:
backup_id = backup['id']
share_id = backup['share_id']
share = self.db.share_get(context, share_id)
share_instance = self._get_share_instance(context, share)
result = {}
try:
result = self.driver.create_backup_continue(
context, share_instance, backup)
progress = result.get('total_progress', '0')
self.db.share_backup_update(context, backup_id,
{'progress': progress})
if progress == '100':
self.db.share_update(
context, share_id,
{'status': constants.STATUS_AVAILABLE})
self.db.share_backup_update(
context, backup_id,
{'status': constants.STATUS_AVAILABLE})
LOG.info("Created share backup %s successfully.",
backup_id)
except Exception:
LOG.warning("Failed to get progress of share %(share)s "
"backing up in share_backup %(backup).",
{'share': share_id, 'backup': backup_id})
self.db.share_update(
context, share_id,
{'status': constants.STATUS_AVAILABLE})
self.db.share_backup_update(
context, backup_id,
{'status': constants.STATUS_ERROR, 'progress': '0'})
    def delete_backup(self, context, backup):
        """Delete a backup via the driver, release quota and remove the row.

        Quota release is best-effort: a reservation failure is logged but
        does not block deletion of the DB record.

        NOTE(review): unlike create/restore paths this method is not
        decorated with @utils.require_driver_initialized — confirm whether
        that is intentional.

        :param context: request context
        :param backup: share backup model to delete
        """
        LOG.info('Delete backup started, backup: %s.', backup['id'])
        backup_id = backup['id']
        project_id = backup['project_id']
        try:
            self.driver.delete_backup(context, backup)
        except Exception:
            with excutils.save_and_reraise_exception():
                LOG.error("Failed to delete share backup %s.", backup_id)
                self.db.share_backup_update(
                    context, backup_id,
                    {'status': constants.STATUS_ERROR_DELETING})
        # Return the backup's count and gigabytes to the project quota.
        try:
            reserve_opts = {
                'backups': -1,
                'backup_gigabytes': -backup['size'],
            }
            reservations = QUOTAS.reserve(
                context, project_id=project_id, **reserve_opts)
        except Exception as e:
            reservations = None
            LOG.warning("Failed to update backup quota for %(pid)s: %(err)s.",
                        {'pid': project_id, 'err': e})
        if reservations:
            QUOTAS.commit(context, reservations, project_id=project_id)
        self.db.share_backup_delete(context, backup_id)
        LOG.info("Share backup %s deleted successfully.", backup_id)
    def restore_backup(self, context, backup, share_id):
        """Start a driver-assisted restore of a backup onto a share.

        On driver failure the share is marked 'backup_restoring_error' and
        the backup 'error' before the exception is re-raised.

        :param context: request context
        :param backup: share backup model being restored
        :param share_id: id of the share being overwritten
        """
        LOG.info('Restore backup started, backup: %(backup_id)s '
                 'share: %(share_id)s.',
                 {'backup_id': backup['id'], 'share_id': share_id})
        backup_id = backup['id']
        share = self.db.share_get(context, share_id)
        share_instance = self._get_share_instance(context, share)
        try:
            self.driver.restore_backup(context, backup, share_instance)
        except Exception:
            with excutils.save_and_reraise_exception():
                LOG.error("Failed to restore backup %(backup)s to share "
                          "%(share)s by driver.",
                          {'backup': backup_id, 'share': share_id})
                self.db.share_update(
                    context, share_id,
                    {'status': constants.STATUS_BACKUP_RESTORING_ERROR})
                self.db.share_backup_update(
                    context, backup['id'],
                    {'status': constants.STATUS_ERROR})
@periodic_task.periodic_task(
spacing=CONF.driver_restore_continue_update_interval)
@utils.require_driver_initialized
def restore_backup_continue(self, context):
filters = {'status': constants.STATUS_RESTORING,
'host': self.host,
'topic': CONF.share_topic}
backups = self.db.share_backups_get_all(context, filters)
for backup in backups:
backup_id = backup['id']
try:
filters = {'source_backup_id': backup_id}
shares = self.db.share_get_all(context, filters)
except Exception:
LOG.warning('Failed to get shares for backup %s', backup_id)
continue
for share in shares:
if share['status'] != constants.STATUS_BACKUP_RESTORING:
continue
share_id = share['id']
share_instance = self._get_share_instance(context, share)
result = {}
try:
result = self.driver.restore_backup_continue(
context, backup, share_instance)
progress = result.get('total_progress', '0')
self.db.share_backup_update(
context, backup_id, {'restore_progress': progress})
if progress == '100':
self.db.share_update(
context, share_id,
{'status': constants.STATUS_AVAILABLE})
self.db.share_backup_update(
context, backup_id,
{'status': constants.STATUS_AVAILABLE})
LOG.info("Share backup %s restored successfully.",
backup_id)
except Exception:
LOG.warning("Failed to get progress of share_backup "
"%(backup)s restoring in share %(share).",
{'share': share_id, 'backup': backup_id})
self.db.share_update(
context, share_id,
{'status': constants.STATUS_BACKUP_RESTORING_ERROR})
self.db.share_backup_update(
context, backup_id,
{'status': constants.STATUS_AVAILABLE,
'restore_progress': '0'})
@periodic_task.periodic_task( @periodic_task.periodic_task(
spacing=CONF.share_usage_size_update_interval, spacing=CONF.share_usage_size_update_interval,
enabled=CONF.enable_gathering_share_usage_size) enabled=CONF.enable_gathering_share_usage_size)

View File

@ -85,6 +85,8 @@ class ShareAPI(object):
check_update_share_server_network_allocations() check_update_share_server_network_allocations()
1.24 - Add quiesce_wait_time paramater to promote_share_replica() 1.24 - Add quiesce_wait_time paramater to promote_share_replica()
1.25 - Add transfer_accept() 1.25 - Add transfer_accept()
1.26 - Add create_backup() and delete_backup()
restore_backup() methods
""" """
BASE_RPC_API_VERSION = '1.0' BASE_RPC_API_VERSION = '1.0'
@ -93,7 +95,7 @@ class ShareAPI(object):
super(ShareAPI, self).__init__() super(ShareAPI, self).__init__()
target = messaging.Target(topic=CONF.share_topic, target = messaging.Target(topic=CONF.share_topic,
version=self.BASE_RPC_API_VERSION) version=self.BASE_RPC_API_VERSION)
self.client = rpc.get_client(target, version_cap='1.25') self.client = rpc.get_client(target, version_cap='1.26')
def create_share_instance(self, context, share_instance, host, def create_share_instance(self, context, share_instance, host,
request_spec, filter_properties, request_spec, filter_properties,
@ -502,3 +504,25 @@ class ShareAPI(object):
'update_share_server_network_allocations', 'update_share_server_network_allocations',
share_network_id=share_network_id, share_network_id=share_network_id,
new_share_network_subnet_id=new_share_network_subnet_id) new_share_network_subnet_id=new_share_network_subnet_id)
def create_backup(self, context, backup):
host = utils.extract_host(backup['host'])
call_context = self.client.prepare(server=host, version='1.26')
return call_context.cast(context,
'create_backup',
backup=backup)
def delete_backup(self, context, backup):
host = utils.extract_host(backup['host'])
call_context = self.client.prepare(server=host, version='1.26')
return call_context.cast(context,
'delete_backup',
backup=backup)
def restore_backup(self, context, backup, share_id):
host = utils.extract_host(backup['host'])
call_context = self.client.prepare(server=host, version='1.26')
return call_context.cast(context,
'restore_backup',
backup=backup,
share_id=share_id)

View File

@ -97,6 +97,9 @@ class QuotaSetsControllerTest(test.TestCase):
expected['quota_class_set']['replica_gigabytes'] = 1000 expected['quota_class_set']['replica_gigabytes'] = 1000
if req.api_version_request >= api_version.APIVersionRequest("2.62"): if req.api_version_request >= api_version.APIVersionRequest("2.62"):
expected['quota_class_set']['per_share_gigabytes'] = -1 expected['quota_class_set']['per_share_gigabytes'] = -1
if req.api_version_request >= api_version.APIVersionRequest("2.80"):
expected['quota_class_set']['backups'] = 10
expected['quota_class_set']['backup_gigabytes'] = 1000
result = controller().show(req, self.class_name) result = controller().show(req, self.class_name)
@ -154,6 +157,9 @@ class QuotaSetsControllerTest(test.TestCase):
expected['quota_class_set']['replica_gigabytes'] = 1000 expected['quota_class_set']['replica_gigabytes'] = 1000
if req.api_version_request >= api_version.APIVersionRequest("2.62"): if req.api_version_request >= api_version.APIVersionRequest("2.62"):
expected['quota_class_set']['per_share_gigabytes'] = -1 expected['quota_class_set']['per_share_gigabytes'] = -1
if req.api_version_request >= api_version.APIVersionRequest("2.80"):
expected['quota_class_set']['backups'] = 10
expected['quota_class_set']['backup_gigabytes'] = 1000
update_result = controller().update( update_result = controller().update(
req, self.class_name, body=body) req, self.class_name, body=body)

View File

@ -0,0 +1,507 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ddt
from oslo_config import cfg
from unittest import mock
from webob import exc
from manila.api.v2 import share_backups
from manila.common import constants
from manila import context
from manila import exception
from manila import policy
from manila import share
from manila import test
from manila.tests.api import fakes
from manila.tests import db_utils
from manila.tests import fake_share
CONF = cfg.CONF
@ddt.ddt
class ShareBackupsApiTest(test.TestCase):
"""Share backups API Test Cases."""
    def setUp(self):
        """Build member- and admin-scoped requests and mock policy checks."""
        super(ShareBackupsApiTest, self).setUp()
        self.controller = share_backups.ShareBackupController()
        self.resource_name = self.controller.resource_name
        self.api_version = share_backups.MIN_SUPPORTED_API_VERSION
        self.backups_req = fakes.HTTPRequest.blank(
            '/share-backups', version=self.api_version,
            experimental=True)
        self.member_context = context.RequestContext('fake', 'fake')
        self.backups_req.environ['manila.context'] = self.member_context
        self.backups_req_admin = fakes.HTTPRequest.blank(
            '/share-backups', version=self.api_version,
            experimental=True, use_admin_context=True)
        self.admin_context = self.backups_req_admin.environ['manila.context']
        # Policy enforcement is mocked out; tests assert on its call args.
        self.mock_policy_check = self.mock_object(policy, 'check_policy')
    def _get_context(self, role):
        # Resolve e.g. role='member' to the self.member_context attribute.
        return getattr(self, '%s_context' % role)
    def _create_backup_get_req(self, **kwargs):
        """Create a backup in the DB and a POST /action request targeting it.

        :returns: (backup model, webob request) tuple
        """
        if 'status' not in kwargs:
            kwargs['status'] = constants.STATUS_AVAILABLE
        backup = db_utils.create_share_backup(**kwargs)
        req = fakes.HTTPRequest.blank(
            '/v2/fake/share-backups/%s/action' % backup['id'],
            version=self.api_version)
        req.method = 'POST'
        req.headers['content-type'] = 'application/json'
        req.headers['X-Openstack-Manila-Api-Version'] = self.api_version
        req.headers['X-Openstack-Manila-Api-Experimental'] = True
        return backup, req
    def _get_fake_backup(self, admin=False, summary=False, **values):
        """Return (raw fake backup, expected API view of that backup).

        ``summary`` limits the expected view to id/share_id/backup_state;
        ``admin`` additionally exposes host/topic, mirroring the admin-only
        fields in the API view builder.
        """
        backup = fake_share.fake_backup(**values)
        backup['updated_at'] = '2016-06-12T19:57:56.506805'
        expected_keys = {'id', 'share_id', 'backup_state'}
        expected_backup = {key: backup[key] for key in backup if key
                           in expected_keys}
        if not summary:
            expected_backup.update({
                'id': backup.get('id'),
                'size': backup.get('size'),
                'share_id': backup.get('share_id'),
                'availability_zone': backup.get('availability_zone'),
                'created_at': backup.get('created_at'),
                # API exposes the DB 'status' as 'backup_state'.
                'backup_state': backup.get('status'),
                'updated_at': backup.get('updated_at'),
                'name': backup.get('display_name'),
                'description': backup.get('display_description'),
            })
            if admin:
                expected_backup.update({
                    'host': backup.get('host'),
                    'topic': backup.get('topic'),
                })
        return backup, expected_backup
    def test_list_backups_summary(self):
        # index() should return the summary view and enforce 'get_all'.
        fake_backup, expected_backup = self._get_fake_backup(summary=True)
        self.mock_object(share_backups.db, 'share_backups_get_all',
                         mock.Mock(return_value=[fake_backup]))
        res_dict = self.controller.index(self.backups_req)
        self.assertEqual([expected_backup], res_dict['share_backups'])
        self.mock_policy_check.assert_called_once_with(
            self.member_context, self.resource_name, 'get_all')
    def test_list_share_backups_summary(self):
        """index() filters by share_id and returns summary views."""
        fake_backup, expected_backup = self._get_fake_backup(summary=True)
        # The share_id filter makes the controller resolve the share first.
        self.mock_object(share.API, 'get',
                         mock.Mock(return_value={'id': 'FAKE_SHAREID'}))
        self.mock_object(share_backups.db, 'share_backups_get_all',
                         mock.Mock(return_value=[fake_backup]))
        req = fakes.HTTPRequest.blank(
            '/share-backups?share_id=FAKE_SHARE_ID',
            version=self.api_version, experimental=True)
        req_context = req.environ['manila.context']
        res_dict = self.controller.index(req)
        self.assertEqual([expected_backup], res_dict['share_backups'])
        self.mock_policy_check.assert_called_once_with(
            req_context, self.resource_name, 'get_all')
    @ddt.data(True, False)
    def test_list_backups_detail(self, is_admin):
        """detail() includes host/topic fields only for admin contexts."""
        fake_backup, expected_backup = self._get_fake_backup(admin=is_admin)
        self.mock_object(share_backups.db, 'share_backups_get_all',
                         mock.Mock(return_value=[fake_backup]))
        # Pick the request whose context matches the expected view.
        req = self.backups_req if not is_admin else self.backups_req_admin
        req_context = req.environ['manila.context']
        res_dict = self.controller.detail(req)
        self.assertEqual([expected_backup], res_dict['share_backups'])
        self.mock_policy_check.assert_called_once_with(
            req_context, self.resource_name, 'get_all')
    def test_list_share_backups_detail_with_limit(self):
        """detail() honors the 'limit' query parameter."""
        fake_backup_1, expected_backup_1 = self._get_fake_backup()
        fake_backup_2, expected_backup_2 = self._get_fake_backup(
            id="fake_id2")
        # The DB layer applies the limit, so only one backup is returned.
        self.mock_object(
            share_backups.db, 'share_backups_get_all',
            mock.Mock(return_value=[fake_backup_1]))
        req = fakes.HTTPRequest.blank('/share-backups?limit=1',
                                      version=self.api_version,
                                      experimental=True)
        req_context = req.environ['manila.context']
        res_dict = self.controller.detail(req)
        self.assertEqual(1, len(res_dict['share_backups']))
        self.assertEqual([expected_backup_1], res_dict['share_backups'])
        self.mock_policy_check.assert_called_once_with(
            req_context, self.resource_name, 'get_all')
    def test_list_share_backups_detail_with_limit_and_offset(self):
        """detail() honors 'limit' combined with 'offset'."""
        fake_backup_1, expected_backup_1 = self._get_fake_backup()
        fake_backup_2, expected_backup_2 = self._get_fake_backup(
            id="fake_id2")
        # Offset 1 with limit 1 leaves only the second backup.
        self.mock_object(
            share_backups.db, 'share_backups_get_all',
            mock.Mock(return_value=[fake_backup_2]))
        req = fakes.HTTPRequest.blank(
            '/share-backups/detail?limit=1&offset=1',
            version=self.api_version, experimental=True)
        req_context = req.environ['manila.context']
        res_dict = self.controller.detail(req)
        self.assertEqual(1, len(res_dict['share_backups']))
        self.assertEqual([expected_backup_2], res_dict['share_backups'])
        self.mock_policy_check.assert_called_once_with(
            req_context, self.resource_name, 'get_all')
    def test_list_share_backups_detail_invalid_share(self):
        """An unresolvable share_id filter yields HTTP 400 and no view."""
        # NOTE(review): the NotFound is stubbed on the DB listing call;
        # confirm this is the lookup the controller translates to 400
        # (the share.API.get path is exercised by other tests).
        self.mock_object(share_backups.db, 'share_backups_get_all',
                         mock.Mock(side_effect=exception.NotFound))
        mock__view_builder_call = self.mock_object(
            share_backups.backup_view.BackupViewBuilder,
            'detail_list')
        req = self.backups_req
        req.GET['share_id'] = 'FAKE_SHARE_ID'
        self.assertRaises(exc.HTTPBadRequest,
                          self.controller.detail, req)
        self.assertFalse(mock__view_builder_call.called)
        self.mock_policy_check.assert_called_once_with(
            self.member_context, self.resource_name, 'get_all')
    def test_list_share_backups_detail(self):
        """detail() filters by share_id and returns detail views."""
        fake_backup, expected_backup = self._get_fake_backup()
        self.mock_object(share.API, 'get',
                         mock.Mock(return_value={'id': 'FAKE_SHAREID'}))
        self.mock_object(share_backups.db, 'share_backups_get_all',
                         mock.Mock(return_value=[fake_backup]))
        req = fakes.HTTPRequest.blank(
            '/share-backups?share_id=FAKE_SHARE_ID',
            version=self.api_version, experimental=True)
        # Force the member context so the policy-check target is known.
        req.environ['manila.context'] = (
            self.member_context)
        req_context = req.environ['manila.context']
        res_dict = self.controller.detail(req)
        self.assertEqual([expected_backup], res_dict['share_backups'])
        self.mock_policy_check.assert_called_once_with(
            req_context, self.resource_name, 'get_all')
    def test_list_share_backups_with_limit(self):
        """Listing with share_id filter honors the 'limit' parameter.

        NOTE(review): despite the name, this exercises detail(), not
        index() — mirroring the sibling limit/offset tests.
        """
        fake_backup_1, expected_backup_1 = self._get_fake_backup()
        fake_backup_2, expected_backup_2 = self._get_fake_backup(
            id="fake_id2")
        self.mock_object(share.API, 'get',
                         mock.Mock(return_value={'id': 'FAKE_SHAREID'}))
        # The DB layer applies the limit, so only one backup comes back.
        self.mock_object(
            share_backups.db, 'share_backups_get_all',
            mock.Mock(return_value=[fake_backup_1]))
        req = fakes.HTTPRequest.blank(
            '/share-backups?share_id=FAKE_SHARE_ID&limit=1',
            version=self.api_version, experimental=True)
        req_context = req.environ['manila.context']
        res_dict = self.controller.detail(req)
        self.assertEqual(1, len(res_dict['share_backups']))
        self.assertEqual([expected_backup_1], res_dict['share_backups'])
        self.mock_policy_check.assert_called_once_with(
            req_context, self.resource_name, 'get_all')
    def test_list_share_backups_with_limit_and_offset(self):
        """Listing with share_id filter honors 'limit' plus 'offset'."""
        fake_backup_1, expected_backup_1 = self._get_fake_backup()
        fake_backup_2, expected_backup_2 = self._get_fake_backup(
            id="fake_id2")
        self.mock_object(share.API, 'get',
                         mock.Mock(return_value={'id': 'FAKE_SHAREID'}))
        # Offset 1 with limit 1 leaves only the second backup.
        self.mock_object(
            share_backups.db, 'share_backups_get_all',
            mock.Mock(return_value=[fake_backup_2]))
        req = fakes.HTTPRequest.blank(
            '/share-backups?share_id=FAKE_SHARE_ID&limit=1&offset=1',
            version=self.api_version, experimental=True)
        req_context = req.environ['manila.context']
        res_dict = self.controller.detail(req)
        self.assertEqual(1, len(res_dict['share_backups']))
        self.assertEqual([expected_backup_2], res_dict['share_backups'])
        self.mock_policy_check.assert_called_once_with(
            req_context, self.resource_name, 'get_all')
def test_show(self):
fake_backup, expected_backup = self._get_fake_backup()
self.mock_object(
share_backups.db, 'share_backup_get',
mock.Mock(return_value=fake_backup))
req = self.backups_req
res_dict = self.controller.show(req, fake_backup.get('id'))
self.assertEqual(expected_backup, res_dict['share_backup'])
    def test_show_no_backup(self):
        """show() of a missing backup raises 404 without building a view."""
        mock__view_builder_call = self.mock_object(
            share_backups.backup_view.BackupViewBuilder, 'detail')
        fake_exception = exception.ShareBackupNotFound(
            backup_id='FAKE_backup_ID')
        self.mock_object(share_backups.db, 'share_backup_get', mock.Mock(
            side_effect=fake_exception))
        self.assertRaises(exc.HTTPNotFound,
                          self.controller.show,
                          self.backups_req,
                          'FAKE_backup_ID')
        # The view builder must never have been reached.
        self.assertFalse(mock__view_builder_call.called)
def test_create_invalid_body(self):
body = {}
mock__view_builder_call = self.mock_object(
share_backups.backup_view.BackupViewBuilder,
'detail_list')
self.assertRaises(exc.HTTPUnprocessableEntity,
self.controller.create,
self.backups_req, body)
self.assertEqual(0, mock__view_builder_call.call_count)
    def test_create_no_share_id(self):
        """Creating a backup without a share_id fails with HTTP 400."""
        body = {
            'share_backup': {
                'share_id': None,
                'availability_zone': None,
            }
        }
        mock__view_builder_call = self.mock_object(
            share_backups.backup_view.BackupViewBuilder,
            'detail_list')
        # The share lookup fails because no share id was supplied.
        self.mock_object(share_backups.db, 'share_get',
                         mock.Mock(side_effect=exception.NotFound))
        self.assertRaises(exc.HTTPBadRequest,
                          self.controller.create,
                          self.backups_req, body)
        self.assertFalse(mock__view_builder_call.called)
    def test_create_invalid_share_id(self):
        """A share_id that does not resolve to a share yields HTTP 400."""
        body = {
            'share_backup': {
                'share_id': None,
            }
        }
        mock__view_builder_call = self.mock_object(
            share_backups.backup_view.BackupViewBuilder,
            'detail_list')
        # share.API.get cannot find the share, so the request is rejected.
        self.mock_object(share.API, 'get',
                         mock.Mock(side_effect=exception.NotFound))
        self.assertRaises(exc.HTTPBadRequest,
                          self.controller.create,
                          self.backups_req, body)
        self.assertFalse(mock__view_builder_call.called)
@ddt.data(exception.InvalidBackup, exception.ShareBusyException)
def test_create_exception_path(self, exception_type):
fake_backup, _ = self._get_fake_backup()
mock__view_builder_call = self.mock_object(
share_backups.backup_view.BackupViewBuilder,
'detail_list')
body = {
'share_backup': {
'share_id': 'FAKE_SHAREID',
}
}
exc_args = {'id': 'xyz', 'reason': 'abc'}
self.mock_object(share.API, 'get',
mock.Mock(return_value={'id': 'FAKE_SHAREID'}))
self.mock_object(share.API, 'create_share_backup',
mock.Mock(side_effect=exception_type(**exc_args)))
if exception_type == exception.InvalidBackup:
expected_exception = exc.HTTPBadRequest
else:
expected_exception = exc.HTTPConflict
self.assertRaises(expected_exception,
self.controller.create,
self.backups_req, body)
self.assertFalse(mock__view_builder_call.called)
def test_create(self):
fake_backup, expected_backup = self._get_fake_backup()
body = {
'share_backup': {
'share_id': 'FAKE_SHAREID',
}
}
self.mock_object(share.API, 'get',
mock.Mock(return_value={'id': 'FAKE_SHAREID'}))
self.mock_object(share.API, 'create_share_backup',
mock.Mock(return_value=fake_backup))
req = self.backups_req
res_dict = self.controller.create(req, body)
self.assertEqual(expected_backup, res_dict['share_backup'])
    def test_delete_invalid_backup(self):
        """Deleting a missing backup yields 404 and no share API call."""
        fake_exception = exception.ShareBackupNotFound(
            backup_id='FAKE_backup_ID')
        self.mock_object(share_backups.db, 'share_backup_get',
                         mock.Mock(side_effect=fake_exception))
        mock_delete_backup_call = self.mock_object(
            share.API, 'delete_share_backup')
        self.assertRaises(
            exc.HTTPNotFound, self.controller.delete,
            self.backups_req, 'FAKE_backup_ID')
        # The delete must fail before ever reaching the share API.
        self.assertFalse(mock_delete_backup_call.called)
    def test_delete_exception(self):
        """Deleting a backup that is still being created yields HTTP 400."""
        fake_backup_1 = self._get_fake_backup(
            share_id='FAKE_SHARE_ID',
            backup_state=constants.STATUS_BACKUP_CREATING)[0]
        fake_backup_2 = self._get_fake_backup(
            share_id='FAKE_SHARE_ID',
            backup_state=constants.STATUS_BACKUP_CREATING)[0]
        exception_type = exception.InvalidBackup(reason='xyz')
        self.mock_object(share_backups.db, 'share_backup_get',
                         mock.Mock(return_value=fake_backup_1))
        self.mock_object(
            share_backups.db, 'share_backups_get_all',
            mock.Mock(return_value=[fake_backup_1, fake_backup_2]))
        # The share API rejects the delete with InvalidBackup -> 400.
        self.mock_object(share.API, 'delete_share_backup',
                         mock.Mock(side_effect=exception_type))
        self.assertRaises(exc.HTTPBadRequest, self.controller.delete,
                          self.backups_req, 'FAKE_backup_ID')
def test_delete(self):
fake_backup = self._get_fake_backup(
share_id='FAKE_SHARE_ID',
backup_state=constants.STATUS_AVAILABLE)[0]
self.mock_object(share_backups.db, 'share_backup_get',
mock.Mock(return_value=fake_backup))
self.mock_object(share.API, 'delete_share_backup')
resp = self.controller.delete(
self.backups_req, 'FAKE_backup_ID')
self.assertEqual(202, resp.status_code)
    def test_restore_invalid_backup_id(self):
        """Restoring a nonexistent backup yields HTTP 404."""
        body = {'restore': None}
        fake_exception = exception.ShareBackupNotFound(
            backup_id='FAKE_BACKUP_ID')
        # NOTE(review): this stubs share.API 'restore'; confirm the
        # controller actually calls that attribute here — the success
        # path (test_restore) stubs 'restore_share_backup' instead.
        self.mock_object(share.API, 'restore',
                         mock.Mock(side_effect=fake_exception))
        self.assertRaises(exc.HTTPNotFound,
                          self.controller.restore,
                          self.backups_req,
                          'FAKE_BACKUP_ID', body)
    def test_restore(self):
        """restore() delegates to the share API and returns its result."""
        body = {'restore': {'share_id': 'fake_id'}}
        fake_backup = self._get_fake_backup(
            share_id='FAKE_SHARE_ID',
            backup_state=constants.STATUS_AVAILABLE)[0]
        self.mock_object(share_backups.db, 'share_backup_get',
                         mock.Mock(return_value=fake_backup))
        # What the share API's restore_share_backup is stubbed to return.
        fake_backup_restore = {
            'share_id': 'FAKE_SHARE_ID',
            'backup_id': fake_backup['id'],
        }
        mock_api_restore_backup_call = self.mock_object(
            share.API, 'restore_share_backup',
            mock.Mock(return_value=fake_backup_restore))
        self.mock_object(share.API, 'get',
                         mock.Mock(return_value={'id': 'FAKE_SHAREID'}))
        resp = self.controller.restore(self.backups_req,
                                       fake_backup['id'], body)
        self.assertEqual(fake_backup_restore, resp['restore'])
        self.assertTrue(mock_api_restore_backup_call.called)
    def test_update(self):
        """update() renames the backup via the share API."""
        fake_backup = self._get_fake_backup(
            share_id='FAKE_SHARE_ID',
            backup_state=constants.STATUS_AVAILABLE)[0]
        self.mock_object(share_backups.db, 'share_backup_get',
                         mock.Mock(return_value=fake_backup))
        body = {'share_backup': {'name': 'backup1'}}
        # What the share API's update_share_backup is stubbed to return.
        fake_backup_update = {
            'share_id': 'FAKE_SHARE_ID',
            'backup_id': fake_backup['id'],
            'display_name': 'backup1'
        }
        mock_api_update_backup_call = self.mock_object(
            share.API, 'update_share_backup',
            mock.Mock(return_value=fake_backup_update))
        resp = self.controller.update(self.backups_req,
                                      fake_backup['id'], body)
        # The DB 'display_name' field is exposed as 'name' in the view.
        self.assertEqual(fake_backup_update['display_name'],
                         resp['share_backup']['name'])
        self.assertTrue(mock_api_update_backup_call.called)
    @ddt.data('index', 'detail')
    def test_policy_not_authorized(self, method_name):
        """A PolicyNotAuthorized error surfaces as HTTP 403."""
        method = getattr(self.controller, method_name)
        arguments = {
            'id': 'FAKE_backup_ID',
            'body': {'FAKE_KEY': 'FAKE_VAL'},
        }
        # index/detail take no extra arguments beyond the request, so the
        # kwargs are cleared for them (currently the only ddt inputs).
        if method_name in ('index', 'detail'):
            arguments.clear()
        noauthexc = exception.PolicyNotAuthorized(action=method)
        with mock.patch.object(
                policy, 'check_policy', mock.Mock(side_effect=noauthexc)):
            self.assertRaises(
                exc.HTTPForbidden, method, self.backups_req, **arguments)
    # NOTE(review): method name has a typo ('upsupported'); kept as-is so
    # any existing test selection by name keeps working.
    @ddt.data('index', 'detail', 'show', 'create', 'delete')
    def test_upsupported_microversion(self, method_name):
        """All backup endpoints reject pre-2.80 API microversions."""
        unsupported_microversions = ('1.0', '2.2', '2.18')
        method = getattr(self.controller, method_name)
        arguments = {
            'id': 'FAKE_BACKUP_ID',
            'body': {'FAKE_KEY': 'FAKE_VAL'},
        }
        # index/detail take no extra arguments beyond the request.
        if method_name in ('index', 'detail'):
            arguments.clear()
        for microversion in unsupported_microversions:
            req = fakes.HTTPRequest.blank(
                '/share-backups', version=microversion,
                experimental=True)
            self.assertRaises(exception.VersionNotFoundForAPIMethod,
                              method, req, **arguments)

View File

@ -36,6 +36,7 @@ class ViewBuilderTestCase(test.TestCase):
("fake_quota_class", "2.39"), (None, "2.39"), ("fake_quota_class", "2.39"), (None, "2.39"),
("fake_quota_class", "2.53"), (None, "2.53"), ("fake_quota_class", "2.53"), (None, "2.53"),
("fake_quota_class", "2.62"), (None, "2.62"), ("fake_quota_class", "2.62"), (None, "2.62"),
("fake_quota_class", "2.80"), (None, "2.80"),
) )
@ddt.unpack @ddt.unpack
def test_detail_list_with_share_type(self, quota_class, microversion): def test_detail_list_with_share_type(self, quota_class, microversion):
@ -82,6 +83,16 @@ class ViewBuilderTestCase(test.TestCase):
"per_share_gigabytes"] = fake_per_share_gigabytes "per_share_gigabytes"] = fake_per_share_gigabytes
quota_class_set['per_share_gigabytes'] = fake_per_share_gigabytes quota_class_set['per_share_gigabytes'] = fake_per_share_gigabytes
if req.api_version_request >= api_version.APIVersionRequest("2.80"):
fake_share_backups_value = 46
fake_backup_gigabytes_value = 100
expected[self.builder._collection_name]["backups"] = (
fake_share_backups_value)
expected[self.builder._collection_name][
"backup_gigabytes"] = fake_backup_gigabytes_value
quota_class_set['backups'] = fake_share_backups_value
quota_class_set['backup_gigabytes'] = fake_backup_gigabytes_value
result = self.builder.detail_list( result = self.builder.detail_list(
req, quota_class_set, quota_class=quota_class) req, quota_class_set, quota_class=quota_class)

View File

@ -46,6 +46,8 @@ class ViewBuilderTestCase(test.TestCase):
(None, 'fake_share_type_id', "2.62"), (None, 'fake_share_type_id', "2.62"),
('fake_project_id', None, "2.62"), ('fake_project_id', None, "2.62"),
(None, None, "2.62"), (None, None, "2.62"),
('fake_project_id', None, "2.80"),
(None, None, "2.80"),
) )
@ddt.unpack @ddt.unpack
def test_detail_list_with_share_type(self, project_id, share_type, def test_detail_list_with_share_type(self, project_id, share_type,
@ -95,6 +97,16 @@ class ViewBuilderTestCase(test.TestCase):
fake_per_share_gigabytes) fake_per_share_gigabytes)
quota_set['per_share_gigabytes'] = fake_per_share_gigabytes quota_set['per_share_gigabytes'] = fake_per_share_gigabytes
if req.api_version_request >= api_version.APIVersionRequest("2.80"):
fake_share_backups_value = 46
fake_backup_gigabytes_value = 100
expected[self.builder._collection_name]["backups"] = (
fake_share_backups_value)
expected[self.builder._collection_name][
"backup_gigabytes"] = fake_backup_gigabytes_value
quota_set['backups'] = fake_share_backups_value
quota_set['backup_gigabytes'] = fake_backup_gigabytes_value
result = self.builder.detail_list( result = self.builder.detail_list(
req, quota_set, project_id=project_id, share_type=share_type) req, quota_set, project_id=project_id, share_type=share_type)

View File

@ -213,7 +213,7 @@ class DataServiceHelperTestCase(test.TestCase):
# run # run
self.helper.cleanup_temp_folder( self.helper.cleanup_temp_folder(
self.share_instance['id'], '/fake_path/') '/fake_path/', self.share_instance['id'])
# asserts # asserts
os.rmdir.assert_called_once_with(fake_path) os.rmdir.assert_called_once_with(fake_path)
@ -230,17 +230,20 @@ class DataServiceHelperTestCase(test.TestCase):
def test_cleanup_unmount_temp_folder(self, exc): def test_cleanup_unmount_temp_folder(self, exc):
# mocks # mocks
self.mock_object(self.helper, 'unmount_share_instance', self.mock_object(self.helper, 'unmount_share_instance_or_backup',
mock.Mock(side_effect=exc)) mock.Mock(side_effect=exc))
self.mock_object(data_copy_helper.LOG, 'warning') self.mock_object(data_copy_helper.LOG, 'warning')
unmount_info = {
'unmount': 'unmount_template',
'share_instance_id': self.share_instance['id']
}
# run # run
self.helper.cleanup_unmount_temp_folder( self.helper.cleanup_unmount_temp_folder(unmount_info, 'fake_path')
'unmount_template', 'fake_path', self.share_instance['id'])
# asserts # asserts
self.helper.unmount_share_instance.assert_called_once_with( self.helper.unmount_share_instance_or_backup.assert_called_once_with(
'unmount_template', 'fake_path', self.share_instance['id']) unmount_info, 'fake_path')
if exc: if exc:
self.assertTrue(data_copy_helper.LOG.warning.called) self.assertTrue(data_copy_helper.LOG.warning.called)
@ -283,33 +286,65 @@ class DataServiceHelperTestCase(test.TestCase):
self.context, self.helper.db, self.share_instance, self.context, self.helper.db, self.share_instance,
data_copy_helper.CONF.data_access_wait_access_rules_timeout) data_copy_helper.CONF.data_access_wait_access_rules_timeout)
def test_mount_share_instance(self): @ddt.data('migration', 'backup', 'restore')
def test_mount_share_instance_or_backup(self, op):
fake_path = ''.join(('/fake_path/', self.share_instance['id']))
# mocks # mocks
self.mock_object(utils, 'execute') self.mock_object(utils, 'execute')
self.mock_object(os.path, 'exists', mock.Mock( exists_calls = [False, True]
side_effect=[False, False, True])) if op == 'backup':
exists_calls.extend([False, True])
if op == 'restore':
exists_calls.append([True])
self.mock_object(os.path, 'exists',
mock.Mock(side_effect=exists_calls))
self.mock_object(os, 'makedirs') self.mock_object(os, 'makedirs')
mount_info = {'mount': 'mount %(path)s'}
if op in ('backup', 'restore'):
fake_path = '/fake_backup_path/'
mount_info.update(
{'backup_id': 'fake_backup_id',
'mount_point': '/fake_backup_path/', op: True})
if op == 'migration':
share_instance_id = self.share_instance['id']
fake_path = ''.join(('/fake_path/', share_instance_id))
mount_info.update({'share_instance_id': share_instance_id})
# run # run
self.helper.mount_share_instance( self.helper.mount_share_instance_or_backup(mount_info, '/fake_path')
'mount %(path)s', '/fake_path', self.share_instance)
# asserts # asserts
utils.execute.assert_called_once_with('mount', fake_path, utils.execute.assert_called_once_with('mount', fake_path,
run_as_root=True) run_as_root=True)
os.makedirs.assert_called_once_with(fake_path) if op == 'migration':
os.path.exists.assert_has_calls([ os.makedirs.assert_called_once_with(fake_path)
mock.call(fake_path), os.path.exists.assert_has_calls([
mock.call(fake_path), mock.call(fake_path),
mock.call(fake_path) mock.call(fake_path),
]) ])
if op == 'backup':
os.makedirs.assert_has_calls([
mock.call(fake_path),
mock.call(fake_path + 'fake_backup_id')
])
os.path.exists.assert_has_calls([
mock.call(fake_path),
mock.call(fake_path),
mock.call(fake_path + 'fake_backup_id'),
mock.call(fake_path + 'fake_backup_id'),
])
if op == 'restore':
os.makedirs.assert_called_once_with(fake_path)
os.path.exists.assert_has_calls([
mock.call(fake_path),
mock.call(fake_path),
mock.call(fake_path + 'fake_backup_id'),
])
@ddt.data([True, True, False], [True, True, Exception('fake')]) @ddt.data([True, True], [True, False], [True, Exception('fake')])
def test_unmount_share_instance(self, side_effect): def test_unmount_share_instance_or_backup(self, side_effect):
fake_path = ''.join(('/fake_path/', self.share_instance['id'])) fake_path = ''.join(('/fake_path/', self.share_instance['id']))
@ -320,9 +355,14 @@ class DataServiceHelperTestCase(test.TestCase):
self.mock_object(os, 'rmdir') self.mock_object(os, 'rmdir')
self.mock_object(data_copy_helper.LOG, 'warning') self.mock_object(data_copy_helper.LOG, 'warning')
unmount_info = {
'unmount': 'unmount %(path)s',
'share_instance_id': self.share_instance['id']
}
# run # run
self.helper.unmount_share_instance( self.helper.unmount_share_instance_or_backup(
'unmount %(path)s', '/fake_path', self.share_instance['id']) unmount_info, '/fake_path')
# asserts # asserts
utils.execute.assert_called_once_with('unmount', fake_path, utils.execute.assert_called_once_with('unmount', fake_path,
@ -331,7 +371,6 @@ class DataServiceHelperTestCase(test.TestCase):
os.path.exists.assert_has_calls([ os.path.exists.assert_has_calls([
mock.call(fake_path), mock.call(fake_path),
mock.call(fake_path), mock.call(fake_path),
mock.call(fake_path)
]) ])
if any(isinstance(x, Exception) for x in side_effect): if any(isinstance(x, Exception) for x in side_effect):

View File

@ -19,6 +19,7 @@ Tests For Data Manager
from unittest import mock from unittest import mock
import ddt import ddt
from oslo_config import cfg
from manila.common import constants from manila.common import constants
from manila import context from manila import context
@ -27,12 +28,16 @@ from manila.data import manager
from manila.data import utils as data_utils from manila.data import utils as data_utils
from manila import db from manila import db
from manila import exception from manila import exception
from manila import quota
from manila.share import rpcapi as share_rpc from manila.share import rpcapi as share_rpc
from manila import test from manila import test
from manila.tests import db_utils from manila.tests import db_utils
from manila import utils from manila import utils
CONF = cfg.CONF
@ddt.ddt @ddt.ddt
class DataManagerTestCase(test.TestCase): class DataManagerTestCase(test.TestCase):
"""Test case for data manager.""" """Test case for data manager."""
@ -44,6 +49,10 @@ class DataManagerTestCase(test.TestCase):
self.topic = 'fake_topic' self.topic = 'fake_topic'
self.share = db_utils.create_share() self.share = db_utils.create_share()
manager.CONF.set_default('mount_tmp_location', '/tmp/') manager.CONF.set_default('mount_tmp_location', '/tmp/')
manager.CONF.set_default('backup_mount_tmp_location', '/tmp/')
manager.CONF.set_default(
'backup_driver',
'manila.tests.fake_backup_driver.FakeBackupDriver')
def test_init(self): def test_init(self):
manager = self.manager manager = self.manager
@ -73,11 +82,17 @@ class DataManagerTestCase(test.TestCase):
utils.IsAMatcher(context.RequestContext), share['id'], utils.IsAMatcher(context.RequestContext), share['id'],
{'task_state': constants.TASK_STATE_DATA_COPYING_ERROR}) {'task_state': constants.TASK_STATE_DATA_COPYING_ERROR})
@ddt.data(None, Exception('fake'), exception.ShareDataCopyCancelled( @ddt.data(None, Exception('fake'), exception.ShareDataCopyCancelled())
src_instance='ins1',
dest_instance='ins2'))
def test_migration_start(self, exc): def test_migration_start(self, exc):
migration_info_src = {
'mount': 'mount_cmd_src',
'unmount': 'unmount_cmd_src',
}
migration_info_dest = {
'mount': 'mount_cmd_dest',
'unmount': 'unmount_cmd_dest',
}
# mocks # mocks
self.mock_object(db, 'share_get', mock.Mock(return_value=self.share)) self.mock_object(db, 'share_get', mock.Mock(return_value=self.share))
self.mock_object(db, 'share_instance_get', mock.Mock( self.mock_object(db, 'share_instance_get', mock.Mock(
@ -102,12 +117,13 @@ class DataManagerTestCase(test.TestCase):
if exc is None or isinstance(exc, exception.ShareDataCopyCancelled): if exc is None or isinstance(exc, exception.ShareDataCopyCancelled):
self.manager.migration_start( self.manager.migration_start(
self.context, [], self.share['id'], self.context, [], self.share['id'],
'ins1_id', 'ins2_id', 'info_src', 'info_dest') 'ins1_id', 'ins2_id', migration_info_src,
migration_info_dest)
else: else:
self.assertRaises( self.assertRaises(
exception.ShareDataCopyFailed, self.manager.migration_start, exception.ShareDataCopyFailed, self.manager.migration_start,
self.context, [], self.share['id'], 'ins1_id', 'ins2_id', self.context, [], self.share['id'], 'ins1_id', 'ins2_id',
'info_src', 'info_dest') migration_info_src, migration_info_dest)
db.share_update.assert_called_once_with( db.share_update.assert_called_once_with(
self.context, self.share['id'], self.context, self.share['id'],
@ -116,26 +132,73 @@ class DataManagerTestCase(test.TestCase):
# asserts # asserts
self.assertFalse(self.manager.busy_tasks_shares.get(self.share['id'])) self.assertFalse(self.manager.busy_tasks_shares.get(self.share['id']))
self.manager._copy_share_data.assert_called_once_with(
self.context, 'fake_copy', self.share, 'ins1_id', 'ins2_id',
'info_src', 'info_dest')
if exc: if exc:
share_rpc.ShareAPI.migration_complete.assert_called_once_with( share_rpc.ShareAPI.migration_complete.assert_called_once_with(
self.context, self.share.instance, 'ins2_id') self.context, self.share.instance, 'ins2_id')
@ddt.data({'cancelled': False, 'exc': None}, @ddt.data(
{'cancelled': False, 'exc': Exception('fake')}, {'cancelled': False, 'exc': None, 'case': 'migration'},
{'cancelled': True, 'exc': None}) {'cancelled': False, 'exc': Exception('fake'), 'case': 'migration'},
{'cancelled': True, 'exc': None, 'case': 'migration'},
{'cancelled': False, 'exc': None, 'case': 'backup'},
{'cancelled': False, 'exc': Exception('fake'), 'case': 'backup'},
{'cancelled': True, 'exc': None, 'case': 'backup'},
{'cancelled': False, 'exc': None, 'case': 'restore'},
{'cancelled': False, 'exc': Exception('fake'), 'case': 'restore'},
{'cancelled': True, 'exc': None, 'case': 'restore'},
)
@ddt.unpack @ddt.unpack
def test__copy_share_data(self, cancelled, exc): def test__copy_share_data(self, cancelled, exc, case):
access = db_utils.create_access(share_id=self.share['id']) access = db_utils.create_access(share_id=self.share['id'])
connection_info_src = {'mount': 'mount_cmd_src', if case == 'migration':
'unmount': 'unmount_cmd_src'} connection_info_src = {
connection_info_dest = {'mount': 'mount_cmd_dest', 'mount': 'mount_cmd_src',
'unmount': 'unmount_cmd_dest'} 'unmount': 'unmount_cmd_src',
'share_id': self.share['id'],
'share_instance_id': 'ins1_id',
'mount_point': '/tmp/ins1_id',
}
connection_info_dest = {
'mount': 'mount_cmd_dest',
'unmount': 'unmount_cmd_dest',
'share_id': None,
'share_instance_id': 'ins2_id',
'mount_point': '/tmp/ins2_id',
}
if case == 'backup':
connection_info_src = {
'mount': 'mount_cmd_src',
'unmount': 'unmount_cmd_src',
'share_id': self.share['id'],
'share_instance_id': 'ins1_id',
'mount_point': '/tmp/ins1_id',
}
connection_info_dest = {
'mount': 'mount_cmd_dest',
'unmount': 'unmount_cmd_dest',
'share_id': None,
'share_instance_id': None,
'mount_point': '/tmp/backup_id',
'backup': True
}
if case == 'restore':
connection_info_src = {
'mount': 'mount_cmd_src',
'unmount': 'unmount_cmd_src',
'share_id': None,
'share_instance_id': None,
'mount_point': '/tmp/backup_id',
'restore': True
}
connection_info_dest = {
'mount': 'mount_cmd_dest',
'unmount': 'unmount_cmd_dest',
'share_id': self.share['id'],
'share_instance_id': 'ins2_id',
'mount_point': '/tmp/ins2_id',
}
get_progress = {'total_progress': 100} get_progress = {'total_progress': 100}
@ -150,14 +213,16 @@ class DataManagerTestCase(test.TestCase):
'allow_access_to_data_service', 'allow_access_to_data_service',
mock.Mock(return_value=[access])) mock.Mock(return_value=[access]))
self.mock_object(helper.DataServiceHelper, 'mount_share_instance') self.mock_object(helper.DataServiceHelper,
'mount_share_instance_or_backup')
self.mock_object(fake_copy, 'run', mock.Mock(side_effect=exc)) self.mock_object(fake_copy, 'run', mock.Mock(side_effect=exc))
self.mock_object(fake_copy, 'get_progress', self.mock_object(fake_copy, 'get_progress',
mock.Mock(return_value=get_progress)) mock.Mock(return_value=get_progress))
self.mock_object(helper.DataServiceHelper, 'unmount_share_instance', self.mock_object(helper.DataServiceHelper,
'unmount_share_instance_or_backup',
mock.Mock(side_effect=Exception('fake'))) mock.Mock(side_effect=Exception('fake')))
self.mock_object(helper.DataServiceHelper, self.mock_object(helper.DataServiceHelper,
@ -171,8 +236,7 @@ class DataManagerTestCase(test.TestCase):
self.assertRaises( self.assertRaises(
exception.ShareDataCopyCancelled, exception.ShareDataCopyCancelled,
self.manager._copy_share_data, self.context, fake_copy, self.manager._copy_share_data, self.context, fake_copy,
self.share, 'ins1_id', 'ins2_id', connection_info_src, connection_info_src, connection_info_dest)
connection_info_dest)
extra_updates = [ extra_updates = [
mock.call( mock.call(
self.context, self.share['id'], self.context, self.share['id'],
@ -187,13 +251,13 @@ class DataManagerTestCase(test.TestCase):
elif exc: elif exc:
self.assertRaises( self.assertRaises(
exception.ShareDataCopyFailed, self.manager._copy_share_data, exception.ShareDataCopyFailed, self.manager._copy_share_data,
self.context, fake_copy, self.share, 'ins1_id', self.context, fake_copy, connection_info_src,
'ins2_id', connection_info_src, connection_info_dest) connection_info_dest)
else: else:
self.manager._copy_share_data( self.manager._copy_share_data(
self.context, fake_copy, self.share, 'ins1_id', self.context, fake_copy, connection_info_src,
'ins2_id', connection_info_src, connection_info_dest) connection_info_dest)
extra_updates = [ extra_updates = [
mock.call( mock.call(
self.context, self.share['id'], self.context, self.share['id'],
@ -222,35 +286,36 @@ class DataManagerTestCase(test.TestCase):
db.share_update.assert_has_calls(update_list) db.share_update.assert_has_calls(update_list)
(helper.DataServiceHelper.allow_access_to_data_service. helper.DataServiceHelper.\
assert_called_once_with( mount_share_instance_or_backup.assert_has_calls([
self.share['instance'], connection_info_src, mock.call(connection_info_src, '/tmp/'),
self.share['instance'], connection_info_dest)) mock.call(connection_info_dest, '/tmp/')])
helper.DataServiceHelper.mount_share_instance.assert_has_calls([
mock.call(connection_info_src['mount'], '/tmp/',
self.share['instance']),
mock.call(connection_info_dest['mount'], '/tmp/',
self.share['instance'])])
fake_copy.run.assert_called_once_with() fake_copy.run.assert_called_once_with()
if exc is None: if exc is None:
fake_copy.get_progress.assert_called_once_with() fake_copy.get_progress.assert_called_once_with()
helper.DataServiceHelper.unmount_share_instance.assert_has_calls([ helper.DataServiceHelper.\
mock.call(connection_info_src['unmount'], '/tmp/', 'ins1_id'), unmount_share_instance_or_backup.assert_has_calls([
mock.call(connection_info_dest['unmount'], '/tmp/', 'ins2_id')]) mock.call(connection_info_src, '/tmp/'),
mock.call(connection_info_dest, '/tmp/')])
helper.DataServiceHelper.deny_access_to_data_service.assert_has_calls([
mock.call([access], self.share['instance']),
mock.call([access], self.share['instance'])])
def test__copy_share_data_exception_access(self): def test__copy_share_data_exception_access(self):
connection_info_src = {'mount': 'mount_cmd_src', connection_info_src = {
'unmount': 'unmount_cmd_src'} 'mount': 'mount_cmd_src',
connection_info_dest = {'mount': 'mount_cmd_src', 'unmount': 'unmount_cmd_src',
'unmount': 'unmount_cmd_src'} 'share_id': self.share['id'],
'share_instance_id': 'ins1_id',
'mount_point': '/tmp/ins1_id',
}
connection_info_dest = {
'mount': 'mount_cmd_dest',
'unmount': 'unmount_cmd_dest',
'share_id': None,
'share_instance_id': 'ins2_id',
'mount_point': '/tmp/ins2_id',
}
fake_copy = mock.MagicMock(cancelled=False) fake_copy = mock.MagicMock(cancelled=False)
@ -270,8 +335,7 @@ class DataManagerTestCase(test.TestCase):
# run # run
self.assertRaises(exception.ShareDataCopyFailed, self.assertRaises(exception.ShareDataCopyFailed,
self.manager._copy_share_data, self.context, self.manager._copy_share_data, self.context,
fake_copy, self.share, 'ins1_id', 'ins2_id', fake_copy, connection_info_src, connection_info_dest)
connection_info_src, connection_info_dest)
# asserts # asserts
db.share_update.assert_called_once_with( db.share_update.assert_called_once_with(
@ -287,10 +351,20 @@ class DataManagerTestCase(test.TestCase):
access = db_utils.create_access(share_id=self.share['id']) access = db_utils.create_access(share_id=self.share['id'])
connection_info_src = {'mount': 'mount_cmd_src', connection_info_src = {
'unmount': 'unmount_cmd_src'} 'mount': 'mount_cmd_src',
connection_info_dest = {'mount': 'mount_cmd_src', 'unmount': 'unmount_cmd_src',
'unmount': 'unmount_cmd_src'} 'share_id': self.share['id'],
'share_instance_id': 'ins1_id',
'mount_point': '/tmp/ins1_id',
}
connection_info_dest = {
'mount': 'mount_cmd_dest',
'unmount': 'unmount_cmd_dest',
'share_id': None,
'share_instance_id': 'ins2_id',
'mount_point': '/tmp/ins2_id',
}
fake_copy = mock.MagicMock(cancelled=False) fake_copy = mock.MagicMock(cancelled=False)
@ -304,7 +378,8 @@ class DataManagerTestCase(test.TestCase):
'allow_access_to_data_service', 'allow_access_to_data_service',
mock.Mock(return_value=[access])) mock.Mock(return_value=[access]))
self.mock_object(helper.DataServiceHelper, 'mount_share_instance', self.mock_object(helper.DataServiceHelper,
'mount_share_instance_or_backup',
mock.Mock(side_effect=Exception('fake'))) mock.Mock(side_effect=Exception('fake')))
self.mock_object(helper.DataServiceHelper, 'cleanup_data_access') self.mock_object(helper.DataServiceHelper, 'cleanup_data_access')
@ -313,36 +388,42 @@ class DataManagerTestCase(test.TestCase):
# run # run
self.assertRaises(exception.ShareDataCopyFailed, self.assertRaises(exception.ShareDataCopyFailed,
self.manager._copy_share_data, self.context, self.manager._copy_share_data, self.context,
fake_copy, self.share, 'ins1_id', 'ins2_id', fake_copy, connection_info_src, connection_info_dest)
connection_info_src, connection_info_dest)
# asserts # asserts
db.share_update.assert_called_once_with( db.share_update.assert_called_once_with(
self.context, self.share['id'], self.context, self.share['id'],
{'task_state': constants.TASK_STATE_DATA_COPYING_STARTING}) {'task_state': constants.TASK_STATE_DATA_COPYING_STARTING})
(helper.DataServiceHelper.allow_access_to_data_service. helper.DataServiceHelper.\
assert_called_once_with( mount_share_instance_or_backup.assert_called_once_with(
self.share['instance'], connection_info_src, connection_info_src, '/tmp/')
self.share['instance'], connection_info_dest))
helper.DataServiceHelper.mount_share_instance.assert_called_once_with(
connection_info_src['mount'], '/tmp/', self.share['instance'])
helper.DataServiceHelper.cleanup_temp_folder.assert_called_once_with( helper.DataServiceHelper.cleanup_temp_folder.assert_called_once_with(
'ins1_id', '/tmp/') '/tmp/', 'ins1_id')
helper.DataServiceHelper.cleanup_data_access.assert_has_calls([ helper.DataServiceHelper.cleanup_data_access.assert_has_calls([
mock.call([access], 'ins2_id'), mock.call([access], 'ins1_id')]) mock.call([access], self.share['instance']),
mock.call([access], self.share['instance'])])
def test__copy_share_data_exception_mount_2(self): def test__copy_share_data_exception_mount_2(self):
access = db_utils.create_access(share_id=self.share['id']) access = db_utils.create_access(share_id=self.share['id'])
connection_info_src = {'mount': 'mount_cmd_src', connection_info_src = {
'unmount': 'unmount_cmd_src'} 'mount': 'mount_cmd_src',
connection_info_dest = {'mount': 'mount_cmd_src', 'unmount': 'unmount_cmd_src',
'unmount': 'unmount_cmd_src'} 'share_id': self.share['id'],
'share_instance_id': 'ins1_id',
'mount_point': '/tmp/ins1_id',
}
connection_info_dest = {
'mount': 'mount_cmd_dest',
'unmount': 'unmount_cmd_dest',
'share_id': None,
'share_instance_id': 'ins2_id',
'mount_point': '/tmp/ins2_id',
}
fake_copy = mock.MagicMock(cancelled=False) fake_copy = mock.MagicMock(cancelled=False)
@ -356,7 +437,8 @@ class DataManagerTestCase(test.TestCase):
'allow_access_to_data_service', 'allow_access_to_data_service',
mock.Mock(return_value=[access])) mock.Mock(return_value=[access]))
self.mock_object(helper.DataServiceHelper, 'mount_share_instance', self.mock_object(helper.DataServiceHelper,
'mount_share_instance_or_backup',
mock.Mock(side_effect=[None, Exception('fake')])) mock.Mock(side_effect=[None, Exception('fake')]))
self.mock_object(helper.DataServiceHelper, 'cleanup_data_access') self.mock_object(helper.DataServiceHelper, 'cleanup_data_access')
@ -367,34 +449,23 @@ class DataManagerTestCase(test.TestCase):
# run # run
self.assertRaises(exception.ShareDataCopyFailed, self.assertRaises(exception.ShareDataCopyFailed,
self.manager._copy_share_data, self.context, self.manager._copy_share_data, self.context,
fake_copy, self.share, 'ins1_id', 'ins2_id', fake_copy, connection_info_src, connection_info_dest)
connection_info_src, connection_info_dest)
# asserts # asserts
db.share_update.assert_called_once_with( db.share_update.assert_called_once_with(
self.context, self.share['id'], self.context, self.share['id'],
{'task_state': constants.TASK_STATE_DATA_COPYING_STARTING}) {'task_state': constants.TASK_STATE_DATA_COPYING_STARTING})
(helper.DataServiceHelper.allow_access_to_data_service. helper.DataServiceHelper.\
assert_called_once_with( mount_share_instance_or_backup.assert_has_calls([
self.share['instance'], connection_info_src, mock.call(connection_info_src, '/tmp/'),
self.share['instance'], connection_info_dest)) mock.call(connection_info_dest, '/tmp/')])
helper.DataServiceHelper.mount_share_instance.assert_has_calls([ helper.DataServiceHelper.cleanup_unmount_temp_folder.\
mock.call(connection_info_src['mount'], '/tmp/', assert_called_once_with(connection_info_src, '/tmp/')
self.share['instance']),
mock.call(connection_info_dest['mount'], '/tmp/',
self.share['instance'])])
(helper.DataServiceHelper.cleanup_unmount_temp_folder.
assert_called_once_with(
connection_info_src['unmount'], '/tmp/', 'ins1_id'))
helper.DataServiceHelper.cleanup_temp_folder.assert_has_calls([ helper.DataServiceHelper.cleanup_temp_folder.assert_has_calls([
mock.call('ins2_id', '/tmp/'), mock.call('ins1_id', '/tmp/')]) mock.call('/tmp/', 'ins2_id'), mock.call('/tmp/', 'ins1_id')])
helper.DataServiceHelper.cleanup_data_access.assert_has_calls([
mock.call([access], 'ins2_id'), mock.call([access], 'ins1_id')])
def test_data_copy_cancel(self): def test_data_copy_cancel(self):
@ -442,3 +513,286 @@ class DataManagerTestCase(test.TestCase):
self.assertRaises(exception.InvalidShare, self.assertRaises(exception.InvalidShare,
self.manager.data_copy_get_progress, self.context, self.manager.data_copy_get_progress, self.context,
'fake_id') 'fake_id')
def test_create_share_backup(self):
share_info = db_utils.create_share(
status=constants.STATUS_BACKUP_CREATING)
backup_info = db_utils.create_backup(
share_info['id'], status=constants.STATUS_CREATING)
# mocks
self.mock_object(db, 'share_update')
self.mock_object(db, 'share_get', mock.Mock(return_value=share_info))
self.mock_object(db, 'share_backup_get',
mock.Mock(return_value=backup_info))
self.mock_object(self.manager, '_run_backup',
mock.Mock(side_effect=None))
self.manager.create_backup(self.context, backup_info)
    def test_create_share_backup_exception(self):
        """A failed data copy must be surfaced by create_backup.

        When ``_run_backup`` raises ``ShareDataCopyFailed`` the manager is
        expected to re-raise (as a ManilaException), put the share back to
        'available' and update the backup row.
        """
        share_info = db_utils.create_share(status=constants.STATUS_AVAILABLE)
        backup_info = db_utils.create_backup(
            share_info['id'], status=constants.STATUS_AVAILABLE, size=2)
        # mocks
        self.mock_object(db, 'share_update')
        self.mock_object(db, 'share_backup_update')
        self.mock_object(db, 'share_get', mock.Mock(return_value=share_info))
        self.mock_object(db, 'share_backup_get',
                         mock.Mock(return_value=backup_info))
        self.mock_object(
            self.manager, '_run_backup',
            mock.Mock(
                side_effect=exception.ShareDataCopyFailed(reason='fake')))
        self.assertRaises(exception.ManilaException,
                          self.manager.create_backup,
                          self.context, backup_info)
        # The share is released back to 'available' and the backup row is
        # updated once after the copy failure.
        db.share_update.assert_called_with(
            self.context, share_info['id'],
            {'status': constants.STATUS_AVAILABLE})
        db.share_backup_update.assert_called_once()
    @ddt.data('90', '100')
    def test_create_share_backup_continue(self, progress):
        """Periodic task finishes or refreshes an in-flight backup.

        At 100% progress both the backup and its share go back to
        'available'; below 100% only the backup's progress field is
        refreshed.
        """
        share_info = db_utils.create_share(
            status=constants.STATUS_BACKUP_CREATING)
        backup_info = db_utils.create_backup(
            share_info['id'], status=constants.STATUS_CREATING,
            topic=CONF.data_topic)
        # mocks
        self.mock_object(db, 'share_update')
        self.mock_object(db, 'share_backup_update')
        self.mock_object(db, 'share_backups_get_all',
                         mock.Mock(return_value=[backup_info]))
        self.mock_object(self.manager, 'data_copy_get_progress',
                         mock.Mock(return_value={'total_progress': progress}))
        self.manager.create_backup_continue(self.context)
        if progress == '100':
            db.share_backup_update.assert_called_with(
                self.context, backup_info['id'],
                {'status': constants.STATUS_AVAILABLE})
            db.share_update.assert_called_with(
                self.context, share_info['id'],
                {'status': constants.STATUS_AVAILABLE})
        else:
            db.share_backup_update.assert_called_with(
                self.context, backup_info['id'],
                {'progress': progress})
    def test_create_share_backup_continue_exception(self):
        """A progress-poll failure errors the backup and frees the share."""
        share_info = db_utils.create_share(
            status=constants.STATUS_BACKUP_CREATING)
        backup_info = db_utils.create_backup(
            share_info['id'], status=constants.STATUS_CREATING,
            topic=CONF.data_topic)
        # mocks
        self.mock_object(db, 'share_update')
        self.mock_object(db, 'share_backup_update')
        self.mock_object(db, 'share_backups_get_all',
                         mock.Mock(return_value=[backup_info]))
        self.mock_object(self.manager, 'data_copy_get_progress',
                         mock.Mock(side_effect=exception.ManilaException))
        self.manager.create_backup_continue(self.context)
        # On failure the backup is marked errored with progress reset,
        # while the share itself is released back to 'available'.
        db.share_backup_update.assert_called_with(
            self.context, backup_info['id'],
            {'status': constants.STATUS_ERROR, 'progress': '0'})
        db.share_update.assert_called_with(
            self.context, share_info['id'],
            {'status': constants.STATUS_AVAILABLE})
@ddt.data(None, exception.ShareDataCopyFailed())
def test__run_backup(self, exc):
share_info = db_utils.create_share(status=constants.STATUS_AVAILABLE)
backup_info = db_utils.create_backup(
share_info['id'], status=constants.STATUS_AVAILABLE, size=2)
share_instance = {
'export_locations': [{
'path': 'test_path',
"is_admin_only": False
}, ],
'share_proto': 'nfs',
}
# mocks
self.mock_object(db, 'share_instance_get',
mock.Mock(return_value=share_instance))
self.mock_object(data_utils, 'Copy',
mock.Mock(return_value='fake_copy'))
self.manager.busy_tasks_shares[self.share['id']] = 'fake_copy'
self.mock_object(self.manager, '_copy_share_data',
mock.Mock(side_effect=exc))
self.mock_object(self.manager, '_run_backup')
if exc is isinstance(exc, exception.ShareDataCopyFailed):
self.assertRaises(exception.ShareDataCopyFailed,
self.manager._run_backup, self.context,
backup_info, share_info)
else:
self.manager._run_backup(self.context, backup_info, share_info)
def test_delete_share_backup(self):
share_info = db_utils.create_share(status=constants.STATUS_AVAILABLE)
backup_info = db_utils.create_backup(
share_info['id'], status=constants.STATUS_AVAILABLE, size=2)
# mocks
self.mock_object(db, 'share_backup_delete')
self.mock_object(db, 'share_backup_get',
mock.Mock(return_value=backup_info))
self.mock_object(utils, 'execute')
reservation = 'fake'
self.mock_object(quota.QUOTAS, 'reserve',
mock.Mock(return_value=reservation))
self.mock_object(quota.QUOTAS, 'commit')
self.manager.delete_backup(self.context, backup_info)
db.share_backup_delete.assert_called_with(
self.context, backup_info['id'])
    def test_delete_share_backup_exception(self):
        """delete_backup re-raises a quota reservation failure."""
        share_info = db_utils.create_share(status=constants.STATUS_AVAILABLE)
        backup_info = db_utils.create_backup(
            share_info['id'], status=constants.STATUS_AVAILABLE, size=2)
        # mocks
        self.mock_object(db, 'share_backup_get',
                         mock.Mock(return_value=backup_info))
        self.mock_object(utils, 'execute')
        self.mock_object(
            quota.QUOTAS, 'reserve',
            mock.Mock(side_effect=exception.ManilaException))
        self.assertRaises(exception.ManilaException,
                          self.manager.delete_backup, self.context,
                          backup_info)
    def test_restore_share_backup(self):
        """Happy path: restore_backup delegates to _run_restore."""
        share_info = db_utils.create_share(status=constants.STATUS_AVAILABLE)
        backup_info = db_utils.create_backup(
            share_info['id'], status=constants.STATUS_AVAILABLE, size=2)
        share_id = share_info['id']
        # mocks
        self.mock_object(db, 'share_update')
        self.mock_object(db, 'share_get', mock.Mock(return_value=share_info))
        self.mock_object(db, 'share_backup_get',
                         mock.Mock(return_value=backup_info))
        self.mock_object(self.manager, '_run_restore')
        self.manager.restore_backup(self.context, backup_info, share_id)
    def test_restore_share_backup_exception(self):
        """A failed restore copy puts the share in restoring-error state."""
        share_info = db_utils.create_share(status=constants.STATUS_AVAILABLE)
        backup_info = db_utils.create_backup(
            share_info['id'], status=constants.STATUS_AVAILABLE, size=2)
        share_id = share_info['id']
        # mocks
        self.mock_object(db, 'share_update')
        self.mock_object(db, 'share_get', mock.Mock(return_value=share_info))
        self.mock_object(db, 'share_backup_get',
                         mock.Mock(return_value=backup_info))
        self.mock_object(
            self.manager, '_run_restore',
            mock.Mock(
                side_effect=exception.ShareDataCopyFailed(reason='fake')))
        self.assertRaises(exception.ManilaException,
                          self.manager.restore_backup, self.context,
                          backup_info, share_id)
        db.share_update.assert_called_with(
            self.context, share_info['id'],
            {'status': constants.STATUS_BACKUP_RESTORING_ERROR})
    @ddt.data('90', '100')
    def test_restore_share_backup_continue(self, progress):
        """Periodic task finishes or refreshes an in-flight restore.

        At 100% progress both the backup and the restored share return to
        'available'; below 100% only the backup's restore_progress field
        is refreshed.
        """
        share_info = db_utils.create_share(
            status=constants.STATUS_BACKUP_RESTORING)
        backup_info = db_utils.create_backup(
            share_info['id'], status=constants.STATUS_RESTORING,
            topic=CONF.data_topic)
        share_info['source_backup_id'] = backup_info['id']
        # mocks
        self.mock_object(db, 'share_update')
        self.mock_object(db, 'share_backup_update')
        self.mock_object(db, 'share_get_all',
                         mock.Mock(return_value=[share_info]))
        self.mock_object(db, 'share_backups_get_all',
                         mock.Mock(return_value=[backup_info]))
        self.mock_object(self.manager, 'data_copy_get_progress',
                         mock.Mock(return_value={'total_progress': progress}))
        self.manager.restore_backup_continue(self.context)
        if progress == '100':
            db.share_backup_update.assert_called_with(
                self.context, backup_info['id'],
                {'status': constants.STATUS_AVAILABLE})
            db.share_update.assert_called_with(
                self.context, share_info['id'],
                {'status': constants.STATUS_AVAILABLE})
        else:
            db.share_backup_update.assert_called_with(
                self.context, backup_info['id'],
                {'restore_progress': progress})
    def test_restore_share_backup_continue_exception(self):
        """A restore progress-poll failure errors the share, not the backup.

        The backup itself stays 'available' (its data is intact); only the
        target share is flagged with the restoring-error status.
        """
        share_info = db_utils.create_share(
            status=constants.STATUS_BACKUP_RESTORING)
        backup_info = db_utils.create_backup(
            share_info['id'], status=constants.STATUS_RESTORING,
            topic=CONF.data_topic)
        share_info['source_backup_id'] = backup_info['id']
        # mocks
        self.mock_object(db, 'share_update')
        self.mock_object(db, 'share_backup_update')
        self.mock_object(db, 'share_get_all',
                         mock.Mock(return_value=[share_info]))
        self.mock_object(db, 'share_backups_get_all',
                         mock.Mock(return_value=[backup_info]))
        self.mock_object(self.manager, 'data_copy_get_progress',
                         mock.Mock(side_effect=exception.ManilaException))
        self.manager.restore_backup_continue(self.context)
        db.share_backup_update.assert_called_with(
            self.context, backup_info['id'],
            {'status': constants.STATUS_AVAILABLE, 'restore_progress': '0'})
        db.share_update.assert_called_with(
            self.context, share_info['id'],
            {'status': constants.STATUS_BACKUP_RESTORING_ERROR})
@ddt.data(None, exception.ShareDataCopyFailed())
def test__run_restore(self, exc):
share_info = db_utils.create_share(status=constants.STATUS_AVAILABLE)
backup_info = db_utils.create_backup(
share_info['id'], status=constants.STATUS_AVAILABLE, size=2)
share_instance = {
'export_locations': [{
'path': 'test_path',
"is_admin_only": False
}, ],
'share_proto': 'nfs',
}
# mocks
self.mock_object(db, 'share_instance_get',
mock.Mock(return_value=share_instance))
self.mock_object(data_utils, 'Copy',
mock.Mock(return_value='fake_copy'))
self.manager.busy_tasks_shares[self.share['id']] = 'fake_copy'
self.mock_object(self.manager, '_copy_share_data',
mock.Mock(side_effect=exc))
self.mock_object(self.manager, '_run_restore')
if exc is isinstance(exc, exception.ShareDataCopyFailed):
self.assertRaises(exception.ShareDataCopyFailed,
self.manager._run_restore, self.context,
backup_info, share_info)
else:
self.manager._run_restore(self.context, backup_info, share_info)

View File

@ -40,6 +40,8 @@ class DataRpcAPITestCase(test.TestCase):
status=constants.STATUS_AVAILABLE status=constants.STATUS_AVAILABLE
) )
self.fake_share = jsonutils.to_primitive(share) self.fake_share = jsonutils.to_primitive(share)
self.backup = db_utils.create_backup(
share_id=self.fake_share['id'], status=constants.STATUS_AVAILABLE)
def tearDown(self): def tearDown(self):
super(DataRpcAPITestCase, self).tearDown() super(DataRpcAPITestCase, self).tearDown()
@ -102,3 +104,22 @@ class DataRpcAPITestCase(test.TestCase):
rpc_method='call', rpc_method='call',
version='1.0', version='1.0',
share_id=self.fake_share['id']) share_id=self.fake_share['id'])
    def test_create_backup(self):
        """'create_backup' is cast to the data service at RPC version 1.1."""
        self._test_data_api('create_backup',
                            rpc_method='cast',
                            version='1.1',
                            backup=self.backup)
    def test_delete_backup(self):
        """'delete_backup' is cast to the data service at RPC version 1.1."""
        self._test_data_api('delete_backup',
                            rpc_method='cast',
                            version='1.1',
                            backup=self.backup)
    def test_restore_backup(self):
        """'restore_backup' is cast with both the backup and target share."""
        self._test_data_api('restore_backup',
                            rpc_method='cast',
                            version='1.1',
                            backup=self.backup,
                            share_id=self.fake_share['id'])

View File

@ -5358,3 +5358,83 @@ class TransfersTestCase(test.TestCase):
self.assertEqual(share['project_id'], self.project_id) self.assertEqual(share['project_id'], self.project_id)
self.assertEqual(share['user_id'], self.user_id) self.assertEqual(share['user_id'], self.user_id)
self.assertFalse(transfer['accepted']) self.assertFalse(transfer['accepted'])
class ShareBackupDatabaseAPITestCase(BaseDatabaseAPITestCase):
    """CRUD tests for the share_backup_* db_api helpers."""

    def setUp(self):
        """Run before each test."""
        super(ShareBackupDatabaseAPITestCase, self).setUp()
        self.ctxt = context.get_admin_context()
        # Template row passed to share_backup_create by each test; the
        # persisted record is compared field-by-field against it.
        self.backup = {
            'id': 'fake_backup_id',
            'host': "fake_host",
            'user_id': 'fake',
            'project_id': 'fake',
            'availability_zone': 'fake_availability_zone',
            'status': constants.STATUS_CREATING,
            'progress': '0',
            'display_name': 'fake_name',
            'display_description': 'fake_description',
            'size': 1,
        }
        self.share_id = "fake_share_id"

    def test_create_share_backup(self):
        """share_backup_create persists and returns all supplied fields."""
        result = db_api.share_backup_create(
            self.ctxt, self.share_id, self.backup)
        self._check_fields(expected=self.backup, actual=result)

    def test_create_with_duplicated_id(self):
        """Creating two backups with the same id raises DBDuplicateEntry."""
        db_api.share_backup_create(
            self.ctxt, self.share_id, self.backup)
        self.assertRaises(db_exception.DBDuplicateEntry,
                          db_api.share_backup_create,
                          self.ctxt,
                          self.share_id,
                          self.backup)

    def test_get(self):
        """share_backup_get returns the row created earlier."""
        db_api.share_backup_create(
            self.ctxt, self.share_id, self.backup)
        result = db_api.share_backup_get(
            self.ctxt, self.backup['id'])
        self._check_fields(expected=self.backup, actual=result)

    def test_delete(self):
        """A deleted backup is no longer retrievable."""
        db_api.share_backup_create(
            self.ctxt, self.share_id, self.backup)
        db_api.share_backup_delete(self.ctxt,
                                   self.backup['id'])
        self.assertRaises(exception.ShareBackupNotFound,
                          db_api.share_backup_get,
                          self.ctxt,
                          self.backup['id'])

    def test_delete_not_found(self):
        """Deleting a nonexistent backup raises ShareBackupNotFound."""
        self.assertRaises(exception.ShareBackupNotFound,
                          db_api.share_backup_delete,
                          self.ctxt,
                          'fake not exist id')

    def test_update(self):
        """share_backup_update changes the row and the change persists."""
        new_status = constants.STATUS_ERROR
        db_api.share_backup_create(
            self.ctxt, self.share_id, self.backup)
        result_update = db_api.share_backup_update(
            self.ctxt, self.backup['id'],
            {'status': constants.STATUS_ERROR})
        result_get = db_api.share_backup_get(self.ctxt,
                                             self.backup['id'])
        self.assertEqual(new_status, result_update['status'])
        self._check_fields(expected=dict(result_update.items()),
                           actual=dict(result_get.items()))

    def test_update_not_found(self):
        """Updating a nonexistent backup raises ShareBackupNotFound."""
        self.assertRaises(exception.ShareBackupNotFound,
                          db_api.share_backup_update,
                          self.ctxt,
                          'fake id',
                          {})

View File

@ -307,3 +307,22 @@ def create_transfer(**kwargs):
'crypt_hash': 'crypt_hash', 'crypt_hash': 'crypt_hash',
'resource_type': constants.SHARE_RESOURCE_TYPE} 'resource_type': constants.SHARE_RESOURCE_TYPE}
return _create_db_row(db.transfer_create, transfer, kwargs) return _create_db_row(db.transfer_create, transfer, kwargs)
def create_backup(share_id, **kwargs):
    """Persist and return a fake share backup row for tests.

    Any keyword argument overrides the corresponding default field.
    """
    defaults = {
        'host': "fake_host",
        'share_network_id': None,
        'share_server_id': None,
        'user_id': 'fake',
        'project_id': 'fake',
        'availability_zone': 'fake_availability_zone',
        'status': constants.STATUS_CREATING,
        'topic': 'fake_topic',
        'description': 'fake_description',
        'size': '1',
    }
    values = {**defaults, **kwargs}
    return db.share_backup_create(
        context.get_admin_context(), share_id, values)

View File

@ -0,0 +1,43 @@
# Copyright 2023 Cloudification GmbH.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from manila.data import backup_driver
class FakeBackupDriver(backup_driver.BackupDriver):
    """Fake Backup driver."""

    def __init__(self, *args, **kwargs):
        super(FakeBackupDriver, self).__init__(*args, **kwargs)

    def backup(self, backup, share):
        """Start a backup of a specified share."""

    def restore(self, backup, share):
        """Restore a saved backup."""

    def delete(self, backup):
        """Delete a saved backup."""

    def get_backup_info(self, backup):
        """Get backup capabilities information of driver."""
        return {
            'mount': 'mount -vt fake_proto /fake-export %(path)s',
            'unmount': 'umount -v %(path)s',
        }

View File

@ -319,3 +319,27 @@ def fake_share_server_get():
} }
} }
return fake_share_server return fake_share_server
def fake_backup(as_primitive=True, **kwargs):
    """Build a fake share backup for tests.

    Returns a plain dict when ``as_primitive`` is True, otherwise the
    same data wrapped in a ``db_fakes.FakeModel``. Keyword arguments
    override the default fields.
    """
    backup = {
        'id': uuidutils.generate_uuid(),
        'host': "fake_host",
        'user_id': 'fake',
        'project_id': 'fake',
        'availability_zone': 'fake_availability_zone',
        'backup_state': constants.STATUS_CREATING,
        'status': constants.STATUS_CREATING,
        'progress': '0',
        'restore_progress': '0',
        'topic': 'fake_topic',
        'share_id': uuidutils.generate_uuid(),
        'display_name': 'fake_name',
        'display_description': 'fake_description',
        'size': '1',
    }
    backup.update(kwargs)
    return backup if as_primitive else db_fakes.FakeModel(backup)

View File

@ -85,6 +85,10 @@ dummy_opts = [
"migration_cancel": 1.04, "migration_cancel": 1.04,
"migration_get_progress": 1.05, "migration_get_progress": 1.05,
"migration_check_compatibility": 0.05, "migration_check_compatibility": 0.05,
"create_backup": "1.50",
"restore_backup": "1.50",
}, },
), ),
] ]
@ -1010,3 +1014,34 @@ class DummyDriver(driver.ShareDriver):
new_server["backend_details"]["subnet_allocations"]) new_server["backend_details"]["subnet_allocations"])
}, },
} }
    @slow_me_down
    def create_backup(self, context, share_instance, backup):
        """Pretend to create a backup; only logs (artificially slowed)."""
        LOG.debug("Created backup %(backup)s of share %(share)s "
                  "using dummy driver.",
                  {'backup': backup['id'],
                   'share': share_instance['share_id']})
    def create_backup_continue(self, context, share_instance, backup):
        """Report the dummy backup as immediately complete (100%)."""
        LOG.debug("Continue backup %(backup)s of share %(share)s "
                  "using dummy driver.",
                  {'backup': backup['id'],
                   'share': share_instance['share_id']})
        return {'total_progress': '100'}
    def delete_backup(self, context, backup):
        """Pretend to delete a backup; only logs."""
        LOG.debug("Deleted backup '%s' using dummy driver.", backup['id'])
    @slow_me_down
    def restore_backup(self, context, backup, share_instance):
        """Pretend to restore a backup; only logs (artificially slowed)."""
        LOG.debug("Restored backup %(backup)s into share %(share)s "
                  "using dummy driver.",
                  {'backup': backup['id'],
                   'share': share_instance['share_id']})
    def restore_backup_continue(self, context, share_instance, backup):
        """Report the dummy restore as immediately complete (100%)."""
        LOG.debug("Continue restore of backup %(backup)s into share "
                  "%(share)s using dummy driver.",
                  {'backup': backup['id'],
                   'share': share_instance['share_id']})
        return {'total_progress': '100'}

View File

@ -224,6 +224,8 @@ class ShareAPITestCase(test.TestCase):
self.mock_object(db_api, 'share_server_update') self.mock_object(db_api, 'share_server_update')
self.mock_object(db_api, 'share_snapshot_get_all_for_share', self.mock_object(db_api, 'share_snapshot_get_all_for_share',
mock.Mock(return_value=snapshots)) mock.Mock(return_value=snapshots))
self.mock_object(db_api, 'share_backups_get_all',
mock.Mock(return_value=[]))
self.mock_object(self.api, 'delete_instance') self.mock_object(self.api, 'delete_instance')
return share return share
@ -6622,6 +6624,8 @@ class ShareAPITestCase(test.TestCase):
status=constants.STATUS_AVAILABLE, status=constants.STATUS_AVAILABLE,
has_replicas=False, has_replicas=False,
is_soft_deleted=False) is_soft_deleted=False)
self.mock_object(db_api, 'share_backups_get_all',
mock.Mock(return_value=[]))
self.assertRaises(exception.InvalidShare, self.assertRaises(exception.InvalidShare,
self.api.soft_delete, self.context, share) self.api.soft_delete, self.context, share)
@ -6633,6 +6637,8 @@ class ShareAPITestCase(test.TestCase):
is_soft_deleted=False) is_soft_deleted=False)
self.mock_object(db_api, 'share_snapshot_get_all_for_share', self.mock_object(db_api, 'share_snapshot_get_all_for_share',
mock.Mock(return_value=[])) mock.Mock(return_value=[]))
self.mock_object(db_api, 'share_backups_get_all',
mock.Mock(return_value=[]))
self.mock_object(db_api, 'count_share_group_snapshot_members_in_share', self.mock_object(db_api, 'count_share_group_snapshot_members_in_share',
mock.Mock(return_value=0)) mock.Mock(return_value=0))
self.mock_object(db_api, 'share_soft_delete') self.mock_object(db_api, 'share_soft_delete')
@ -7050,6 +7056,213 @@ class ShareAPITestCase(test.TestCase):
share_network, share_network,
new_share_network_subnet) new_share_network_subnet)
    @ddt.data(None, {'driver': test})
    def test_create_share_backup(self, backup_opts):
        """create_share_backup reserves quota and casts to the right service.

        With driver backup options the request is cast to the share
        service; without them it goes to the data copy service.
        """
        share = db_utils.create_share(is_public=True, status='available')
        backup_ref = db_utils.create_backup(share['id'], status='available')
        reservation = 'fake'
        self.mock_object(quota.QUOTAS, 'reserve',
                         mock.Mock(return_value=reservation))
        self.mock_object(quota.QUOTAS, 'commit')
        self.mock_object(db_api, 'share_backup_create',
                         mock.Mock(return_value=backup_ref))
        self.mock_object(db_api, 'share_backup_update', mock.Mock())
        self.mock_object(data_rpc.DataAPI, 'create_backup', mock.Mock())
        self.mock_object(self.share_rpcapi, 'create_backup', mock.Mock())
        backup = {'display_name': 'tmp_backup', 'backup_options': backup_opts}
        self.api.create_share_backup(self.context, share, backup)
        quota.QUOTAS.reserve.assert_called_once()
        db_api.share_backup_create.assert_called_once()
        quota.QUOTAS.commit.assert_called_once()
        db_api.share_backup_update.assert_called_once()
        if backup_opts:
            self.share_rpcapi.create_backup.assert_called_once_with(
                self.context, backup_ref)
        else:
            data_rpc.DataAPI.create_backup.assert_called_once_with(
                self.context, backup_ref)
def test_create_share_backup_share_error_state(self):
share = db_utils.create_share(is_public=True, status='error')
backup = {'display_name': 'tmp_backup'}
self.assertRaises(exception.InvalidShare,
self.api.create_share_backup,
self.context, share, backup)
    def test_create_share_backup_share_busy_task_state(self):
        """A share with a data-copy task in progress cannot be backed up."""
        share = db_utils.create_share(
            is_public=True, task_state='data_copying_in_progress')
        backup = {'display_name': 'tmp_backup'}
        self.assertRaises(exception.ShareBusyException,
                          self.api.create_share_backup,
                          self.context, share, backup)
    def test_create_share_backup_share_has_snapshots(self):
        """A share with existing snapshots cannot be backed up."""
        share = db_utils.create_share(
            is_public=True, state='available')
        snapshot = db_utils.create_snapshot(
            share_id=share['id'], status='available', size=1)
        backup = {'display_name': 'tmp_backup'}
        self.mock_object(db_api, 'share_snapshot_get_all_for_share',
                         mock.Mock(return_value=[snapshot]))
        self.assertRaises(exception.InvalidShare,
                          self.api.create_share_backup,
                          self.context, share, backup)
    def test_create_share_backup_share_has_replicas(self):
        """A replicated share cannot be backed up."""
        share = fakes.fake_share(id='fake_id',
                                 has_replicas=True,
                                 status=constants.STATUS_AVAILABLE,
                                 is_soft_deleted=False)
        backup = {'display_name': 'tmp_backup'}
        self.assertRaises(exception.InvalidShare,
                          self.api.create_share_backup,
                          self.context, share, backup)
    @ddt.data({'overs': {'backup_gigabytes': 'fake'},
               'expected_exception':
                   exception.ShareBackupSizeExceedsAvailableQuota},
              {'overs': {'backups': 'fake'},
               'expected_exception': exception.BackupLimitExceeded},)
    @ddt.unpack
    def test_create_share_backup_over_quota(self, overs, expected_exception):
        """OverQuota on reserve maps to the backup-specific exception.

        Exceeding gigabytes maps to ShareBackupSizeExceedsAvailableQuota,
        exceeding the backup count maps to BackupLimitExceeded.
        """
        share = fakes.fake_share(id='fake_id',
                                 status=constants.STATUS_AVAILABLE,
                                 is_soft_deleted=False, size=5)
        backup = {'display_name': 'tmp_backup'}
        usages = {'backup_gigabytes': {'reserved': 5, 'in_use': 5},
                  'backups': {'reserved': 5, 'in_use': 5}}
        quotas = {'backup_gigabytes': 5, 'backups': 5}
        exc = exception.OverQuota(overs=overs, usages=usages, quotas=quotas)
        self.mock_object(quota.QUOTAS, 'reserve', mock.Mock(side_effect=exc))
        self.assertRaises(expected_exception, self.api.create_share_backup,
                          self.context, share, backup)
        quota.QUOTAS.reserve.assert_called_once_with(
            self.context, backups=1, backup_gigabytes=share['size'])
    def test_create_share_backup_rollback_quota(self):
        """The quota reservation is rolled back if the db create fails."""
        share = db_utils.create_share(is_public=True, status='available')
        reservation = 'fake'
        self.mock_object(quota.QUOTAS, 'reserve',
                         mock.Mock(return_value=reservation))
        self.mock_object(quota.QUOTAS, 'rollback')
        self.mock_object(db_api, 'share_backup_create',
                         mock.Mock(side_effect=exception.ManilaException))
        self.mock_object(data_rpc.DataAPI, 'create_backup', mock.Mock())
        self.mock_object(self.share_rpcapi, 'create_backup', mock.Mock())
        backup = {'display_name': 'tmp_backup'}
        self.assertRaises(exception.ManilaException,
                          self.api.create_share_backup,
                          self.context, share, backup)
        quota.QUOTAS.reserve.assert_called_once()
        db_api.share_backup_create.assert_called_once()
        quota.QUOTAS.rollback.assert_called_once_with(
            self.context, reservation)
    @ddt.data(CONF.share_topic, CONF.data_topic)
    def test_delete_share_backup(self, topic):
        """delete_share_backup casts to the service named in backup.topic."""
        share = db_utils.create_share(is_public=True, status='available')
        backup = db_utils.create_backup(share['id'], status='available')
        self.mock_object(db_api, 'share_backup_update', mock.Mock())
        self.mock_object(data_rpc.DataAPI, 'delete_backup', mock.Mock())
        self.mock_object(self.share_rpcapi, 'delete_backup', mock.Mock())
        backup.update({'topic': topic})
        self.api.delete_share_backup(self.context, backup)
        db_api.share_backup_update.assert_called_once()
        if topic == CONF.share_topic:
            self.share_rpcapi.delete_backup.assert_called_once_with(
                self.context, backup)
        else:
            data_rpc.DataAPI.delete_backup.assert_called_once_with(
                self.context, backup)
@ddt.data(constants.STATUS_DELETING, constants.STATUS_CREATING)
def test_delete_share_backup_invalid_state(self, state):
share = db_utils.create_share(is_public=True, status='available')
backup = db_utils.create_backup(share['id'], status=state)
self.assertRaises(exception.InvalidBackup,
self.api.delete_share_backup,
self.context, backup)
    @ddt.data(CONF.share_topic, CONF.data_topic)
    def test_restore_share_backup(self, topic):
        """restore_share_backup casts to the service named in backup.topic."""
        share = db_utils.create_share(
            is_public=True, status='available', size=1)
        backup = db_utils.create_backup(
            share['id'], status='available', size=1)
        self.mock_object(self.api, 'get', mock.Mock(return_value=share))
        self.mock_object(db_api, 'share_backup_update', mock.Mock())
        self.mock_object(db_api, 'share_update', mock.Mock())
        self.mock_object(data_rpc.DataAPI, 'restore_backup', mock.Mock())
        self.mock_object(self.share_rpcapi, 'restore_backup', mock.Mock())
        backup.update({'topic': topic})
        self.api.restore_share_backup(self.context, backup)
        self.api.get.assert_called_once()
        db_api.share_update.assert_called_once()
        db_api.share_backup_update.assert_called_once()
        if topic == CONF.share_topic:
            self.share_rpcapi.restore_backup.assert_called_once_with(
                self.context, backup, share['id'])
        else:
            data_rpc.DataAPI.restore_backup.assert_called_once_with(
                self.context, backup, share['id'])
def test_restore_share_backup_invalid_share_sizee(self):
share = db_utils.create_share(
is_public=True, status='available', size=1)
backup = db_utils.create_backup(
share['id'], status='available', size=2)
self.assertRaises(exception.InvalidShare,
self.api.restore_share_backup,
self.context, backup)
def test_restore_share_backup_invalid_share_state(self):
share = db_utils.create_share(is_public=True, status='deleting')
backup = db_utils.create_backup(share['id'], status='available')
self.assertRaises(exception.InvalidShare,
self.api.restore_share_backup,
self.context, backup)
def test_restore_share_backup_invalid_backup_state(self):
share = db_utils.create_share(is_public=True, status='available')
backup = db_utils.create_backup(share['id'], status='deleting')
self.assertRaises(exception.InvalidBackup,
self.api.restore_share_backup,
self.context, backup)
def test_update_share_backup(self):
share = db_utils.create_share(is_public=True, status='available')
backup = db_utils.create_backup(share['id'], status='available')
self.mock_object(db_api, 'share_backup_update', mock.Mock())
self.api.update_share_backup(self.context, backup,
{'display_name': 'new_name'})
db_api.share_backup_update.assert_called_once()
class OtherTenantsShareActionsTestCase(test.TestCase): class OtherTenantsShareActionsTestCase(test.TestCase):
def setUp(self): def setUp(self):

View File

@ -714,7 +714,8 @@ class QuotaEngineTestCase(test.TestCase):
def test_current_common_resources(self): def test_current_common_resources(self):
self.assertEqual( self.assertEqual(
['gigabytes', 'per_share_gigabytes', 'replica_gigabytes', sorted(['gigabytes', 'per_share_gigabytes', 'replica_gigabytes',
'share_group_snapshots', 'share_groups', 'share_networks', 'share_group_snapshots', 'share_groups', 'share_networks',
'share_replicas', 'shares', 'snapshot_gigabytes', 'snapshots'], 'share_replicas', 'shares', 'snapshot_gigabytes',
'snapshots', 'backups', 'backup_gigabytes']),
quota.QUOTAS.resources) quota.QUOTAS.resources)

View File

@ -0,0 +1,6 @@
---
features:
- Added support for the share-backup feature. Starting with this release, a
  backup of a share can be created, deleted, listed, queried for detail, and
  have its name/description updated; a backup can also be restored to its
  original share. A sample NFS backup driver is included.