Merge "Fix backup metadata import missing fields"
commit 3f00403264
@@ -19,7 +19,6 @@
Handles all requests relating to the volume backups service.
"""


from eventlet import greenthread
from oslo_config import cfg
from oslo_log import log as logging
@@ -370,6 +369,68 @@ class API(base.Base):

        return export_data

    def _get_import_backup(self, context, backup_url):
        """Prepare database backup record for import.

        This method decodes the provided backup_url and expects to find the
        id of the backup in it.

        It then checks the DB for the presence of this backup record; if the
        record is found and is not deleted, an exception is raised because
        the record cannot be created or reused.

        If the record is in deleted status then we must be trying to recover
        this record, so we'll reuse it.

        If the record doesn't already exist we create it with the provided
        id.

        :param context: running context
        :param backup_url: backup description to be used by the backup driver
        :return: BackupImport object
        :raises: InvalidBackup
        :raises: InvalidInput
        """
        # Deserialize string backup record into a dictionary
        backup_record = objects.Backup.decode_record(backup_url)

        # ID is a required field since it's what links incremental backups
        if 'id' not in backup_record:
            msg = _('Provided backup record is missing an id')
            raise exception.InvalidInput(reason=msg)

        kwargs = {
            'user_id': context.user_id,
            'project_id': context.project_id,
            'volume_id': '0000-0000-0000-0000',
            'status': 'creating',
        }

        try:
            # Try to get the backup with that ID in all projects even among
            # deleted entries.
            backup = objects.BackupImport.get_by_id(context,
                                                    backup_record['id'],
                                                    read_deleted='yes',
                                                    project_only=False)

            # If the record exists and it's not deleted we cannot proceed
            # with the import
            if backup.status != 'deleted':
                msg = _('Backup already exists in database.')
                raise exception.InvalidBackup(reason=msg)

            # Otherwise we'll "revive" the deleted backup record
            backup.update(kwargs)
            backup.save()

        except exception.BackupNotFound:
            # If the record doesn't exist, create it with the specific ID
            backup = objects.BackupImport(context=context,
                                          id=backup_record['id'], **kwargs)
            backup.create()

        return backup

    def import_record(self, context, backup_service, backup_url):
        """Make the RPC call to import a volume backup.

@@ -378,6 +439,7 @@ class API(base.Base):
        :param backup_url: backup description to be used by the backup driver
        :raises: InvalidBackup
        :raises: ServiceNotFound
        :raises: InvalidInput
        """
        check_policy(context, 'backup-import')

@@ -391,14 +453,9 @@ class API(base.Base):
        if len(hosts) == 0:
            raise exception.ServiceNotFound(service_id=backup_service)

        kwargs = {
            'user_id': context.user_id,
            'project_id': context.project_id,
            'volume_id': '0000-0000-0000-0000',
            'status': 'creating',
        }
        backup = objects.Backup(context=context, **kwargs)
        backup.create()
        # Get Backup object that will be used to import this backup record
        backup = self._get_import_backup(context, backup_url)

        first_host = hosts.pop()
        self.backup_rpcapi.import_record(context,
                                         first_host,
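For context, the code path above is reached through the backups API extension; the request shape below matches what the test cases later in this diff send. A minimal sketch only: the host, project id, token and backup driver name are placeholders, and backup_url is whatever a prior export_record call returned.

# Sketch: POSTing an exported backup record back into Cinder.
import json

import requests  # any HTTP client works; requests is only for illustration

backup_url = '<record returned by a previous backups export_record call>'
body = {'backup-record': {'backup_service': 'cinder.backup.drivers.swift',
                          'backup_url': backup_url}}
resp = requests.post(
    'http://cinder-api:8776/v2/PROJECT_ID/backups/import_record',
    headers={'X-Auth-Token': 'ADMIN_TOKEN',
             'Content-Type': 'application/json'},
    data=json.dumps(body))
assert resp.status_code == 201  # backup record created in 'creating' status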
@@ -661,28 +661,48 @@ class BackupManager(manager.SchedulerDependentManager):
            self._update_backup_error(backup, context, msg)
            raise exception.InvalidBackup(reason=msg)

        required_import_options = ['display_name',
                                   'display_description',
                                   'container',
                                   'size',
                                   'service_metadata',
                                   'service',
                                   'object_count']
        required_import_options = {
            'display_name',
            'display_description',
            'container',
            'size',
            'service_metadata',
            'service',
            'object_count',
            'id'
        }

        # Check for missing fields in imported data
        missing_opts = required_import_options - set(backup_options)
        if missing_opts:
            msg = (_('Driver successfully decoded imported backup data, '
                     'but there are missing fields (%s).') %
                   ', '.join(missing_opts))
            self._update_backup_error(backup, context, msg)
            raise exception.InvalidBackup(reason=msg)

        # Confirm the ID from the record in the DB is the right one
        backup_id = backup_options['id']
        if backup_id != backup.id:
            msg = (_('Trying to import backup metadata from id %(meta_id)s'
                     ' into backup %(id)s.') %
                   {'meta_id': backup_id, 'id': backup.id})
            self._update_backup_error(backup, context, msg)
            raise exception.InvalidBackup(reason=msg)

        # Overwrite some fields
        backup_options['status'] = 'available'
        backup_options['service'] = self.driver_name
        backup_options['availability_zone'] = self.az
        backup_options['host'] = self.host

        # Remove some values which are not actual fields and some that
        # were set by the API node
        for key in ('name', 'user_id', 'project_id'):
            backup_options.pop(key, None)

        backup_update = {}
        backup_update['status'] = 'available'
        backup_update['service'] = self.driver_name
        backup_update['availability_zone'] = self.az
        backup_update['host'] = self.host
        for entry in required_import_options:
            if entry not in backup_options:
                msg = (_('Backup metadata received from driver for '
                         'import is missing %s.'), entry)
                self._update_backup_error(backup, context, msg)
                raise exception.InvalidBackup(reason=msg)
            backup_update[entry] = backup_options[entry]
        # Update the database
        backup.update(backup_update)
        backup.update(backup_options)
        backup.save()

        # Verify backup
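The change above replaces a per-field loop over a list with a set of required keys, so missing-field detection collapses to a single set difference. A standalone illustration, with dictionaries invented for the example:

# Illustration of the set-difference check used in import_record() above.
required_import_options = {'display_name', 'container', 'size', 'id'}
backup_options = {'display_name': 'db-backup', 'size': 1}  # decoded driver metadata

missing_opts = required_import_options - set(backup_options)  # set(dict) == its keys
print(sorted(missing_opts))  # ['container', 'id']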
@@ -848,9 +848,9 @@ def reservation_expire(context):
###################


def backup_get(context, backup_id):
def backup_get(context, backup_id, read_deleted=None, project_only=True):
    """Get a backup or raise if it does not exist."""
    return IMPL.backup_get(context, backup_id)
    return IMPL.backup_get(context, backup_id, read_deleted, project_only)


def backup_get_all(context, filters=None, marker=None, limit=None,
@@ -3420,13 +3420,17 @@ def volume_glance_metadata_delete_by_snapshot(context, snapshot_id):


@require_context
def backup_get(context, backup_id):
def backup_get(context, backup_id, read_deleted=None, project_only=True):
    return _backup_get(context, backup_id)


def _backup_get(context, backup_id, session=None):
def _backup_get(context, backup_id, session=None, read_deleted=None,
                project_only=True):
    result = model_query(context, models.Backup, session=session,
                         project_only=True).filter_by(id=backup_id).first()
                         project_only=project_only,
                         read_deleted=read_deleted).\
        filter_by(id=backup_id).\
        first()

    if not result:
        raise exception.BackupNotFound(backup_id=backup_id)
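The read_deleted and project_only arguments end up in model_query, which is what lets the import path look up soft-deleted backup rows across projects. The sketch below is a simplified approximation of that effect and not Cinder's actual model_query implementation (which also falls back to the values carried on the request context):

# Simplified sketch of what the new arguments change in the generated query.
def sketch_backup_query(session, context, backup_id,
                        read_deleted='no', project_only=True):
    query = session.query(models.Backup)
    if read_deleted == 'no':
        query = query.filter_by(deleted=False)   # hide soft-deleted rows
    elif read_deleted == 'only':
        query = query.filter_by(deleted=True)
    # read_deleted == 'yes': no deleted filter, live and deleted rows both match
    if project_only and not context.is_admin:
        query = query.filter_by(project_id=context.project_id)
    return query.filter_by(id=backup_id).first()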
@@ -102,8 +102,9 @@ class Backup(base.CinderPersistentObject, base.CinderObject,
        return backup

    @base.remotable_classmethod
    def get_by_id(cls, context, id):
        db_backup = db.backup_get(context, id)
    def get_by_id(cls, context, id, read_deleted=None, project_only=None):
        db_backup = db.backup_get(context, id, read_deleted=read_deleted,
                                  project_only=project_only)
        return cls._from_db_object(context, cls(context), db_backup)

    @base.remotable
@@ -146,7 +147,13 @@ class Backup(base.CinderPersistentObject, base.CinderObject,
    @base.remotable
    def encode_record(self, **kwargs):
        """Serialize backup object, with optional extra info, into a string."""
        kwargs.update(self)
        # We don't want to export extra fields and we want to force lazy
        # loading, so we can't use dict(self) or self.obj_to_primitive
        record = {name: field.to_primitive(self, name, getattr(self, name))
                  for name, field in self.fields.items()}
        # We must update kwargs instead of record to ensure we don't overwrite
        # "real" data from the backup
        kwargs.update(record)
        retval = jsonutils.dumps(kwargs)
        if six.PY3:
            retval = retval.encode('utf-8')
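The comment about updating kwargs rather than record is the key design point: because the real field values are applied last, caller-supplied extra info can never shadow them, which is what test_import_record_additional_info_cant_overwrite below relies on. A toy illustration with made-up values:

# Toy illustration of the update order in encode_record().
kwargs = {'id': 'fake_id', 'extra_info': {'driver': {'key1': 'value1'}}}
record = {'id': 'real-backup-id', 'size': 1}

kwargs.update(record)   # values from the real record win over caller extras
print(kwargs['id'])     # real-backup-id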
@@ -193,3 +200,25 @@ class BackupList(base.ObjectListBase, base.CinderObject):
        backups = db.backup_get_all_by_volume(context, volume_id, filters)
        return base.obj_make_list(context, cls(context), objects.Backup,
                                  backups)


@base.CinderObjectRegistry.register
class BackupImport(Backup):
    """Special object for Backup Imports.

    This class should not be used for anything but Backup creation when
    importing backups to the DB.

    On creation it allows us to specify the ID for the backup; since that ID
    is the reference used in parent_id, it is imperative that it is
    preserved.

    Backup Import objects get promoted to standard Backups when the import
    is completed.
    """

    @base.remotable
    def create(self):
        updates = self.cinder_obj_get_changes()

        db_backup = db.backup_create(self._context, updates)
        self._from_db_object(self._context, self, db_backup)
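A short usage sketch of the new object, mirroring how _get_import_backup() in the API code above drives it (every value other than the preserved id is the same placeholder the API uses):

# Sketch: unlike a plain Backup, a BackupImport is created with an explicit id,
# so parent_id references from incremental backups keep resolving after import.
record = objects.Backup.decode_record(backup_url)
backup = objects.BackupImport(context=context,
                              id=record['id'],               # original id preserved
                              user_id=context.user_id,
                              project_id=context.project_id,
                              volume_id='0000-0000-0000-0000',
                              status='creating')
backup.create()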
@@ -30,6 +30,7 @@ from cinder import context
from cinder import db
from cinder import exception
from cinder.i18n import _
from cinder import objects
from cinder import test
from cinder.tests.unit.api import fakes
from cinder.tests.unit import utils
@@ -1599,20 +1600,15 @@ class BackupsAPITestCase(test.TestCase):
    def test_import_record_volume_id_specified_json(self,
                                                    _mock_import_record_rpc,
                                                    _mock_list_services):
        ctx = context.RequestContext('admin', 'fake', is_admin=True)
        utils.replace_obj_loader(self, objects.Backup)
        project_id = 'fake'
        backup_service = 'fake'
        backup_url = 'fake'
        _mock_import_record_rpc.return_value = \
            {'display_name': 'fake',
             'display_description': 'fake',
             'container': 'fake',
             'size': 1,
             'service_metadata': 'fake',
             'service': 'fake',
             'object_count': 1,
             'status': 'available',
             'availability_zone': 'fake'}
        _mock_list_services.return_value = ['fake']
        ctx = context.RequestContext('admin', project_id, is_admin=True)
        backup = objects.Backup(ctx, id='id', user_id='user_id',
                                project_id=project_id, status='available')
        backup_url = backup.encode_record()
        _mock_import_record_rpc.return_value = None
        _mock_list_services.return_value = [backup_service]

        req = webob.Request.blank('/v2/fake/backups/import_record')
        body = {'backup-record': {'backup_service': backup_service,
@@ -1623,29 +1619,77 @@ class BackupsAPITestCase(test.TestCase):

        res = req.get_response(fakes.wsgi_app(fake_auth_context=ctx))
        res_dict = json.loads(res.body)

        # verify that request is successful
        self.assertEqual(201, res.status_int)
        self.assertTrue('id' in res_dict['backup'])
        self.assertIn('id', res_dict['backup'])
        self.assertEqual('id', res_dict['backup']['id'])

        # Verify that entry in DB is as expected
        db_backup = objects.Backup.get_by_id(ctx, 'id')
        self.assertEqual(ctx.project_id, db_backup.project_id)
        self.assertEqual(ctx.user_id, db_backup.user_id)
        self.assertEqual('0000-0000-0000-0000', db_backup.volume_id)
        self.assertEqual('creating', db_backup.status)

    @mock.patch('cinder.backup.api.API._list_backup_services')
    @mock.patch('cinder.backup.rpcapi.BackupAPI.import_record')
    def test_import_record_volume_id_exists_deleted(self,
                                                    _mock_import_record_rpc,
                                                    _mock_list_services):
        ctx = context.RequestContext('admin', 'fake', is_admin=True)
        utils.replace_obj_loader(self, objects.Backup)

        # Original backup belonged to a different user_id and project_id
        backup = objects.Backup(ctx, id='id', user_id='original_user_id',
                                project_id='original_project_id',
                                status='available')
        backup_url = backup.encode_record()

        # Deleted DB entry has project_id and user_id set to fake
        backup_id = self._create_backup('id', status='deleted')
        backup_service = 'fake'
        _mock_import_record_rpc.return_value = None
        _mock_list_services.return_value = [backup_service]

        req = webob.Request.blank('/v2/fake/backups/import_record')
        body = {'backup-record': {'backup_service': backup_service,
                                  'backup_url': backup_url}}
        req.body = json.dumps(body)
        req.method = 'POST'
        req.headers['content-type'] = 'application/json'

        res = req.get_response(fakes.wsgi_app(fake_auth_context=ctx))
        res_dict = json.loads(res.body)

        # verify that request is successful
        self.assertEqual(201, res.status_int)
        self.assertIn('id', res_dict['backup'])
        self.assertEqual('id', res_dict['backup']['id'])

        # Verify that entry in DB is as expected, with new project and user_id
        db_backup = objects.Backup.get_by_id(ctx, 'id')
        self.assertEqual(ctx.project_id, db_backup.project_id)
        self.assertEqual(ctx.user_id, db_backup.user_id)
        self.assertEqual('0000-0000-0000-0000', db_backup.volume_id)
        self.assertEqual('creating', db_backup.status)

        db.backup_destroy(context.get_admin_context(), backup_id)

    @mock.patch('cinder.backup.api.API._list_backup_services')
    @mock.patch('cinder.backup.rpcapi.BackupAPI.import_record')
    def test_import_record_volume_id_specified_xml(self,
                                                   _mock_import_record_rpc,
                                                   _mock_list_services):
        ctx = context.RequestContext('admin', 'fake', is_admin=True)
        utils.replace_obj_loader(self, objects.Backup)
        project_id = 'fake'
        backup_service = 'fake'
        backup_url = 'fake'
        _mock_import_record_rpc.return_value = \
            {'display_name': 'fake',
             'display_description': 'fake',
             'container': 'fake',
             'size': 1,
             'service_metadata': 'fake',
             'service': 'fake',
             'object_count': 1,
             'status': 'available',
             'availability_zone': 'fake'}
        _mock_list_services.return_value = ['fake']
        ctx = context.RequestContext('admin', project_id, is_admin=True)
        backup = objects.Backup(ctx, id='id', user_id='user_id',
                                project_id=project_id, status='available')
        backup_url = backup.encode_record()
        _mock_import_record_rpc.return_value = None
        _mock_list_services.return_value = [backup_service]

        req = webob.Request.blank('/v2/fake/backups/import_record')
        req.body = ('<backup-record backup_service="%(backup_service)s" '
@@ -1658,10 +1702,20 @@ class BackupsAPITestCase(test.TestCase):
        req.headers['Accept'] = 'application/xml'
        res = req.get_response(fakes.wsgi_app(fake_auth_context=ctx))

        # verify that request is successful
        self.assertEqual(201, res.status_int)

        # Verify that entry in DB is as expected
        db_backup = objects.Backup.get_by_id(ctx, 'id')
        self.assertEqual(ctx.project_id, db_backup.project_id)
        self.assertEqual(ctx.user_id, db_backup.user_id)
        self.assertEqual('0000-0000-0000-0000', db_backup.volume_id)
        self.assertEqual('creating', db_backup.status)

        # Verify the response
        dom = minidom.parseString(res.body)
        backup = dom.getElementsByTagName('backup')
        self.assertTrue(backup.item(0).hasAttribute('id'))
        back = dom.getElementsByTagName('backup')
        self.assertEqual(backup.id, back.item(0).attributes['id'].value)

    @mock.patch('cinder.backup.api.API._list_backup_services')
    def test_import_record_with_no_backup_services(self,
@@ -1686,14 +1740,61 @@ class BackupsAPITestCase(test.TestCase):
                         % backup_service,
                         res_dict['computeFault']['message'])

    @mock.patch('cinder.backup.api.API._list_backup_services')
    def test_import_backup_with_wrong_backup_url(self, _mock_list_services):
        ctx = context.RequestContext('admin', 'fake', is_admin=True)
        backup_service = 'fake'
        backup_url = 'fake'
        _mock_list_services.return_value = ['no-match1', 'no-match2']
        req = webob.Request.blank('/v2/fake/backups/import_record')
        body = {'backup-record': {'backup_service': backup_service,
                                  'backup_url': backup_url}}
        req.body = json.dumps(body)
        req.method = 'POST'
        req.headers['content-type'] = 'application/json'

        res = req.get_response(fakes.wsgi_app(fake_auth_context=ctx))
        res_dict = json.loads(res.body)
        self.assertEqual(400, res.status_int)
        self.assertEqual(400, res_dict['badRequest']['code'])
        self.assertEqual("Invalid input received: Can't parse backup record.",
                         res_dict['badRequest']['message'])

    @mock.patch('cinder.backup.api.API._list_backup_services')
    def test_import_backup_with_existing_backup_record(self,
                                                       _mock_list_services):
        ctx = context.RequestContext('admin', 'fake', is_admin=True)
        backup_id = self._create_backup('1')
        backup_service = 'fake'
        backup = objects.Backup.get_by_id(ctx, backup_id)
        backup_url = backup.encode_record()
        _mock_list_services.return_value = ['no-match1', 'no-match2']
        req = webob.Request.blank('/v2/fake/backups/import_record')
        body = {'backup-record': {'backup_service': backup_service,
                                  'backup_url': backup_url}}
        req.body = json.dumps(body)
        req.method = 'POST'
        req.headers['content-type'] = 'application/json'

        res = req.get_response(fakes.wsgi_app(fake_auth_context=ctx))
        res_dict = json.loads(res.body)
        self.assertEqual(400, res.status_int)
        self.assertEqual(400, res_dict['badRequest']['code'])
        self.assertEqual('Invalid backup: Backup already exists in database.',
                         res_dict['badRequest']['message'])

        db.backup_destroy(context.get_admin_context(), backup_id)

    @mock.patch('cinder.backup.api.API._list_backup_services')
    @mock.patch('cinder.backup.rpcapi.BackupAPI.import_record')
    def test_import_backup_with_missing_backup_services(self,
                                                        _mock_import_record,
                                                        _mock_list_services):
        ctx = context.RequestContext('admin', 'fake', is_admin=True)
        backup_id = self._create_backup('1', status='deleted')
        backup_service = 'fake'
        backup_url = 'fake'
        backup = objects.Backup.get_by_id(ctx, backup_id)
        backup_url = backup.encode_record()
        _mock_list_services.return_value = ['no-match1', 'no-match2']
        _mock_import_record.side_effect = \
            exception.ServiceNotFound(service_id='fake')
@@ -1708,10 +1809,11 @@ class BackupsAPITestCase(test.TestCase):
        res_dict = json.loads(res.body)
        self.assertEqual(500, res.status_int)
        self.assertEqual(500, res_dict['computeFault']['code'])
        self.assertEqual('Service %s could not be found.'
                         % backup_service,
        self.assertEqual('Service %s could not be found.' % backup_service,
                         res_dict['computeFault']['message'])

        db.backup_destroy(context.get_admin_context(), backup_id)

    def test_import_record_with_missing_body_elements(self):
        ctx = context.RequestContext('admin', 'fake', is_admin=True)
        backup_service = 'fake'
@@ -19,6 +19,7 @@ from cinder import exception
from cinder import objects
from cinder.tests.unit import fake_volume
from cinder.tests.unit import objects as test_objects
from cinder.tests.unit import utils


fake_backup = {
@@ -86,15 +87,17 @@ class TestBackup(test_objects.BaseObjectsTestCase):
        self.assertEqual('3', backup.temp_snapshot_id)

    def test_import_record(self):
        utils.replace_obj_loader(self, objects.Backup)
        backup = objects.Backup(context=self.context, id=1, parent_id=None,
                                num_dependent_backups=0)
        export_string = backup.encode_record()
        imported_backup = objects.Backup.decode_record(export_string)

        # Make sure we don't lose data when converting from string
        self.assertDictEqual(dict(backup), imported_backup)
        self.assertDictEqual(self._expected_backup(backup), imported_backup)

    def test_import_record_additional_info(self):
        utils.replace_obj_loader(self, objects.Backup)
        backup = objects.Backup(context=self.context, id=1, parent_id=None,
                                num_dependent_backups=0)
        extra_info = {'driver': {'key1': 'value1', 'key2': 'value2'}}
@@ -107,18 +110,24 @@ class TestBackup(test_objects.BaseObjectsTestCase):

        # Make sure we don't lose data when converting from string and that
        # extra info is still there
        expected = dict(backup)
        expected = self._expected_backup(backup)
        expected['extra_info'] = extra_info
        self.assertDictEqual(expected, imported_backup)

    def _expected_backup(self, backup):
        record = {name: field.to_primitive(backup, name, getattr(backup, name))
                  for name, field in backup.fields.items()}
        return record

    def test_import_record_additional_info_cant_overwrite(self):
        utils.replace_obj_loader(self, objects.Backup)
        backup = objects.Backup(context=self.context, id=1, parent_id=None,
                                num_dependent_backups=0)
        export_string = backup.encode_record(id='fake_id')
        imported_backup = objects.Backup.decode_record(export_string)

        # Make sure the extra_info can't overwrite basic data
        self.assertDictEqual(dict(backup), imported_backup)
        self.assertDictEqual(self._expected_backup(backup), imported_backup)

    def test_import_record_decoding_error(self):
        export_string = '123456'
@@ -16,6 +16,7 @@

import ddt
import tempfile
import uuid

import mock
from oslo_config import cfg
@@ -141,20 +142,24 @@ class BaseBackupTest(test.TestCase):
        db.volume_attached(self.ctxt, attachment['id'], None, 'testhost',
                           '/dev/vd0')

    def _create_exported_record_entry(self, vol_size=1):
    def _create_exported_record_entry(self, vol_size=1, exported_id=None):
        """Create backup metadata export entry."""
        vol_id = self._create_volume_db_entry(status='available',
                                              size=vol_size)
        backup = self._create_backup_db_entry(status='available',
                                              volume_id=vol_id)

        if exported_id is not None:
            backup.id = exported_id

        export = self.backup_mgr.export_record(self.ctxt, backup)
        return export

    def _create_export_record_db_entry(self,
                                       volume_id='0000',
                                       status='creating',
                                       project_id='fake'):
                                       project_id='fake',
                                       backup_id=None):
        """Create a backup entry in the DB.

        Return the entry ID

@@ -164,7 +169,9 @@ class BaseBackupTest(test.TestCase):
        kwargs['user_id'] = 'fake'
        kwargs['project_id'] = project_id
        kwargs['status'] = status
        backup = objects.Backup(context=self.ctxt, **kwargs)
        if backup_id:
            kwargs['id'] = backup_id
        backup = objects.BackupImport(context=self.ctxt, **kwargs)
        backup.create()
        return backup
@@ -682,8 +689,11 @@ class BackupTestCase(BaseBackupTest):
        driver does not support verify.
        """
        vol_size = 1
        export = self._create_exported_record_entry(vol_size=vol_size)
        imported_record = self._create_export_record_db_entry()
        backup_id = uuid.uuid4()
        export = self._create_exported_record_entry(vol_size=vol_size,
                                                    exported_id=backup_id)
        imported_record = self._create_export_record_db_entry(
            backup_id=backup_id)
        backup_hosts = []
        self.backup_mgr.import_record(self.ctxt,
                                      imported_record,
@@ -694,6 +704,24 @@ class BackupTestCase(BaseBackupTest):
        self.assertEqual('available', backup['status'])
        self.assertEqual(vol_size, backup['size'])

    def test_import_record_with_wrong_id(self):
        """Test backup record import with a wrong id.

        Test that the import fails with InvalidBackup when the id in the
        exported metadata does not match the backup record being imported.
        """
        vol_size = 1
        export = self._create_exported_record_entry(vol_size=vol_size)
        imported_record = self._create_export_record_db_entry()
        backup_hosts = []
        self.assertRaises(exception.InvalidBackup,
                          self.backup_mgr.import_record,
                          self.ctxt,
                          imported_record,
                          export['backup_service'],
                          export['backup_url'],
                          backup_hosts)

    def test_import_record_with_bad_service(self):
        """Test error handling.

@@ -808,8 +836,11 @@ class BackupTestCaseWithVerify(BaseBackupTest):
        driver implements verify.
        """
        vol_size = 1
        export = self._create_exported_record_entry(vol_size=vol_size)
        imported_record = self._create_export_record_db_entry()
        backup_id = uuid.uuid4()
        export = self._create_exported_record_entry(
            vol_size=vol_size, exported_id=backup_id)
        imported_record = self._create_export_record_db_entry(
            backup_id=backup_id)
        backup_hosts = []
        backup_driver = self.backup_mgr.service.get_backup_driver(self.ctxt)
        _mock_backup_verify_class = ('%s.%s.%s' %

@@ -833,8 +864,11 @@ class BackupTestCaseWithVerify(BaseBackupTest):
        record where the backup driver returns an exception.
        """
        vol_size = 1
        export = self._create_exported_record_entry(vol_size=vol_size)
        imported_record = self._create_export_record_db_entry()
        backup_id = uuid.uuid4()
        export = self._create_exported_record_entry(
            vol_size=vol_size, exported_id=backup_id)
        imported_record = self._create_export_record_db_entry(
            backup_id=backup_id)
        backup_hosts = []
        backup_driver = self.backup_mgr.service.get_backup_driver(self.ctxt)
        _mock_backup_verify_class = ('%s.%s.%s' %
@@ -14,9 +14,11 @@
#

import socket
import uuid

from oslo_service import loopingcall
from oslo_utils import timeutils
import oslo_versionedobjects

from cinder import context
from cinder import db
@@ -184,3 +186,24 @@ class ZeroIntervalLoopingCall(loopingcall.FixedIntervalLoopingCall):
    def start(self, interval, **kwargs):
        kwargs['initial_delay'] = 0
        return super(ZeroIntervalLoopingCall, self).start(0, **kwargs)


def replace_obj_loader(testcase, obj):
    def fake_obj_load_attr(self, name):
        # This will raise KeyError for non existing fields as expected
        field = self.fields[name]

        if field.default != oslo_versionedobjects.fields.UnspecifiedDefault:
            value = field.default
        elif field.nullable:
            value = None
        elif isinstance(field, oslo_versionedobjects.fields.StringField):
            value = ''
        elif isinstance(field, oslo_versionedobjects.fields.IntegerField):
            value = 1
        elif isinstance(field, oslo_versionedobjects.fields.UUIDField):
            value = uuid.uuid4()
        setattr(self, name, value)

    testcase.addCleanup(setattr, obj, 'obj_load_attr', obj.obj_load_attr)
    obj.obj_load_attr = fake_obj_load_attr
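replace_obj_loader is what lets the new tests build Backup objects without a database row behind them: any field that would normally be lazy-loaded from the DB gets a type-appropriate stand-in instead. A minimal usage sketch, as the test cases earlier in this diff call it:

# Minimal usage sketch inside a test case: after replacing the loader,
# touching an unset field yields a default (None, '', 1 or a fresh UUID,
# depending on the field type) rather than triggering a DB lookup.
utils.replace_obj_loader(self, objects.Backup)
backup = objects.Backup(context=self.context, id=1)
print(backup.display_description)   # stand-in value, no DB round trip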
@@ -71,6 +71,7 @@ objects_ignore_messages = [
    "Module 'cinder.objects' has no 'BackupList' member",
    "Module 'cinder.objects' has no 'Service' member",
    "Module 'cinder.objects' has no 'ServiceList' member",
    "Module 'cinder.objects' has no 'BackupImport' member",
]
objects_ignore_modules = ["cinder/objects/"]