Merge "adding more protection parameters retention period"

This commit is contained in:
Zuul 2017-11-13 08:46:44 +00:00 committed by Gerrit Code Review
commit 28abeecf83
3 changed files with 395 additions and 0 deletions

View File

@@ -101,6 +101,12 @@ OPERATION_RUN_TYPE_RESUME = 'resume'
OPERATION_EXE_STATE_IN_PROGRESS = 'in_progress'
OPERATION_EXE_STATE_SUCCESS = 'success'
OPERATION_EXE_STATE_FAILED = 'failed'
OPERATION_GET_MAX_BACKUP_STATE_FAILED = 'get_max_backup_policy_failed'
OPERATION_EXE_MAX_BACKUP_STATE_SUCCESS = 'execute_max_backup_policy_success'
OPERATION_EXE_MAX_BACKUP_STATE_FAILED = 'execute_max_backup_policy_failed'
OPERATION_GET_DURATION_STATE_FAILED = 'get_duration_policy_failed'
OPERATION_EXE_DURATION_STATE_SUCCESS = 'execute_duration_policy_success'
OPERATION_EXE_DURATION_STATE_FAILED = 'execute_duration_policy_failed'
OPERATION_EXE_STATE_DROPPED_OUT_OF_WINDOW = 'dropped_out_of_window'
RESTORE_STATUS_SUCCESS = 'success'

View File

@@ -0,0 +1,192 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from datetime import datetime

from oslo_utils import uuidutils

from karbor.common import constants
from karbor import context
from karbor import exception
from karbor.i18n import _
from karbor import objects
from karbor.services.operationengine.operations import base


class RetentionProtectOperation(base.Operation):
    """Retention protect operation."""

    OPERATION_TYPE = "retention_protect"

    def check_operation_definition(self, operation_definition):
        provider_id = operation_definition.get("provider_id")
        if not provider_id or not uuidutils.is_uuid_like(provider_id):
            reason = _("Provider_id is invalid")
            raise exception.InvalidOperationDefinition(reason=reason)
        plan_id = operation_definition.get("plan_id")
        if not plan_id or not uuidutils.is_uuid_like(plan_id):
            reason = _("Plan_id is invalid")
            raise exception.InvalidOperationDefinition(reason=reason)
        plan = objects.Plan.get_by_id(context.get_admin_context(), plan_id)
        if provider_id != plan.provider_id:
            reason = _("Provider_id conflicts with the plan's provider_id")
            raise exception.InvalidOperationDefinition(reason=reason)

    def _execute(self, operation_definition, param):
        log_ref = self._create_operation_log(param)
        self._run(operation_definition, param, log_ref)

    def _resume(self, operation_definition, param, log_ref):
        self._run(operation_definition, param, log_ref)

    def _run(self, operation_definition, param, log_ref):
        project_id = param.get("project_id")
        client = self._create_karbor_client(
            param.get("user_id"), project_id)
        provider_id = operation_definition.get("provider_id")
        plan_id = operation_definition.get("plan_id")
        trigger_id = param.get("trigger_id", None)
        scheduled_operation_id = param.get("scheduled_operation_id", None)
        extra_info = {
            'created_by': constants.OPERATION_ENGINE,
            'trigger_id': trigger_id,
            'scheduled_operation_id': scheduled_operation_id
        }
        # Create the new checkpoint before the retention policies are applied.
        try:
            client.checkpoints.create(provider_id, plan_id, extra_info)
        except Exception:
            state = constants.OPERATION_EXE_STATE_FAILED
        else:
            state = constants.OPERATION_EXE_STATE_SUCCESS
        finally:
            self._update_log_when_operation_finished(log_ref, state)

        # A missing or non-positive max_backups means the number of
        # retained checkpoints is unlimited.
        try:
            max_backups = int(operation_definition.get("max_backups", -1))
            max_backups = -1 if max_backups <= 0 else max_backups
        except Exception:
            state = constants.OPERATION_GET_MAX_BACKUP_STATE_FAILED
            self._update_log_when_operation_finished(log_ref, state)
            reason = _("Failed to get max_backups")
            raise exception.InvalidOperationDefinition(reason=reason)

        # A missing or non-positive retention_duration (in days) means
        # checkpoints are kept indefinitely.
        try:
            retention_duration = int(operation_definition.get(
                "retention_duration", -1))
            retention_duration = (-1 if retention_duration <= 0
                                  else retention_duration)
        except Exception:
            state = constants.OPERATION_GET_DURATION_STATE_FAILED
            self._update_log_when_operation_finished(log_ref, state)
            reason = _("Failed to get retention_duration")
            raise exception.InvalidOperationDefinition(reason=reason)

        try:
            self._delete_old_backup_by_max_backups(
                client, max_backups, project_id, provider_id, plan_id)
            state = constants.OPERATION_EXE_MAX_BACKUP_STATE_SUCCESS
        except Exception:
            state = constants.OPERATION_EXE_MAX_BACKUP_STATE_FAILED
            reason = (_("Can't execute retention policy provider_id: "
                        "%(provider_id)s plan_id: %(plan_id)s "
                        "max_backups: %(max_backups)s") %
                      {"provider_id": provider_id, "plan_id": plan_id,
                       "max_backups": max_backups})
            raise exception.InvalidOperationDefinition(reason=reason)
        finally:
            self._update_log_when_operation_finished(log_ref, state)

        try:
            self._delete_old_backup_by_duration(
                client, retention_duration, project_id, provider_id, plan_id)
            state = constants.OPERATION_EXE_DURATION_STATE_SUCCESS
        except Exception:
            state = constants.OPERATION_EXE_DURATION_STATE_FAILED
            reason = (_("Can't execute retention policy provider_id: "
                        "%(provider_id)s plan_id: %(plan_id)s "
                        "retention_duration: %(retention_duration)s") %
                      {"provider_id": provider_id, "plan_id": plan_id,
                       "retention_duration": retention_duration})
            raise exception.InvalidOperationDefinition(reason=reason)
        finally:
            self._update_log_when_operation_finished(log_ref, state)

    @staticmethod
    def _list_available_checkpoint(client, project_id,
                                   provider_id, plan_id):
        search_opts = {'project_id': project_id,
                       'plan_id': plan_id,
                       'status': constants.CHECKPOINT_STATUS_AVAILABLE
                       }
        sort = {"created_at": "desc"}
        try:
            checkpoints = client.checkpoints.list(
                provider_id=provider_id,
                search_opts=search_opts,
                limit=None,
                sort=sort)
            avi_check = [x for x in checkpoints if x.status ==
                         constants.CHECKPOINT_STATUS_AVAILABLE]
        except Exception as e:
            reason = (_("Failed to list checkpoints by %(provider_id)s "
                        "and %(plan_id)s, reason: %(reason)s") %
                      {"provider_id": provider_id,
                       "plan_id": plan_id, "reason": e})
            raise exception.InvalidOperationDefinition(reason=reason)
        return avi_check

    def _delete_old_backup_by_max_backups(
            self, client, max_backups, project_id, provider_id, plan_id):
        if max_backups == -1:
            return
        backup_items = self._list_available_checkpoint(
            client, project_id, provider_id, plan_id)
        count = len(backup_items)
        if count > max_backups:
            # Checkpoints are sorted by created_at in descending order,
            # so everything after the first max_backups entries is the
            # oldest and is deleted.
            for item in backup_items[max_backups:]:
                try:
                    client.checkpoints.delete(provider_id, item.id)
                except Exception as e:
                    reason = (_("Failed to delete checkpoint: %(cp_id)s by "
                                "max_backups with the reason: %(reason)s") %
                              {"cp_id": item.id, "reason": e})
                    raise exception.InvalidOperationDefinition(reason=reason)

    def _delete_old_backup_by_duration(
            self, client, retention_duration,
            project_id, provider_id, plan_id):
        if retention_duration == -1:
            return
        backup_items = self._list_available_checkpoint(
            client, project_id, provider_id, plan_id)
        now = datetime.utcnow()
        for item in backup_items:
            created_at = datetime.strptime(item.created_at, "%Y-%m-%d")
            interval = (now - created_at).days
            if interval > retention_duration:
                try:
                    client.checkpoints.delete(provider_id, item.id)
                except Exception as e:
                    reason = (_("Failed to delete checkpoint: %(cp_id)s "
                                "by retention_duration with the reason: "
                                "%(reason)s") %
                              {"cp_id": item.id, "reason": e})
                    raise exception.InvalidOperationDefinition(reason=reason)
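
For reference, the operation_definition consumed by this new operation carries the two retention parameters alongside the provider and plan. A minimal example (the UUIDs below are made up; the string values for max_backups and retention_duration mirror the unit test that follows):

    # Example definition for a 'retention_protect' scheduled operation:
    # keep at most 3 available checkpoints, and none older than 14 days.
    operation_definition = {
        'provider_id': 'cf56bd3e-97a7-4078-b6d5-f36246333fd9',
        'plan_id': '2a9ce1f3-cc1a-4516-9435-0ebb13caa398',
        'max_backups': '3',
        'retention_duration': '14'
    }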

View File

@@ -0,0 +1,197 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from datetime import datetime
from datetime import timedelta

import mock

from karbor.common import constants
from karbor import context
from karbor import exception
from karbor import objects
from karbor.services.operationengine.operations import base as base_operation
from karbor.services.operationengine.operations import retention_operation
from karbor.tests import base


class FakeUserTrustManager(object):
    def add_operation(self, context, operation_id):
        return "123"

    def delete_operation(self, context, operation_id):
        pass

    def resume_operation(self, operation_id, user_id, project_id, trust_id):
        pass


class FakeCheckPointInstance(object):
    def __init__(self, id, created_at):
        super(FakeCheckPointInstance, self).__init__()
        self.id = id
        self.created_at = created_at
        self.status = 'available'
        self.project_id = '123'
        self.protection_plan = {
            'provider_id': '123',
            'id': '123',
            'resources': None,
            'name': 'protect vm resource'
        }


class FakeCheckPoint(object):
    _checkpoints = []

    def __init__(self):
        super(FakeCheckPoint, self).__init__()

    def create_all_check_points(self):
        # Seed three available checkpoints that are 16, 15 and 3 days old.
        now = datetime.utcnow()
        d1 = now - timedelta(days=16)
        d2 = now - timedelta(days=15)
        d3 = now - timedelta(days=3)
        self._checkpoints.insert(
            0, FakeCheckPointInstance("1", d1.strftime("%Y-%m-%d")))
        self._checkpoints.insert(
            0, FakeCheckPointInstance("2", d2.strftime("%Y-%m-%d")))
        self._checkpoints.insert(
            0, FakeCheckPointInstance("3", d3.strftime("%Y-%m-%d")))

    def create(self, provider_id, plan_id, extra_info):
        now = datetime.utcnow()
        self._checkpoints.insert(
            0, FakeCheckPointInstance("4", now.strftime("%Y-%m-%d")))

    def delete(self, provider_id, checkpoint_id):
        self._checkpoints = [x for x in self._checkpoints
                             if x.id != checkpoint_id]

    def list(self, provider_id, search_opts=None, limit=None, sort=None):
        return self._checkpoints


class FakeKarborClient(object):
    def __init__(self):
        super(FakeKarborClient, self).__init__()
        self._check_point = FakeCheckPoint()

    @property
    def checkpoints(self):
        return self._check_point

    def create_all_check_points(self):
        self._check_point.create_all_check_points()


class ProtectOperationTestCase(base.TestCase):
    """Test cases for the RetentionProtectOperation class."""

    def setUp(self):
        super(ProtectOperationTestCase, self).setUp()
        self._user_trust_manager = FakeUserTrustManager()
        self._operation = retention_operation.RetentionProtectOperation(
            self._user_trust_manager
        )
        self._operation_db = self._create_operation()
        self._fake_karbor_client = FakeKarborClient()

    def test_check_operation_definition(self):
        self.assertRaises(exception.InvalidOperationDefinition,
                          self._operation.check_operation_definition,
                          {})

    @mock.patch.object(base_operation.Operation, '_create_karbor_client')
    def test_execute(self, client):
        client.return_value = self._fake_karbor_client
        self._fake_karbor_client.create_all_check_points()
        now = datetime.utcnow()
        param = {
            'operation_id': self._operation_db.id,
            'triggered_time': now,
            'expect_start_time': now,
            'window_time': 30,
            'run_type': constants.OPERATION_RUN_TYPE_EXECUTE,
            'user_id': self._operation_db.user_id,
            'project_id': self._operation_db.project_id
        }
        self._operation.run(self._operation_db.operation_definition,
                            param=param)

        logs = objects.ScheduledOperationLogList.get_by_filters(
            context.get_admin_context(),
            {'state': constants.OPERATION_EXE_DURATION_STATE_SUCCESS,
             'operation_id': self._operation_db.id}, 1,
            None, ['created_at'], ['desc'])
        self.assertIsNotNone(logs)
        log = logs.objects[0]
        self.assertTrue(now, log.triggered_time)

        # max_backups=3 and retention_duration=14 should leave two checkpoints.
        checkpoints = self._fake_karbor_client.checkpoints.list("123")
        self.assertEqual(2, len(checkpoints))

    @mock.patch.object(base_operation.Operation, '_create_karbor_client')
    def test_resume(self, client):
        log = self._create_operation_log(self._operation_db.id)
        client.return_value = self._fake_karbor_client
        now = datetime.utcnow()
        param = {
            'operation_id': self._operation_db.id,
            'triggered_time': now,
            'expect_start_time': now,
            'window_time': 30,
            'run_type': constants.OPERATION_RUN_TYPE_RESUME,
            'user_id': self._operation_db.user_id,
            'project_id': self._operation_db.project_id
        }
        self._operation.run(self._operation_db.operation_definition,
                            param=param)

        logs = objects.ScheduledOperationLogList.get_by_filters(
            context.get_admin_context(),
            {'state': constants.OPERATION_EXE_DURATION_STATE_SUCCESS,
             'operation_id': self._operation_db.id}, 1,
            None, ['created_at'], ['desc'])
        self.assertIsNotNone(logs)
        log1 = logs.objects[0]
        self.assertEqual(log.id, log1.id)

    def _create_operation(self):
        operation_info = {
            'name': 'protect vm',
            'description': 'protect vm resource',
            'operation_type': 'retention_protect',
            'user_id': '123',
            'project_id': '123',
            'trigger_id': '123',
            'operation_definition': {
                'max_backups': '3',
                'provider_id': '123',
                'plan_id': '123',
                'retention_duration': '14'
            }
        }
        operation = objects.ScheduledOperation(context.get_admin_context(),
                                               **operation_info)
        operation.create()
        return operation

    def _create_operation_log(self, operation_id):
        log_info = {
            'operation_id': operation_id,
            'state': constants.OPERATION_EXE_STATE_IN_PROGRESS,
        }
        log = objects.ScheduledOperationLog(context.get_admin_context(),
                                            **log_info)
        log.create()
        return log
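
As a quick sanity check of the fixture arithmetic in test_execute: the fake client starts with checkpoints aged 16, 15 and 3 days, the run adds a fresh one, max_backups=3 drops the 16-day-old checkpoint, and retention_duration=14 drops the 15-day-old one, leaving two. A standalone sketch of the same calculation (illustrative only):

    # Ages in days, newest first, mirroring what the fake client returns.
    ages_days = [0, 3, 15, 16]
    max_backups = 3
    retention_duration = 14

    kept = ages_days[:max_backups]                        # drops the 16-day-old checkpoint
    kept = [a for a in kept if a <= retention_duration]   # drops the 15-day-old checkpoint
    assert len(kept) == 2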