Merge "freezer api support for action"
This commit is contained in:
64
README.rst
64
README.rst
@@ -103,6 +103,16 @@ GET /v1/clients/{freezerc_id} Get client details
|
||||
UPDATE /v1/clients/{freezerc_id} Updates the specified client information
|
||||
DELETE /v1/clients/{freezerc_id} Deletes the specified client information
|
||||
|
||||
Freezer actions management
|
||||
---------------------------
|
||||
GET /v1/actions(?limit,offset) Lists registered actions
|
||||
POST /v1/actions Creates action entry
|
||||
|
||||
GET /v1/actions/{action_id} Get action details
|
||||
UPDATE /v1/actions/{action_id} Updates the specified action information
|
||||
DELETE /v1/actions/{action_id} Deletes the specified action information
|
||||
PATCH /v1/actions/{action_id} Updates part of the document (such as status information)
|
||||
|
||||
Data Structures
|
||||
===============
|
||||
|
||||
@@ -175,3 +185,57 @@ client_type :=
|
||||
"client" : client_info document,
|
||||
"user_id": string, # owner of the information (OS X-User-Id, keystone provided, added by api)
|
||||
}
|
||||
|
||||
|
||||
Jobs and Actions
|
||||
----------------
|
||||
|
||||
job_info
|
||||
{
|
||||
parameters for freezer to execute a specific job.
|
||||
}
|
||||
|
||||
example backup job_info
|
||||
{
|
||||
"action" = "backup"
|
||||
"mode" = "fs"
|
||||
"src_file" = /home/tylerdurden/project_mayhem
|
||||
"backup_name" = project_mayhem_backup
|
||||
"container" = my_backup_container
|
||||
"max_backup_level" : int
|
||||
"always_backup_level": int
|
||||
"restart_always_backup": int
|
||||
"no_incremental" : bool
|
||||
"encrypt_pass_file" = private_key_file
|
||||
"log_file" = /var/log/freezer.log
|
||||
"hostname" = false
|
||||
"max_cpu_priority" = false
|
||||
}
|
||||
|
||||
example restore job_info
|
||||
{
|
||||
"action": "restore"
|
||||
"restore-abs-path": "/home/tylerdurden/project_mayhem"
|
||||
"container" : "my_backup_container"
|
||||
"backup-name": "project_mayhem_backup"
|
||||
"restore-from-host": "another_host"
|
||||
"max_cpu_priority": true
|
||||
}
|
||||
|
||||
action_info
|
||||
{
|
||||
"action_id": string uuid4, not analyzed
|
||||
"job": job_info list ?
|
||||
"client_id": string
|
||||
"description": string
|
||||
"time_created": int (timestamp)
|
||||
"time_started": int (timestamp)
|
||||
"time_ended": int (timestamp)
|
||||
"status": string: pending | notified(?) | started | abort_req | aborting | aborted | success | fail
|
||||
}
|
||||
|
||||
Action document (the actual document stored in elasticsearch)
|
||||
{
|
||||
"action": action_info
|
||||
"user_id": string, # owner of the information (OS X-User-Id, keystone provided, added by api)
|
||||
}
|
||||
|
||||
@@ -21,6 +21,7 @@ Hudson (tjh@cryptsoft.com).
|
||||
|
||||
from freezer_api.api.v1 import backups
|
||||
from freezer_api.api.v1 import clients
|
||||
from freezer_api.api.v1 import actions
|
||||
from freezer_api.api.v1 import homedoc
|
||||
|
||||
VERSION = {
|
||||
@@ -53,4 +54,10 @@ def public_endpoints(storage_driver):
|
||||
('/clients/{client_id}',
|
||||
clients.ClientsResource(storage_driver)),
|
||||
|
||||
('/actions',
|
||||
actions.ActionsCollectionResource(storage_driver)),
|
||||
|
||||
('/actions/{action_id}',
|
||||
actions.ActionsResource(storage_driver)),
|
||||
|
||||
]
|
||||
|
||||
116
freezer_api/api/v1/actions.py
Normal file
116
freezer_api/api/v1/actions.py
Normal file
@@ -0,0 +1,116 @@
|
||||
"""
|
||||
Copyright 2015 Hewlett-Packard
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
This product includes cryptographic software written by Eric Young
|
||||
(eay@cryptsoft.com). This product includes software written by Tim
|
||||
Hudson (tjh@cryptsoft.com).
|
||||
========================================================================
|
||||
"""
|
||||
|
||||
import time
|
||||
import uuid
|
||||
import falcon
|
||||
from freezer_api.common import exceptions
|
||||
|
||||
|
||||
class ActionsCollectionResource(object):
    """
    Handler for endpoint: /v1/actions

    Lists registered actions (GET) and creates new action entries
    (POST) on behalf of the user identified by the X-User-ID header.
    """

    def __init__(self, storage_driver):
        # Storage backend used to persist and query action documents.
        self.db = storage_driver

    def on_get(self, req, resp):
        # GET /v1/actions(?limit,offset)     Lists actions
        user_id = req.get_header('X-User-ID')
        skip = req.get_param_as_int('offset') or 0
        page_size = req.get_param_as_int('limit') or 10
        # An optional search document may be supplied in the request body.
        query = req.context.get('doc', {})
        matches = self.db.search_action(user_id=user_id,
                                        offset=skip,
                                        limit=page_size,
                                        search=query)
        req.context['result'] = {'actions': matches}

    def on_post(self, req, resp):
        # POST /v1/actions    Creates action entry
        if 'doc' not in req.context:
            raise exceptions.BadDataFormat(
                message='Missing request body')
        doc = req.context['doc']
        user_id = req.get_header('X-User-ID')
        # Server-assigned fields overwrite anything the client sent;
        # -1 marks the start/end timestamps as "not happened yet".
        now = int(time.time())
        doc.update({
            'action_id': str(uuid.uuid4()),
            'time_created': now,
            'time_started': -1,
            'time_ended': -1,
            'status': 'pending'
        })
        action_id = self.db.add_action(user_id=user_id, doc=doc)
        resp.status = falcon.HTTP_201
        req.context['result'] = {'action_id': action_id}
|
||||
|
||||
|
||||
class ActionsResource(object):
    """
    Handler for endpoint: /v1/actions/{action_id}

    Retrieves, deletes and partially updates a single action document
    owned by the user identified by the X-User-ID header.
    """
    # Status values marking the start / end of an action's lifecycle;
    # reaching one of them triggers the matching timestamp update.
    starting_states = ['started']
    ending_states = ['aborted', 'success', 'fail']

    def __init__(self, storage_driver):
        # Storage backend used to read and modify action documents.
        self.db = storage_driver

    def on_get(self, req, resp, action_id):
        # GET /v1/actions/{action_id}     retrieves the specified action
        # search in body
        owner = req.get_header('X-User-ID') or ''
        found = self.db.get_action(user_id=owner, action_id=action_id)
        if not found:
            resp.status = falcon.HTTP_404
            return
        req.context['result'] = found

    def on_delete(self, req, resp, action_id):
        # DELETE /v1/actions/{action_id}     Deletes the specified action
        owner = req.get_header('X-User-ID')
        self.db.delete_action(user_id=owner, action_id=action_id)
        req.context['result'] = {'action_id': action_id}
        resp.status = falcon.HTTP_204

    def on_patch(self, req, resp, action_id):
        # PATCH /v1/actions/{action_id}     updates the specified action
        owner = req.get_header('X-User-ID') or ''
        changes = req.context.get('doc', {})

        # Some knowledge of internal workings here:
        # status update triggers timestamp update
        status = changes.get('status', '')
        if status in ActionsResource.starting_states:
            stamp_field = 'time_started'
        elif status in ActionsResource.ending_states:
            stamp_field = 'time_ended'
        else:
            stamp_field = None
        if stamp_field is not None:
            changes[stamp_field] = int(time.time())

        version = self.db.update_action(user_id=owner,
                                        action_id=action_id,
                                        patch=changes)
        req.context['result'] = {'action_id': action_id,
                                 'patch': changes,
                                 'version': version}
|
||||
@@ -47,8 +47,7 @@ class BackupsCollectionResource(object):
|
||||
doc = req.context['doc']
|
||||
except KeyError:
|
||||
raise exceptions.BadDataFormat(
|
||||
message='Missing request body',
|
||||
resp_body={'error': 'missing request body'})
|
||||
message='Missing request body')
|
||||
user_name = req.get_header('X-User-Name')
|
||||
user_id = req.get_header('X-User-ID')
|
||||
backup_id = self.db.add_backup(
|
||||
|
||||
@@ -45,8 +45,7 @@ class ClientsCollectionResource(object):
|
||||
doc = req.context['doc']
|
||||
except KeyError:
|
||||
raise exceptions.BadDataFormat(
|
||||
message='Missing request body',
|
||||
resp_body={'error': 'missing request body'})
|
||||
message='Missing request body')
|
||||
user_id = req.get_header('X-User-ID')
|
||||
client_id = self.db.add_client(
|
||||
user_id=user_id, doc=doc)
|
||||
|
||||
@@ -23,52 +23,60 @@ Hudson (tjh@cryptsoft.com).
|
||||
import falcon
|
||||
import logging
|
||||
|
||||
|
||||
class FreezerAPIException(Exception):
|
||||
class FreezerAPIException(falcon.HTTPError):
|
||||
"""
|
||||
Base Freezer API Exception
|
||||
"""
|
||||
json_message = ({'error': 'Unknown exception occurred'})
|
||||
message = "unknown error"
|
||||
|
||||
def __init__(self, message=None, resp_body={}):
|
||||
def __init__(self, message=''):
|
||||
if message:
|
||||
self.message = message
|
||||
self.resp_body = resp_body
|
||||
logging.error(message)
|
||||
Exception.__init__(self, message)
|
||||
|
||||
@staticmethod
|
||||
def handle(ex, req, resp, params):
|
||||
resp.status = falcon.HTTP_500
|
||||
req.context['result'] = {'error': 'internal server error'}
|
||||
raise falcon.HTTPError('500 unknown server error',
|
||||
title="Internal Server Error",
|
||||
description=FreezerAPIException.message)
|
||||
|
||||
|
||||
class BadDataFormat(FreezerAPIException):
|
||||
@staticmethod
|
||||
def handle(ex, req, resp, params):
|
||||
resp.status = falcon.HTTP_400
|
||||
ex.resp_body.update({'error': 'bad data format'})
|
||||
req.context['result'] = ex.resp_body
|
||||
raise falcon.HTTPBadRequest(
|
||||
title="Bad request format",
|
||||
description=ex.message)
|
||||
|
||||
|
||||
class DocumentExists(FreezerAPIException):
|
||||
@staticmethod
|
||||
def handle(ex, req, resp, params):
|
||||
resp.status = falcon.HTTP_409
|
||||
ex.resp_body.update({'error': 'document already exists'})
|
||||
req.context['result'] = ex.resp_body
|
||||
raise falcon.HTTPConflict(
|
||||
title="Document already existing",
|
||||
description=ex.message)
|
||||
|
||||
|
||||
class StorageEngineError(FreezerAPIException):
|
||||
@staticmethod
|
||||
def handle(ex, req, resp, params):
|
||||
resp.status = falcon.HTTP_500
|
||||
ex.resp_body.update({'error': 'storage engine'})
|
||||
req.context['result'] = ex.resp_body
|
||||
raise falcon.HTTPInternalServerError(
|
||||
title="Internal Storage Error",
|
||||
description=ex.message)
|
||||
|
||||
|
||||
class DocumentNotFound(FreezerAPIException):
|
||||
@staticmethod
|
||||
def handle(ex, req, resp, params):
|
||||
raise falcon.HTTPNotFound(
|
||||
title="Not Found",
|
||||
description=ex.message)
|
||||
|
||||
|
||||
exception_handlers_catalog = [
|
||||
BadDataFormat,
|
||||
DocumentExists,
|
||||
StorageEngineError
|
||||
StorageEngineError,
|
||||
DocumentNotFound
|
||||
]
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
"""
|
||||
Copyright 2014 Hewlett-Packard
|
||||
Copyright 2015 Hewlett-Packard
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
@@ -43,50 +43,63 @@ class TypeManager:
|
||||
@staticmethod
|
||||
def get_search_query(user_id, doc_id, search={}):
|
||||
base_filter = TypeManager.get_base_search_filter(user_id, search)
|
||||
return {"filter": {"bool": {"must": base_filter}}}
|
||||
query_filter = {"filter": {"bool": {"must": base_filter}}}
|
||||
return {'query': {'filtered': query_filter}}
|
||||
|
||||
def search(self, user_id, doc_id, search={}, offset=0, limit=10):
|
||||
def get(self, user_id, doc_id):
|
||||
try:
|
||||
res = self.es.get(index=self.index,
|
||||
doc_type=self.doc_type,
|
||||
id=doc_id)
|
||||
doc = res['_source']
|
||||
if doc['user_id'] != user_id:
|
||||
raise elasticsearch.TransportError()
|
||||
except elasticsearch.TransportError:
|
||||
raise exceptions.DocumentNotFound(
|
||||
message='No document found with ID {0}'.format(doc_id))
|
||||
except Exception as e:
|
||||
raise exceptions.StorageEngineError(
|
||||
message='Get operation failed: {0}'.format(e))
|
||||
return doc
|
||||
|
||||
def search(self, user_id, doc_id=None, search={}, offset=0, limit=10):
|
||||
try:
|
||||
query_dsl = self.get_search_query(user_id, doc_id, search)
|
||||
except:
|
||||
raise exceptions.StorageEngineError(
|
||||
message='search operation failed: query not valid',
|
||||
resp_body={'engine exception': 'invalid query'})
|
||||
message='search operation failed: query not valid')
|
||||
try:
|
||||
res = self.es.search(index=self.index, doc_type=self.doc_type,
|
||||
size=limit, from_=offset, body=query_dsl)
|
||||
except Exception as e:
|
||||
raise exceptions.StorageEngineError(
|
||||
message='search operation failed',
|
||||
resp_body={'engine exception': '{0}'.format(e)})
|
||||
message='search operation failed: {0}'.format(e))
|
||||
hit_list = res['hits']['hits']
|
||||
return [x['_source'] for x in hit_list]
|
||||
|
||||
def insert(self, doc):
|
||||
def insert(self, doc, doc_id=None):
|
||||
try:
|
||||
res = self.es.index(index=self.index, doc_type=self.doc_type,
|
||||
body=doc)
|
||||
body=doc, id=doc_id)
|
||||
created = res['created']
|
||||
except Exception as e:
|
||||
raise exceptions.StorageEngineError(
|
||||
message='index operation failed',
|
||||
resp_body={'engine exception': '{0}'.format(e)})
|
||||
return res['created']
|
||||
message='index operation failed {0}'.format(e))
|
||||
return created
|
||||
|
||||
def delete(self, user_id, doc_id):
|
||||
try:
|
||||
query_dsl = self.get_search_query(user_id, doc_id)
|
||||
except:
|
||||
raise exceptions.StorageEngineError(
|
||||
message='delete operation failed: query not valid',
|
||||
resp_body={'engine exception': 'invalid query'})
|
||||
message='Delete operation failed: query not valid')
|
||||
try:
|
||||
self.es.delete_by_query(index=self.index,
|
||||
doc_type=self.doc_type,
|
||||
body=query_dsl)
|
||||
except Exception as e:
|
||||
raise exceptions.StorageEngineError(
|
||||
message='delete operation failed',
|
||||
resp_body={'engine exception': '{0}'.format(e)})
|
||||
message='Delete operation failed: {0}'.format(e))
|
||||
return doc_id
|
||||
|
||||
|
||||
@@ -109,7 +122,8 @@ class BackupTypeManager(TypeManager):
|
||||
base_filter.append(
|
||||
{"range": {"timestamp": {"lte": int(search['time_before'])}}}
|
||||
)
|
||||
return {"filter": {"bool": {"must": base_filter}}}
|
||||
query_filter = {"filter": {"bool": {"must": base_filter}}}
|
||||
return {'query': {'filtered': query_filter}}
|
||||
|
||||
|
||||
class ClientTypeManager(TypeManager):
|
||||
@@ -121,7 +135,37 @@ class ClientTypeManager(TypeManager):
|
||||
base_filter = TypeManager.get_base_search_filter(user_id, search)
|
||||
if doc_id is not None:
|
||||
base_filter.append({"term": {"client_id": doc_id}})
|
||||
return {"filter": {"bool": {"must": base_filter}}}
|
||||
query_filter = {"filter": {"bool": {"must": base_filter}}}
|
||||
return {'query': {'filtered': query_filter}}
|
||||
|
||||
|
||||
class ActionTypeManager(TypeManager):
|
||||
def __init__(self, es, doc_type, index='freezer'):
|
||||
TypeManager.__init__(self, es, doc_type, index=index)
|
||||
|
||||
@staticmethod
|
||||
def get_search_query(user_id, doc_id, search={}):
|
||||
base_filter = TypeManager.get_base_search_filter(user_id, search)
|
||||
if doc_id is not None:
|
||||
base_filter.append({"term": {"action_id": doc_id}})
|
||||
query_filter = {"filter": {"bool": {"must": base_filter}}}
|
||||
return {'query': {'filtered': query_filter}}
|
||||
|
||||
def update(self, action_id, action_update_doc):
|
||||
update_doc = {"doc": action_update_doc}
|
||||
try:
|
||||
res = self.es.update(index=self.index, doc_type=self.doc_type,
|
||||
id=action_id, body=update_doc)
|
||||
version = res['_version']
|
||||
except elasticsearch.TransportError:
|
||||
raise exceptions.DocumentNotFound(
|
||||
message='Unable to find action to update '
|
||||
'with ID {0} '.format(action_id))
|
||||
except Exception as e:
|
||||
raise exceptions.StorageEngineError(
|
||||
message='Unable to update action, '
|
||||
'action ID: {0} '.format(action_id))
|
||||
return version
|
||||
|
||||
|
||||
class ElasticSearchEngine(object):
|
||||
@@ -132,6 +176,7 @@ class ElasticSearchEngine(object):
|
||||
logging.info('Using Elasticsearch host {0}'.format(hosts))
|
||||
self.backup_manager = BackupTypeManager(self.es, 'backups')
|
||||
self.client_manager = ClientTypeManager(self.es, 'clients')
|
||||
self.action_manager = ActionTypeManager(self.es, 'actions')
|
||||
|
||||
def get_backup(self, user_id, backup_id=None, offset=0, limit=10, search={}):
|
||||
return self.backup_manager.search(user_id,
|
||||
@@ -149,14 +194,12 @@ class ElasticSearchEngine(object):
|
||||
existing = self.backup_manager.search(user_id, backup_id)
|
||||
if existing: # len(existing) > 0
|
||||
raise exceptions.DocumentExists(
|
||||
message='Backup data already existing ({0})'.format(backup_id),
|
||||
resp_body={'backup_id': backup_id})
|
||||
message='Backup data already existing '
|
||||
'with ID {0}'.format(backup_id))
|
||||
if not self.backup_manager.insert(backup_metadata_doc.serialize()):
|
||||
raise exceptions.StorageEngineError(
|
||||
message='index operation failed',
|
||||
resp_body={'backup_id': backup_id})
|
||||
logging.info('Backup metadata indexed, backup_id: {0}'.
|
||||
format(backup_id))
|
||||
message='Index operation failed, '
|
||||
'backup ID: {0}'.format(backup_id))
|
||||
return backup_id
|
||||
|
||||
def delete_backup(self, user_id, backup_id):
|
||||
@@ -172,18 +215,17 @@ class ElasticSearchEngine(object):
|
||||
def add_client(self, user_id, doc):
|
||||
client_id = doc.get('client_id', None)
|
||||
if client_id is None:
|
||||
raise exceptions.BadDataFormat(message='Bad Data Format')
|
||||
raise exceptions.BadDataFormat(message='Missing client ID')
|
||||
existing = self.client_manager.search(user_id, client_id)
|
||||
if existing: # len(existing) > 0
|
||||
raise exceptions.DocumentExists(
|
||||
message='Client already registered ({0})'.format(client_id),
|
||||
resp_body={'client_id': client_id})
|
||||
message='Client already registered with ID {0}'.format(client_id))
|
||||
client_doc = {'client': doc,
|
||||
'user_id': user_id}
|
||||
if not self.client_manager.insert(client_doc):
|
||||
raise exceptions.StorageEngineError(
|
||||
message='index operation failed',
|
||||
resp_body={'client_id': client_id})
|
||||
message='Index operation failed, '
|
||||
'client ID: {0}'.format(client_id))
|
||||
logging.info('Client registered, client_id: {0}'.
|
||||
format(client_id))
|
||||
return client_id
|
||||
@@ -191,5 +233,42 @@ class ElasticSearchEngine(object):
|
||||
def delete_client(self, user_id, client_id):
|
||||
return self.client_manager.delete(user_id, client_id)
|
||||
|
||||
def get_action(self, user_id, action_id):
|
||||
return self.action_manager.get(user_id, action_id)
|
||||
|
||||
def search_action(self, user_id, offset=0, limit=10, search={}):
|
||||
return self.action_manager.search(user_id,
|
||||
search=search,
|
||||
offset=offset,
|
||||
limit=limit)
|
||||
|
||||
def add_action(self, user_id, doc):
|
||||
action_id = doc.get('action_id', None)
|
||||
if action_id is None:
|
||||
raise exceptions.BadDataFormat(message='Missing action ID')
|
||||
action_doc = {'action': doc,
|
||||
'user_id': user_id}
|
||||
if not self.action_manager.insert(action_doc, action_id):
|
||||
raise exceptions.StorageEngineError(
|
||||
message='Index operation failed, '
|
||||
' action ID: {0}'.format(action_id))
|
||||
logging.info('Action registered, action ID: {0}'.
|
||||
format(action_id))
|
||||
return action_id
|
||||
|
||||
def delete_action(self, user_id, action_id):
|
||||
return self.action_manager.delete(user_id, action_id)
|
||||
|
||||
def update_action(self, user_id, action_id, patch):
|
||||
if 'action_id' in patch:
|
||||
raise exceptions.BadDataFormat(
|
||||
message='Action ID modification is not allowed, '
|
||||
'action ID: {0}'.format(action_id))
|
||||
action_doc = self.action_manager.get(user_id, action_id)
|
||||
action_doc['action'].update(patch)
|
||||
version = self.action_manager.update(action_id, action_doc)
|
||||
logging.info('Action {0} updated to version {1}'.
|
||||
format(action_id, version))
|
||||
return version
|
||||
|
||||
|
||||
|
||||
@@ -170,6 +170,73 @@ fake_data_0_elasticsearch_miss = {
|
||||
"took": 1
|
||||
}
|
||||
|
||||
# --- Fixtures for the /v1/actions API tests ---

fake_action_0_user_id = "f4db4da085f043059441565720b217c7"
fake_action_0_action_id = "e7181e5e-2c75-43f8-92c0-c037ae5f11e4"

# Elasticsearch "get" reply for a document that does not exist.
# NOTE(review): the _id has a trailing '3' appended to the real action
# id — presumably deliberate, to represent a near-miss lookup; confirm.
fake_action_0_elasticsearch_not_found = {
    "_id": "e7181e5e-2c75-43f8-92c0-c037ae5f11e43",
    "_index": "freezer",
    "_type": "actions",
    "found": False
}

# A complete action document as submitted/stored by the API.
# NOTE(review): keys "time_end"/"time_start" differ from the
# "time_ended"/"time_started" fields written by actions.py — confirm
# which spelling is canonical.
fake_action_0 = {
    "action_id": "e7181e5e-2c75-43f8-92c0-c037ae5f11e4",
    "client_id": "mytenantid_myhostname",
    "description": "test action 4",
    "job": {
        "action": "restore",
        "backup-name": "project_mayhem_backup",
        "container": "my_backup_container",
        "max_cpu_priority": True,
        "restore-abs-path": "/home/tylerdurden/project_mayhem",
        "restore-from-host": "another_host"
    },
    "status": "pending",
    "time_created": 1431100962,
    "time_end": 0,
    "time_start": 0
}

# Wrapped form: the document actually indexed in elasticsearch,
# pairing the action with its owning user.
fake_action_0_doc = {
    "action": fake_action_0,
    "user_id": "f4db4da085f043059441565720b217c7"
}

# Elasticsearch "get" reply for a document that exists.
fake_action_0_elasticsearch_found = {
    "_id": "e7181e5e-2c75-43f8-92c0-c037ae5f11e4",
    "_index": "freezer",
    "_source": fake_action_0_doc,
    "_type": "actions",
    "_version": 1,
    "found": True
}


# A second action owned by the same user, for multi-result listings.
fake_action_1 = {
    "action_id": "1b05e367-7832-42df-850e-bc48eabee04e",
    "client_id": "mytenantid_myhostname",
    "description": "test action 4",
    "job": {
        "action": "restore",
        "backup-name": "project_mayhem_backup",
        "container": "my_backup_container",
        "max_cpu_priority": True,
        "restore-abs-path": "/home/tylerdurden/project_mayhem",
        "restore-from-host": "another_host"
    },
    "status": "pending",
    "time_created": 1431100962,
    "time_end": 0,
    "time_start": 0
}

fake_action_1_doc = {
    "action": fake_action_1,
    "user_id": "f4db4da085f043059441565720b217c7"
}
|
||||
|
||||
|
||||
fake_data_1_wrapped_backup_metadata = {
|
||||
'backup_id': 'freezer_container_alpha_important_data_backup_125235431_1',
|
||||
'user_id': 'qwerty1234',
|
||||
|
||||
239
tests/test_actions.py
Normal file
239
tests/test_actions.py
Normal file
@@ -0,0 +1,239 @@
|
||||
import unittest
|
||||
from mock import Mock, patch
|
||||
|
||||
import time
|
||||
import random
|
||||
import falcon
|
||||
|
||||
from common import *
|
||||
from freezer_api.common.exceptions import *
|
||||
|
||||
from freezer_api.api.v1 import actions as v1_actions
|
||||
|
||||
|
||||
class TestClientsCollectionResource(unittest.TestCase):
    # Tests for v1_actions.ActionsCollectionResource (/v1/actions).
    # NOTE(review): the class name looks copy-pasted from the client
    # tests; TestActionsCollectionResource would be more accurate.

    def setUp(self):
        # The mocked request doubles as the response object in the
        # calls below, so resp.status assertions read mock_req.status.
        self.mock_db = Mock()
        self.mock_req = Mock()
        self.mock_req.get_header.return_value = fake_action_0_user_id
        self.mock_req.context = {}
        self.mock_req.status = falcon.HTTP_200
        self.resource = v1_actions.ActionsCollectionResource(self.mock_db)

    def test_on_get_return_empty_list(self):
        # With no matching actions the endpoint returns an empty list.
        self.mock_db.search_action.return_value = []
        expected_result = {'actions': []}
        self.resource.on_get(self.mock_req, self.mock_req)
        result = self.mock_req.context['result']
        self.assertEqual(result, expected_result)
        self.assertEqual(self.mock_req.status, falcon.HTTP_200)

    def test_on_get_return_correct_list(self):
        # Documents returned by the db are passed through unchanged.
        self.mock_db.search_action.return_value = [fake_action_0_doc, fake_action_1_doc]
        expected_result = {'actions': [fake_action_0_doc, fake_action_1_doc]}
        self.resource.on_get(self.mock_req, self.mock_req)
        result = self.mock_req.context['result']
        self.assertEqual(result, expected_result)
        self.assertEqual(self.mock_req.status, falcon.HTTP_200)

    def test_on_post_raises_when_missing_body(self):
        # A POST without a request body must fail with BadDataFormat.
        self.mock_db.add_action.return_value = fake_action_0_action_id
        self.assertRaises(BadDataFormat, self.resource.on_post, self.mock_req, self.mock_req)

    def test_on_post_inserts_correct_data(self):
        action = fake_action_0.copy()
        self.mock_req.context['doc'] = action
        self.mock_db.add_action.return_value = fake_action_0_action_id
        expected_result = {'action_id': fake_action_0_action_id}
        self.resource.on_post(self.mock_req, self.mock_req)
        self.assertEqual(self.mock_req.status, falcon.HTTP_201)
        self.assertEqual(self.mock_req.context['result'], expected_result)
        # on_post generates a fresh uuid for the stored document, so the
        # action_id written into the doc differs from the fixture's id.
        assigned_action_id = self.mock_req.context['doc']['action_id']
        self.assertNotEqual(assigned_action_id, fake_action_0_action_id)
|
||||
|
||||
class TestClientsResource(unittest.TestCase):
    """Tests for v1_actions.ActionsResource (/v1/actions/{action_id}).

    NOTE(review): the class name appears copy-pasted from the client
    tests; TestActionsResource would describe it better.

    Fixes in this revision: the five near-identical on_patch tests are
    deduplicated into the _assert_patch helper, and the deprecated
    assertEquals alias is replaced with assertEqual.
    """

    def setUp(self):
        # The mocked request doubles as the response object in the
        # calls below, so resp.status assertions read mock_req.status.
        self.mock_db = Mock()
        self.mock_req = Mock()
        self.mock_req.get_header.return_value = fake_action_0_user_id
        self.mock_req.context = {}
        self.mock_req.status = falcon.HTTP_200
        self.resource = v1_actions.ActionsResource(self.mock_db)

    def _assert_patch(self, patch_doc, timestamp_field=None):
        """Run on_patch with *patch_doc*, verifying the db call and result.

        If *timestamp_field* is given, the handler is expected to add
        that field (set to the mocked current time) to the patch.
        """
        timestamp = int(time.time())
        new_version = random.randint(0, 99)
        self.mock_db.update_action.return_value = new_version
        self.mock_req.context['doc'] = patch_doc

        expected_patch = patch_doc.copy()
        if timestamp_field is not None:
            expected_patch[timestamp_field] = timestamp
        expected_result = {'action_id': fake_action_0_action_id,
                           'patch': expected_patch,
                           'version': new_version}

        with patch('freezer_api.api.v1.actions.time') as mock_time:
            mock_time.time.return_value = timestamp
            self.resource.on_patch(self.mock_req, self.mock_req,
                                   fake_action_0_action_id)

        self.mock_db.update_action.assert_called_with(
            user_id=fake_action_0_user_id,
            action_id=fake_action_0_action_id,
            patch=expected_patch)
        self.assertEqual(self.mock_req.status, falcon.HTTP_200)
        self.assertEqual(self.mock_req.context['result'], expected_result)

    def test_create_resource(self):
        self.assertIsInstance(self.resource, v1_actions.ActionsResource)

    def test_on_get_return_no_result_and_404_when_not_found(self):
        self.mock_db.get_action.return_value = None
        self.resource.on_get(self.mock_req, self.mock_req,
                             fake_action_0_action_id)
        self.assertNotIn('result', self.mock_req.context)
        self.assertEqual(self.mock_req.status, falcon.HTTP_404)

    def test_on_get_return_correct_data(self):
        self.mock_db.get_action.return_value = fake_action_0
        self.resource.on_get(self.mock_req, self.mock_req,
                             fake_action_0_action_id)
        self.assertEqual(self.mock_req.context['result'], fake_action_0)
        self.assertEqual(self.mock_req.status, falcon.HTTP_200)

    def test_on_delete_removes_proper_data(self):
        self.resource.on_delete(self.mock_req, self.mock_req,
                                fake_action_0_action_id)
        expected_result = {'action_id': fake_action_0_action_id}
        # was assertEquals (deprecated alias)
        self.assertEqual(self.mock_req.status, falcon.HTTP_204)
        self.assertEqual(self.mock_req.context['result'], expected_result)

    def test_on_patch_ok_with_some_fields(self):
        self._assert_patch({'some_field': 'some_value',
                            'because': 'size_matters'})

    def test_on_patch_no_timestamp_on_unknown_status(self):
        # An unrecognized status must not trigger a timestamp update.
        self._assert_patch({'some_field': 'some_value',
                            'status': 'happy'})

    def test_on_patch_adds_correct_start_time(self):
        self._assert_patch({'some_field': 'some_value',
                            'status': 'started'},
                           timestamp_field='time_started')

    def test_on_patch_adds_correct_end_time_on_abort(self):
        self._assert_patch({'some_field': 'some_value',
                            'status': 'aborted'},
                           timestamp_field='time_ended')

    def test_on_patch_adds_correct_end_time_on_success(self):
        self._assert_patch({'some_field': 'some_value',
                            'status': 'success'},
                           timestamp_field='time_ended')

    def test_on_patch_adds_correct_end_time_on_fail(self):
        self._assert_patch({'some_field': 'some_value',
                            'status': 'fail'},
                           timestamp_field='time_ended')
|
||||
@@ -15,7 +15,7 @@ class TestClientsCollectionResource(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.mock_db = Mock()
|
||||
self.mock_req = Mock()
|
||||
self.mock_req.get_header.return_value = {'X-User-ID': fake_data_0_user_id}
|
||||
self.mock_req.get_header.return_value = fake_data_0_user_id
|
||||
self.mock_req.context = {}
|
||||
self.mock_req.status = falcon.HTTP_200
|
||||
self.resource = v1_clients.ClientsCollectionResource(self.mock_db)
|
||||
@@ -37,7 +37,7 @@ class TestClientsCollectionResource(unittest.TestCase):
|
||||
self.assertEqual(self.mock_req.status, falcon.HTTP_200)
|
||||
|
||||
def test_on_post_raises_when_missing_body(self):
|
||||
self.mock_db.add_client.return_value = [fake_client_info_0['client_id']]
|
||||
self.mock_db.add_client.return_value = fake_client_info_0['client_id']
|
||||
self.assertRaises(BadDataFormat, self.resource.on_post, self.mock_req, self.mock_req)
|
||||
|
||||
def test_on_post_inserts_correct_data(self):
|
||||
@@ -54,7 +54,7 @@ class TestClientsResource(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.mock_db = Mock()
|
||||
self.mock_req = Mock()
|
||||
self.mock_req.get_header.return_value = {'X-User-ID': fake_data_0_user_id}
|
||||
self.mock_req.get_header.return_value = fake_data_0_user_id
|
||||
self.mock_req.context = {}
|
||||
self.mock_req.status = falcon.HTTP_200
|
||||
self.resource = v1_clients.ClientsResource(self.mock_db)
|
||||
|
||||
@@ -23,6 +23,8 @@ Hudson (tjh@cryptsoft.com).
|
||||
import unittest
|
||||
from mock import Mock, patch
|
||||
|
||||
from elasticsearch import TransportError
|
||||
|
||||
from freezer_api.storage import elastic
|
||||
from common import *
|
||||
from freezer_api.common.exceptions import *
|
||||
@@ -47,9 +49,33 @@ class TypeManager(unittest.TestCase):
|
||||
]}}}]
|
||||
self.assertEqual(q, expected_q)
|
||||
|
||||
def test_get_ok(self):
|
||||
self.mock_es.get.return_value = fake_action_0_elasticsearch_found
|
||||
res = self.type_manager.get(user_id=fake_action_0_user_id,
|
||||
doc_id=fake_action_0_action_id)
|
||||
self.assertEqual(res, fake_action_0_doc)
|
||||
|
||||
def test_get_raise_DocumentNotFound_when_doc_not_found(self):
|
||||
self.mock_es.get.side_effect = TransportError('regular test failure')
|
||||
self.assertRaises(DocumentNotFound, self.type_manager.get,
|
||||
user_id=fake_action_0_user_id,
|
||||
doc_id=fake_action_0_action_id)
|
||||
|
||||
def test_get_raise_StorageEngineError_when_db_raises(self):
|
||||
self.mock_es.get.side_effect = Exception('regular test failure')
|
||||
self.assertRaises(StorageEngineError, self.type_manager.get,
|
||||
user_id=fake_action_0_user_id,
|
||||
doc_id=fake_action_0_action_id)
|
||||
|
||||
def test_get_raises_DocumentNotFound_when_user_id_not_match(self):
|
||||
self.mock_es.get.return_value = fake_action_0_elasticsearch_found
|
||||
self.assertRaises(DocumentNotFound, self.type_manager.get,
|
||||
user_id='obluraschi',
|
||||
doc_id=fake_action_0_action_id)
|
||||
|
||||
def test_search_ok(self):
|
||||
self.mock_es.search.return_value = fake_data_0_elasticsearch_hit
|
||||
expected_q = {'filter':
|
||||
expected_q = {'query': {'filtered': {'filter':
|
||||
{'bool':
|
||||
{'must':
|
||||
[{'term': {'user_id': 'my_user_id'}},
|
||||
@@ -58,7 +84,7 @@ class TypeManager(unittest.TestCase):
|
||||
{'must':
|
||||
[{'match': {'some_field': 'some text'}},
|
||||
{'match': {'description': 'some other text'}}]}}}
|
||||
]}}}
|
||||
]}}}}}
|
||||
my_search = {'match': [{'some_field': 'some text'},
|
||||
{'description': 'some other text'}]}
|
||||
res = self.type_manager.search(user_id='my_user_id', doc_id='mydocid', search=my_search, offset=7, limit=19)
|
||||
@@ -70,30 +96,27 @@ class TypeManager(unittest.TestCase):
|
||||
self.assertRaises(StorageEngineError, self.type_manager.search, user_id='my_user_id', doc_id='mydocid')
|
||||
|
||||
def test_insert_ok(self):
|
||||
self.mock_es.index.return_value = {'created': True} # question: elasticsearch returns bool or string ?
|
||||
self.mock_es.index.return_value = {'created': True}
|
||||
test_doc = {'test_key_412': 'test_value_412'}
|
||||
res = self.type_manager.insert(doc=test_doc)
|
||||
self.assertEqual(res, True)
|
||||
self.mock_es.index.assert_called_with(index='freezer', doc_type='base_doc_type', body=test_doc)
|
||||
self.mock_es.index.assert_called_with(index='freezer', doc_type='base_doc_type', body=test_doc, id=None)
|
||||
|
||||
def test_insert_fails(self):
|
||||
self.mock_es.index.side_effect = Exception('regular test failure')
|
||||
test_doc = {'test_key_412': 'test_value_412'}
|
||||
self.assertRaises(StorageEngineError, self.type_manager.insert, doc=test_doc)
|
||||
self.mock_es.index.assert_called_with(index='freezer', doc_type='base_doc_type', body=test_doc)
|
||||
self.mock_es.index.assert_called_with(index='freezer', doc_type='base_doc_type', body=test_doc, id=None)
|
||||
|
||||
def test_delete(self):
|
||||
#self.mock_es.delete_by_query.return_value = True
|
||||
doc_id='mydocid345'
|
||||
res = self.type_manager.delete(user_id='my_user_id', doc_id=doc_id)
|
||||
self.assertEqual(res, doc_id)
|
||||
#self.mock_es.delete_by_query.assert_called_with(index='freezer', doc_type='base_doc_type', body=expected_q)
|
||||
|
||||
def test_delete_fails(self):
|
||||
self.mock_es.delete_by_query.side_effect = Exception('regular test failure')
|
||||
doc_id='mydocid345'
|
||||
self.assertRaises(StorageEngineError, self.type_manager.delete, user_id='my_user_id', doc_id=doc_id)
|
||||
#self.mock_es.delete_by_query.assert_called_with(index='freezer', doc_type='base_doc_type', body=expected_q)
|
||||
|
||||
|
||||
class TestBackupManager(unittest.TestCase):
|
||||
@@ -108,7 +131,7 @@ class TestBackupManager(unittest.TestCase):
|
||||
"time_after": 1428510506
|
||||
}
|
||||
q = self.backup_manager.get_search_query('my_user_id', 'my_doc_id', search=my_search)
|
||||
expected_q = {'filter':
|
||||
expected_q = {'query': {'filtered': {'filter':
|
||||
{'bool':
|
||||
{'must':
|
||||
[{'term': {'user_id': 'my_user_id'}},
|
||||
@@ -117,7 +140,7 @@ class TestBackupManager(unittest.TestCase):
|
||||
{'term': {'backup_id': 'my_doc_id'}},
|
||||
{'range': {'timestamp': {'gte': 1428510506}}},
|
||||
{'range': {'timestamp': {'lte': 1428510506}}}
|
||||
]}}}
|
||||
]}}}}}
|
||||
|
||||
self.assertEqual(q, expected_q)
|
||||
|
||||
@@ -132,7 +155,7 @@ class ClientTypeManager(unittest.TestCase):
|
||||
my_search = {'match': [{'some_field': 'some text'},
|
||||
{'description': 'some other text'}]}
|
||||
q = self.client_manager.get_search_query('my_user_id', 'my_doc_id', search=my_search)
|
||||
expected_q = {'filter':
|
||||
expected_q = {'query': {'filtered': {'filter':
|
||||
{'bool':
|
||||
{'must':
|
||||
[{'term': {'user_id': 'my_user_id'}},
|
||||
@@ -142,10 +165,62 @@ class ClientTypeManager(unittest.TestCase):
|
||||
[{'match': {'some_field': 'some text'}},
|
||||
{'match': {'description': 'some other text'}}]}}},
|
||||
{'term': {'client_id': 'my_doc_id'}}
|
||||
]}}}
|
||||
]}}}}}
|
||||
self.assertEqual(q, expected_q)
|
||||
|
||||
|
||||
class ActionTypeManager(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.mock_es = Mock()
|
||||
self.action_manager = elastic.ActionTypeManager(self.mock_es, 'clients')
|
||||
|
||||
def test_get_search_query(self):
|
||||
my_search = {'match': [{'some_field': 'some text'},
|
||||
{'description': 'some other text'}]}
|
||||
q = self.action_manager.get_search_query('my_user_id', 'my_doc_id', search=my_search)
|
||||
expected_q = {'query': {'filtered': {'filter':
|
||||
{'bool':
|
||||
{'must':
|
||||
[{'term': {'user_id': 'my_user_id'}},
|
||||
{'query':
|
||||
{'bool':
|
||||
{'must':
|
||||
[{'match': {'some_field': 'some text'}},
|
||||
{'match': {'description': 'some other text'}}]}}},
|
||||
{'term': {'action_id': 'my_doc_id'}}
|
||||
]}}}}}
|
||||
self.assertEqual(q, expected_q)
|
||||
|
||||
def test_update_ok(self):
|
||||
self.mock_es.update.return_value = {
|
||||
u'_id': u'd6c1e00d-b9c1-4eb3-8219-1e83c02af101',
|
||||
u'_index': u'freezer',
|
||||
u'_type': u'actions',
|
||||
u'_version': 3
|
||||
}
|
||||
res = self.action_manager.update(action_id=fake_action_0_action_id,
|
||||
action_update_doc={'status': 'sleepy'})
|
||||
self.assertEqual(res, 3)
|
||||
self.mock_es.update.assert_called_with(index=self.action_manager.index,
|
||||
doc_type=self.action_manager.doc_type,
|
||||
id=fake_action_0_action_id,
|
||||
body={"doc": {'status': 'sleepy'}})
|
||||
|
||||
def test_update_raise_DocumentNotFound_when_not_found(self):
|
||||
self.mock_es.update.side_effect = TransportError('regular test failure')
|
||||
self.assertRaises(DocumentNotFound, self.action_manager.update,
|
||||
action_id=fake_action_0_action_id,
|
||||
action_update_doc={'status': 'sleepy'})
|
||||
|
||||
def test_update_raise_StorageEngineError_when_db_raises(self):
|
||||
self.mock_es.update.side_effect = Exception('regular test failure')
|
||||
self.assertRaises(StorageEngineError, self.action_manager.update,
|
||||
action_id=fake_action_0_action_id,
|
||||
action_update_doc={'status': 'sleepy'})
|
||||
|
||||
|
||||
|
||||
class TestElasticSearchEngine_backup(unittest.TestCase):
|
||||
|
||||
@patch('freezer_api.storage.elastic.logging')
|
||||
@@ -273,7 +348,7 @@ class TestElasticSearchEngine_client(unittest.TestCase):
|
||||
self.eng = elastic.ElasticSearchEngine('http://elasticservaddr:1997')
|
||||
self.eng.client_manager = Mock()
|
||||
|
||||
def test_get_client_userid_and_backup_id_return_1elem_list_(self):
|
||||
def test_get_client_userid_and_client_id_return_1elem_list_(self):
|
||||
self.eng.client_manager.search.return_value = [fake_client_entry_0]
|
||||
my_search = {'match': [{'some_field': 'some text'},
|
||||
{'description': 'some other text'}]}
|
||||
@@ -363,3 +438,134 @@ class TestElasticSearchEngine_client(unittest.TestCase):
|
||||
self.assertRaises(StorageEngineError, self.eng.delete_client,
|
||||
user_id=fake_data_0_user_id,
|
||||
client_id=fake_client_info_0['client_id'])
|
||||
|
||||
class TestElasticSearchEngine_action(unittest.TestCase):
|
||||
|
||||
@patch('freezer_api.storage.elastic.logging')
|
||||
@patch('freezer_api.storage.elastic.elasticsearch')
|
||||
def setUp(self, mock_elasticsearch, mock_logging):
|
||||
mock_elasticsearch.Elasticsearch.return_value = Mock()
|
||||
self.eng = elastic.ElasticSearchEngine('http://elasticservaddr:1997')
|
||||
self.eng.action_manager = Mock()
|
||||
|
||||
def test_get_action_userid_and_action_id_return_doc(self):
|
||||
self.eng.action_manager.get.return_value = fake_action_0_doc
|
||||
res = self.eng.get_action(user_id=fake_client_entry_0['user_id'],
|
||||
action_id=fake_client_info_0['client_id'])
|
||||
self.assertEqual(res, fake_action_0_doc)
|
||||
self.eng.action_manager.get.assert_called_with(
|
||||
fake_client_entry_0['user_id'],
|
||||
fake_client_info_0['client_id'])
|
||||
|
||||
def test_get_action_userid_and_action_id_return_none(self):
|
||||
self.eng.action_manager.get.return_value = None
|
||||
res = self.eng.get_action(user_id=fake_client_entry_0['user_id'],
|
||||
action_id=fake_client_info_0['client_id'])
|
||||
self.assertEqual(res, None)
|
||||
self.eng.action_manager.get.assert_called_with(
|
||||
fake_client_entry_0['user_id'],
|
||||
fake_client_info_0['client_id'])
|
||||
|
||||
def test_get_action_with_userid_and_search_return_list(self):
|
||||
self.eng.action_manager.search.return_value = \
|
||||
[fake_action_0_doc, fake_action_0_doc]
|
||||
my_search = {'match': [{'some_field': 'some text'},
|
||||
{'description': 'some other text'}]}
|
||||
res = self.eng.search_action(user_id=fake_action_0_doc['user_id'],
|
||||
offset=6, limit=15,
|
||||
search=my_search)
|
||||
self.assertEqual(res, [fake_action_0_doc, fake_action_0_doc])
|
||||
self.eng.action_manager.search.assert_called_with(
|
||||
fake_action_0_doc['user_id'],
|
||||
search=my_search,
|
||||
limit=15, offset=6)
|
||||
|
||||
def test_get_action_with_userid_and_search_return_empty_list(self):
|
||||
self.eng.action_manager.search.return_value = []
|
||||
my_search = {'match': [{'some_field': 'some text'},
|
||||
{'description': 'some other text'}]}
|
||||
res = self.eng.search_action(user_id=fake_action_0_doc['user_id'],
|
||||
offset=6, limit=15,
|
||||
search=my_search)
|
||||
self.assertEqual(res, [])
|
||||
self.eng.action_manager.search.assert_called_with(
|
||||
fake_action_0_doc['user_id'],
|
||||
search=my_search,
|
||||
limit=15, offset=6)
|
||||
|
||||
def test_add_action_raises_BadDataFormat_when_data_is_malformed(self):
|
||||
doc = fake_action_0.copy()
|
||||
doc.pop('action_id')
|
||||
self.assertRaises(BadDataFormat, self.eng.add_action,
|
||||
user_id=fake_action_0_doc['user_id'],
|
||||
doc=doc)
|
||||
|
||||
def test_add_action_ok(self):
|
||||
self.eng.action_manager.insert.return_value = fake_action_0_action_id
|
||||
res = self.eng.add_action(user_id=fake_action_0_user_id,
|
||||
doc=fake_action_0)
|
||||
self.assertEqual(res, fake_action_0_action_id)
|
||||
self.eng.action_manager.insert.assert_called_with(
|
||||
{'action': fake_action_0,
|
||||
'user_id': fake_action_0_user_id},
|
||||
fake_action_0_action_id)
|
||||
|
||||
def test_add_action_raises_StorageEngineError_when_manager_insert_raises(self):
|
||||
self.eng.action_manager.get.return_value = None
|
||||
self.eng.action_manager.insert.side_effect = StorageEngineError('regular test failure')
|
||||
self.assertRaises(StorageEngineError, self.eng.add_action,
|
||||
user_id=fake_action_0_user_id,
|
||||
doc=fake_action_0)
|
||||
|
||||
def test_add_action_raises_StorageEngineError_when_manager_insert_fails_without_raise(self):
|
||||
self.eng.action_manager.get.return_value = None
|
||||
self.eng.action_manager.insert.return_value = False
|
||||
self.assertRaises(StorageEngineError, self.eng.add_action,
|
||||
user_id=fake_action_0_user_id,
|
||||
doc=fake_action_0)
|
||||
|
||||
def test_delete_action_ok(self):
|
||||
self.eng.action_manager.delete.return_value = fake_action_0['action_id']
|
||||
res = self.eng.delete_action(user_id=fake_action_0_user_id,
|
||||
action_id=fake_action_0_action_id)
|
||||
self.assertEqual(res, fake_action_0_action_id)
|
||||
|
||||
def test_delete_client_raises_StorageEngineError_when_es_delete_raises(self):
|
||||
self.eng.action_manager.delete.side_effect = StorageEngineError()
|
||||
self.assertRaises(StorageEngineError, self.eng.delete_action,
|
||||
user_id=fake_action_0_user_id,
|
||||
action_id=fake_action_0_action_id)
|
||||
|
||||
def test_update_client_raises_BadDataFormat_when_update_has_action_id(self):
|
||||
self.eng.action_manager.get.return_value = fake_action_0_doc
|
||||
patch = {'action_id': 'butterfly_caught'}
|
||||
self.assertRaises(BadDataFormat, self.eng.update_action,
|
||||
user_id=fake_action_0_user_id,
|
||||
action_id=fake_action_0_action_id,
|
||||
patch=patch)
|
||||
|
||||
def test_update_action_raises_DocumentNotFound_when_doc_not_exists(self):
|
||||
self.eng.action_manager.get.side_effect = DocumentNotFound('regular test failure')
|
||||
patch = {'some_field': 'black_milk'}
|
||||
self.assertRaises(DocumentNotFound, self.eng.update_action,
|
||||
user_id=fake_action_0_user_id,
|
||||
action_id=fake_action_0_action_id,
|
||||
patch=patch)
|
||||
|
||||
def test_update_action_raises_DocumentNotFound_when_update_raises_DocumentNotFound(self):
|
||||
self.eng.action_manager.get.return_value = fake_action_0_doc
|
||||
patch = {'some_field': 'black_milk'}
|
||||
self.eng.action_manager.update.side_effect = DocumentNotFound('regular test failure')
|
||||
self.assertRaises(DocumentNotFound, self.eng.update_action,
|
||||
user_id=fake_action_0_user_id,
|
||||
action_id=fake_action_0_action_id,
|
||||
patch=patch)
|
||||
|
||||
def test_update_action_returns_new_doc_version(self):
|
||||
self.eng.action_manager.get.return_value = fake_action_0_doc
|
||||
patch = {'some_field': 'group_four'}
|
||||
self.eng.action_manager.update.return_value = 11
|
||||
res = self.eng.update_action(user_id=fake_action_0_user_id,
|
||||
action_id=fake_action_0_action_id,
|
||||
patch=patch)
|
||||
self.assertEqual(res, 11)
|
||||
43
tests/test_exceptions.py
Normal file
43
tests/test_exceptions.py
Normal file
@@ -0,0 +1,43 @@
|
||||
import unittest
|
||||
from mock import Mock, patch
|
||||
|
||||
import falcon
|
||||
|
||||
from common import *
|
||||
from freezer_api.common import exceptions
|
||||
|
||||
|
||||
class TestExceptions(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.ex = Mock()
|
||||
self.ex.message = 'test exception'
|
||||
self.mock_req = Mock()
|
||||
self.mock_req.context = {}
|
||||
self.exceptions = [e() for e in exceptions.exception_handlers_catalog]
|
||||
|
||||
def test_FreezerAPIException(self):
|
||||
e = exceptions.FreezerAPIException(message='testing')
|
||||
self.assertRaises(falcon.HTTPError,
|
||||
e.handle, self.ex, self.mock_req, self.mock_req, None)
|
||||
|
||||
def test_BadDataFormat(self):
|
||||
e = exceptions.BadDataFormat(message='testing')
|
||||
self.assertRaises(falcon.HTTPBadRequest,
|
||||
e.handle, self.ex, self.mock_req, self.mock_req, None)
|
||||
|
||||
def test_DocumentExists(self):
|
||||
e = exceptions.DocumentExists(message='testing')
|
||||
self.assertRaises(falcon.HTTPConflict,
|
||||
e.handle, self.ex, self.mock_req, self.mock_req, None)
|
||||
|
||||
|
||||
def test_StorageEngineError(self):
|
||||
e = exceptions.StorageEngineError(message='testing')
|
||||
self.assertRaises(falcon.HTTPInternalServerError,
|
||||
e.handle, self.ex, self.mock_req, self.mock_req, None)
|
||||
|
||||
def test_DocumentNotFound(self):
|
||||
e = exceptions.DocumentNotFound(message='testing')
|
||||
self.assertRaises(falcon.HTTPNotFound,
|
||||
e.handle, self.ex, self.mock_req, self.mock_req, None)
|
||||
Reference in New Issue
Block a user