Merge "Implement API v2 with full support for Multi-tenancy"
This commit is contained in:
commit
3ea8f90881
|
@ -1,8 +1,11 @@
|
||||||
[app:api_versions]
|
[app:api_versions]
|
||||||
paste.app_factory = freezer_api.api.versions:api_versions
|
paste.app_factory = freezer_api.api.versions:api_versions
|
||||||
|
|
||||||
[app:freezer_app]
|
[app:appv1]
|
||||||
paste.app_factory = freezer_api.service:freezer_app_factory
|
paste.app_factory = freezer_api.service:freezer_appv1_factory
|
||||||
|
|
||||||
|
[app:appv2]
|
||||||
|
paste.app_factory = freezer_api.service:freezer_appv2_factory
|
||||||
|
|
||||||
[app:service_v1]
|
[app:service_v1]
|
||||||
use = egg:freezer-api#service_v1
|
use = egg:freezer-api#service_v1
|
||||||
|
@ -31,6 +34,7 @@ pipeline = healthcheck http_proxy_to_wsgi versionsNegotiator authtoken context b
|
||||||
pipeline = http_proxy_to_wsgi healthcheck freezer_app
|
pipeline = http_proxy_to_wsgi healthcheck freezer_app
|
||||||
|
|
||||||
[composite:backupapp]
|
[composite:backupapp]
|
||||||
paste.composite_factory = freezer_api.cmd.api:root_app_factory
|
paste.composite_factory = freezer_api.service:root_app_factory
|
||||||
/: api_versions
|
/: api_versions
|
||||||
/v1: freezer_app
|
/v1: appv1
|
||||||
|
/v2: appv2
|
||||||
|
|
|
@ -24,12 +24,12 @@ from freezer_api.api.v1 import sessions
|
||||||
|
|
||||||
|
|
||||||
VERSION = {
|
VERSION = {
|
||||||
'id': '1',
|
'id': 'v1',
|
||||||
'status': 'CURRENT',
|
'status': 'CURRENT',
|
||||||
'updated': '2015-03-23T13:45:00',
|
'updated': '2015-03-23T13:45:00',
|
||||||
'links': [
|
'links': [
|
||||||
{
|
{
|
||||||
'href': '/v1/',
|
'href': '{0}v1/',
|
||||||
'rel': 'self'
|
'rel': 'self'
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
|
|
|
@ -0,0 +1,83 @@
|
||||||
|
"""
|
||||||
|
(c) Copyright 2014,2015 Hewlett-Packard Development Company, L.P.
|
||||||
|
|
||||||
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
you may not use this file except in compliance with the License.
|
||||||
|
You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing, software
|
||||||
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
See the License for the specific language governing permissions and
|
||||||
|
limitations under the License.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from freezer_api.api.v2 import actions
|
||||||
|
from freezer_api.api.v2 import backups
|
||||||
|
from freezer_api.api.v2 import clients
|
||||||
|
from freezer_api.api.v2 import homedoc
|
||||||
|
from freezer_api.api.v2 import jobs
|
||||||
|
from freezer_api.api.v2 import sessions
|
||||||
|
|
||||||
|
|
||||||
|
VERSION = {
|
||||||
|
'id': 'v2',
|
||||||
|
'status': 'DEVELOPMENT',
|
||||||
|
'updated': '2017-03-28T16:23:00',
|
||||||
|
'links': [
|
||||||
|
{
|
||||||
|
'href': '{0}v2/',
|
||||||
|
'rel': 'self'
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def public_endpoints(storage_driver):
|
||||||
|
return [
|
||||||
|
('/',
|
||||||
|
homedoc.Resource()),
|
||||||
|
|
||||||
|
('/{project_id}/backups',
|
||||||
|
backups.BackupsCollectionResource(storage_driver)),
|
||||||
|
|
||||||
|
('/{project_id}/backups/{backup_id}',
|
||||||
|
backups.BackupsResource(storage_driver)),
|
||||||
|
|
||||||
|
('/{project_id}/clients',
|
||||||
|
clients.ClientsCollectionResource(storage_driver)),
|
||||||
|
|
||||||
|
('/{project_id}/clients/{client_id}',
|
||||||
|
clients.ClientsResource(storage_driver)),
|
||||||
|
|
||||||
|
('/{project_id}/jobs',
|
||||||
|
jobs.JobsCollectionResource(storage_driver)),
|
||||||
|
|
||||||
|
('/{project_id}/jobs/{job_id}',
|
||||||
|
jobs.JobsResource(storage_driver)),
|
||||||
|
|
||||||
|
('/{project_id}/jobs/{job_id}/event',
|
||||||
|
jobs.JobsEvent(storage_driver)),
|
||||||
|
|
||||||
|
('/{project_id}/actions',
|
||||||
|
actions.ActionsCollectionResource(storage_driver)),
|
||||||
|
|
||||||
|
('/{project_id}/actions/{action_id}',
|
||||||
|
actions.ActionsResource(storage_driver)),
|
||||||
|
|
||||||
|
('/{project_id}/sessions',
|
||||||
|
sessions.SessionsCollectionResource(storage_driver)),
|
||||||
|
|
||||||
|
('/{project_id}/sessions/{session_id}',
|
||||||
|
sessions.SessionsResource(storage_driver)),
|
||||||
|
|
||||||
|
('/{project_id}/sessions/{session_id}/action',
|
||||||
|
sessions.SessionsAction(storage_driver)),
|
||||||
|
|
||||||
|
('/{project_id}/sessions/{session_id}/jobs/{job_id}',
|
||||||
|
sessions.SessionsJob(storage_driver)),
|
||||||
|
|
||||||
|
]
|
|
@ -0,0 +1,111 @@
|
||||||
|
"""
|
||||||
|
Copyright 2015 Hewlett-Packard
|
||||||
|
|
||||||
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
you may not use this file except in compliance with the License.
|
||||||
|
You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing, software
|
||||||
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
See the License for the specific language governing permissions and
|
||||||
|
limitations under the License.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
import falcon
|
||||||
|
|
||||||
|
from freezer_api.api.common import resource
|
||||||
|
from freezer_api.common import exceptions as freezer_api_exc
|
||||||
|
from freezer_api import policy
|
||||||
|
|
||||||
|
|
||||||
|
class ActionsCollectionResource(resource.BaseResource):
|
||||||
|
"""
|
||||||
|
Handler for endpoint: /v1/actions
|
||||||
|
"""
|
||||||
|
def __init__(self, storage_driver):
|
||||||
|
self.db = storage_driver
|
||||||
|
|
||||||
|
@policy.enforce('actions:get_all')
|
||||||
|
def on_get(self, req, resp, project_id):
|
||||||
|
# GET /v1/actions(?limit,offset) Lists actions
|
||||||
|
user_id = req.get_header('X-User-ID')
|
||||||
|
offset = req.get_param_as_int('offset', min=0) or 0
|
||||||
|
limit = req.get_param_as_int('limit', min=1) or 10
|
||||||
|
search = self.json_body(req)
|
||||||
|
obj_list = self.db.search_action(project_id=project_id,
|
||||||
|
user_id=user_id, offset=offset,
|
||||||
|
limit=limit, search=search)
|
||||||
|
resp.body = {'actions': obj_list}
|
||||||
|
|
||||||
|
@policy.enforce('actions:create')
|
||||||
|
def on_post(self, req, resp, project_id):
|
||||||
|
# POST /v1/actions Creates action entry
|
||||||
|
doc = self.json_body(req)
|
||||||
|
if not doc:
|
||||||
|
raise freezer_api_exc.BadDataFormat(
|
||||||
|
message='Missing request body')
|
||||||
|
user_id = req.get_header('X-User-ID')
|
||||||
|
action_id = self.db.add_action(project_id=project_id,
|
||||||
|
user_id=user_id,
|
||||||
|
doc=doc)
|
||||||
|
resp.status = falcon.HTTP_201
|
||||||
|
resp.body = {'action_id': action_id}
|
||||||
|
|
||||||
|
|
||||||
|
class ActionsResource(resource.BaseResource):
|
||||||
|
"""
|
||||||
|
Handler for endpoint: /v1/actions/{action_id}
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, storage_driver):
|
||||||
|
self.db = storage_driver
|
||||||
|
|
||||||
|
@policy.enforce('actions:get')
|
||||||
|
def on_get(self, req, resp, project_id, action_id):
|
||||||
|
# GET /v1/actions/{action_id} retrieves the specified action
|
||||||
|
# search in body
|
||||||
|
user_id = req.get_header('X-User-ID') or ''
|
||||||
|
obj = self.db.get_action(project_id=project_id,
|
||||||
|
user_id=user_id,
|
||||||
|
action_id=action_id)
|
||||||
|
if obj:
|
||||||
|
resp.body = obj
|
||||||
|
else:
|
||||||
|
resp.status = falcon.HTTP_404
|
||||||
|
|
||||||
|
@policy.enforce('actions:delete')
|
||||||
|
def on_delete(self, req, resp, project_id, action_id):
|
||||||
|
# DELETE /v1/actions/{action_id} Deletes the specified action
|
||||||
|
user_id = req.get_header('X-User-ID')
|
||||||
|
self.db.delete_action(project_id=project_id,
|
||||||
|
user_id=user_id,
|
||||||
|
action_id=action_id)
|
||||||
|
resp.body = {'action_id': action_id}
|
||||||
|
resp.status = falcon.HTTP_204
|
||||||
|
|
||||||
|
@policy.enforce('actions:update')
|
||||||
|
def on_patch(self, req, resp, project_id, action_id):
|
||||||
|
# PATCH /v1/actions/{action_id} updates the specified action
|
||||||
|
user_id = req.get_header('X-User-ID') or ''
|
||||||
|
doc = self.json_body(req)
|
||||||
|
new_version = self.db.update_action(project_id=project_id,
|
||||||
|
user_id=user_id,
|
||||||
|
action_id=action_id,
|
||||||
|
patch_doc=doc)
|
||||||
|
resp.body = {'action_id': action_id, 'version': new_version}
|
||||||
|
|
||||||
|
@policy.enforce('actions:replace')
|
||||||
|
def on_post(self, req, resp, project_id, action_id):
|
||||||
|
# PUT /v1/actions/{job_id} creates/replaces the specified action
|
||||||
|
user_id = req.get_header('X-User-ID') or ''
|
||||||
|
doc = self.json_body(req)
|
||||||
|
new_version = self.db.replace_action(project_id=project_id,
|
||||||
|
user_id=user_id,
|
||||||
|
action_id=action_id,
|
||||||
|
doc=doc)
|
||||||
|
resp.status = falcon.HTTP_201
|
||||||
|
resp.body = {'action_id': action_id, 'version': new_version}
|
|
@ -0,0 +1,88 @@
|
||||||
|
"""
|
||||||
|
(c) Copyright 2014,2015 Hewlett-Packard Development Company, L.P.
|
||||||
|
|
||||||
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
you may not use this file except in compliance with the License.
|
||||||
|
You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing, software
|
||||||
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
See the License for the specific language governing permissions and
|
||||||
|
limitations under the License.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
import falcon
|
||||||
|
|
||||||
|
from freezer_api.api.common import resource
|
||||||
|
from freezer_api.common import exceptions as freezer_api_exc
|
||||||
|
from freezer_api import policy
|
||||||
|
|
||||||
|
|
||||||
|
class BackupsCollectionResource(resource.BaseResource):
|
||||||
|
"""
|
||||||
|
Handler for endpoint: /v1/backups
|
||||||
|
"""
|
||||||
|
def __init__(self, storage_driver):
|
||||||
|
self.db = storage_driver
|
||||||
|
|
||||||
|
@policy.enforce('backups:get_all')
|
||||||
|
def on_get(self, req, resp, project_id):
|
||||||
|
# GET /v1/backups(?limit,offset) Lists backups
|
||||||
|
user_id = req.get_header('X-User-ID')
|
||||||
|
offset = req.get_param_as_int('offset', min=0) or 0
|
||||||
|
limit = req.get_param_as_int('limit', min=1) or 10
|
||||||
|
search = self.json_body(req)
|
||||||
|
obj_list = self.db.search_backup(project_id=project_id,
|
||||||
|
user_id=user_id, offset=offset,
|
||||||
|
limit=limit, search=search)
|
||||||
|
resp.body = {'backups': obj_list}
|
||||||
|
|
||||||
|
@policy.enforce('backups:create')
|
||||||
|
def on_post(self, req, resp, project_id):
|
||||||
|
# POST /v1/backups Creates backup entry
|
||||||
|
doc = self.json_body(req)
|
||||||
|
if not doc:
|
||||||
|
raise freezer_api_exc.BadDataFormat(
|
||||||
|
message='Missing request body')
|
||||||
|
user_name = req.get_header('X-User-Name')
|
||||||
|
user_id = req.get_header('X-User-ID')
|
||||||
|
backup_id = self.db.add_backup(project_id=project_id,
|
||||||
|
user_id=user_id,
|
||||||
|
user_name=user_name,
|
||||||
|
doc=doc)
|
||||||
|
resp.status = falcon.HTTP_201
|
||||||
|
resp.body = {'backup_id': backup_id}
|
||||||
|
|
||||||
|
|
||||||
|
class BackupsResource(resource.BaseResource):
|
||||||
|
"""
|
||||||
|
Handler for endpoint: /v1/backups/{backup_id}
|
||||||
|
"""
|
||||||
|
def __init__(self, storage_driver):
|
||||||
|
self.db = storage_driver
|
||||||
|
|
||||||
|
@policy.enforce('backups:get')
|
||||||
|
def on_get(self, req, resp, project_id, backup_id):
|
||||||
|
# GET /v1/backups/{backup_id} Get backup details
|
||||||
|
user_id = req.get_header('X-User-ID')
|
||||||
|
obj = self.db.get_backup(project_id=project_id,
|
||||||
|
user_id=user_id,
|
||||||
|
backup_id=backup_id)
|
||||||
|
if obj:
|
||||||
|
resp.body = obj
|
||||||
|
else:
|
||||||
|
resp.status = falcon.HTTP_404
|
||||||
|
|
||||||
|
@policy.enforce('backups:delete')
|
||||||
|
def on_delete(self, req, resp, project_id, backup_id):
|
||||||
|
# DELETE /v1/backups/{backup_id} Deletes the specified backup
|
||||||
|
user_id = req.get_header('X-User-ID')
|
||||||
|
self.db.delete_backup(project_id=project_id,
|
||||||
|
user_id=user_id,
|
||||||
|
backup_id=backup_id)
|
||||||
|
resp.body = {'backup_id': backup_id}
|
||||||
|
resp.status = falcon.HTTP_204
|
|
@ -0,0 +1,88 @@
|
||||||
|
"""
|
||||||
|
(c) Copyright 2014,2015 Hewlett-Packard Development Company, L.P.
|
||||||
|
|
||||||
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
you may not use this file except in compliance with the License.
|
||||||
|
You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing, software
|
||||||
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
See the License for the specific language governing permissions and
|
||||||
|
limitations under the License.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
import falcon
|
||||||
|
|
||||||
|
from freezer_api.api.common import resource
|
||||||
|
from freezer_api.common import exceptions as freezer_api_exc
|
||||||
|
from freezer_api import policy
|
||||||
|
|
||||||
|
|
||||||
|
class ClientsCollectionResource(resource.BaseResource):
|
||||||
|
"""
|
||||||
|
Handler for endpoint: /v1/clients
|
||||||
|
"""
|
||||||
|
def __init__(self, storage_driver):
|
||||||
|
self.db = storage_driver
|
||||||
|
|
||||||
|
@policy.enforce('clients:get_all')
|
||||||
|
def on_get(self, req, resp, project_id):
|
||||||
|
# GET /v1/clients(?limit,offset) Lists clients
|
||||||
|
user_id = req.get_header('X-User-ID')
|
||||||
|
offset = req.get_param_as_int('offset', min=0) or 0
|
||||||
|
limit = req.get_param_as_int('limit', min=1) or 10
|
||||||
|
search = self.json_body(req)
|
||||||
|
obj_list = self.db.get_client(project_id=project_id,
|
||||||
|
user_id=user_id,
|
||||||
|
offset=offset,
|
||||||
|
limit=limit,
|
||||||
|
search=search)
|
||||||
|
resp.body = {'clients': obj_list}
|
||||||
|
|
||||||
|
@policy.enforce('clients:create')
|
||||||
|
def on_post(self, req, resp, project_id):
|
||||||
|
# POST /v1/clients Creates client entry
|
||||||
|
doc = self.json_body(req)
|
||||||
|
if not doc:
|
||||||
|
raise freezer_api_exc.BadDataFormat(
|
||||||
|
message='Missing request body')
|
||||||
|
user_id = req.get_header('X-User-ID')
|
||||||
|
client_id = self.db.add_client(
|
||||||
|
project_id=project_id, user_id=user_id, doc=doc)
|
||||||
|
resp.status = falcon.HTTP_201
|
||||||
|
resp.body = {'client_id': client_id}
|
||||||
|
|
||||||
|
|
||||||
|
class ClientsResource(resource.BaseResource):
|
||||||
|
"""
|
||||||
|
Handler for endpoint: /v1/clients/{client_id}
|
||||||
|
"""
|
||||||
|
def __init__(self, storage_driver):
|
||||||
|
self.db = storage_driver
|
||||||
|
|
||||||
|
@policy.enforce('clients:get')
|
||||||
|
def on_get(self, req, resp, project_id, client_id):
|
||||||
|
# GET /v1/clients(?limit,offset)
|
||||||
|
# search in body
|
||||||
|
user_id = req.get_header('X-User-ID') or ''
|
||||||
|
obj = self.db.get_client(project_id=project_id,
|
||||||
|
user_id=user_id,
|
||||||
|
client_id=client_id)
|
||||||
|
if obj:
|
||||||
|
resp.body = obj[0]
|
||||||
|
else:
|
||||||
|
resp.status = falcon.HTTP_404
|
||||||
|
|
||||||
|
@policy.enforce('clients:delete')
|
||||||
|
def on_delete(self, req, resp, project_id, client_id):
|
||||||
|
# DELETE /v1/clients/{client_id} Deletes the specified backup
|
||||||
|
user_id = req.get_header('X-User-ID')
|
||||||
|
self.db.delete_client(project_id=project_id,
|
||||||
|
user_id=user_id,
|
||||||
|
client_id=client_id)
|
||||||
|
resp.body = {'client_id': client_id}
|
||||||
|
resp.status = falcon.HTTP_204
|
|
@ -0,0 +1,48 @@
|
||||||
|
"""
|
||||||
|
(c) Copyright 2014,2015 Hewlett-Packard Development Company, L.P.
|
||||||
|
|
||||||
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
you may not use this file except in compliance with the License.
|
||||||
|
You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing, software
|
||||||
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
See the License for the specific language governing permissions and
|
||||||
|
limitations under the License.
|
||||||
|
|
||||||
|
http://tools.ietf.org/html/draft-nottingham-json-home-03
|
||||||
|
"""
|
||||||
|
|
||||||
|
import json
|
||||||
|
|
||||||
|
HOME_DOC = {
|
||||||
|
'resources': {
|
||||||
|
'rel/backups': {
|
||||||
|
'href-template': '/v2/{project_id}/backups/{backup_id}',
|
||||||
|
'href-vars': {
|
||||||
|
'project_id': 'param/project_id',
|
||||||
|
'backup_id': 'param/backup_id'
|
||||||
|
},
|
||||||
|
'hints': {
|
||||||
|
'allow': ['GET'],
|
||||||
|
'formats': {
|
||||||
|
'application/json': {},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class Resource(object):
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
document = json.dumps(HOME_DOC, ensure_ascii=False, indent=4)
|
||||||
|
self.document_utf8 = document.encode('utf-8')
|
||||||
|
|
||||||
|
def on_get(self, req, resp, project_id=None):
|
||||||
|
resp.data = self.document_utf8
|
||||||
|
resp.content_type = 'application/json-home'
|
|
@ -0,0 +1,311 @@
|
||||||
|
"""
|
||||||
|
Copyright 2015 Hewlett-Packard
|
||||||
|
|
||||||
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
you may not use this file except in compliance with the License.
|
||||||
|
You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing, software
|
||||||
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
See the License for the specific language governing permissions and
|
||||||
|
limitations under the License.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
import uuid
|
||||||
|
|
||||||
|
import falcon
|
||||||
|
import six
|
||||||
|
|
||||||
|
from freezer_api.api.common import resource
|
||||||
|
from freezer_api.common import exceptions as freezer_api_exc
|
||||||
|
from freezer_api import policy
|
||||||
|
|
||||||
|
|
||||||
|
class JobsBaseResource(resource.BaseResource):
|
||||||
|
"""
|
||||||
|
Base class able to create actions contained in a job document
|
||||||
|
|
||||||
|
"""
|
||||||
|
def __init__(self, storage_driver):
|
||||||
|
self.db = storage_driver
|
||||||
|
|
||||||
|
def get_action(self, project_id, user_id, action_id):
|
||||||
|
found_action = None
|
||||||
|
try:
|
||||||
|
found_action = self.db.get_action(project_id=project_id,
|
||||||
|
user_id=user_id,
|
||||||
|
action_id=action_id)
|
||||||
|
except freezer_api_exc.DocumentNotFound:
|
||||||
|
pass
|
||||||
|
return found_action
|
||||||
|
|
||||||
|
def update_actions_in_job(self, project_id, user_id, job_doc):
|
||||||
|
"""
|
||||||
|
Looks into a job document and creates actions in the db.
|
||||||
|
Actions are given an action_id if they don't have one yet
|
||||||
|
"""
|
||||||
|
job = Job(job_doc)
|
||||||
|
for action in job.actions():
|
||||||
|
if action.action_id:
|
||||||
|
# action has action_id, let's see if it's in the db
|
||||||
|
found_action_doc = self.get_action(project_id=project_id,
|
||||||
|
user_id=user_id,
|
||||||
|
action_id=action.action_id)
|
||||||
|
if found_action_doc:
|
||||||
|
if action == Action(found_action_doc):
|
||||||
|
# action already present in the db, do nothing
|
||||||
|
continue
|
||||||
|
else:
|
||||||
|
# action is different, generate new action_id
|
||||||
|
action.action_id = ''
|
||||||
|
# action not found in db, leave current action_id
|
||||||
|
self.db.add_action(project_id=project_id,
|
||||||
|
user_id=user_id,
|
||||||
|
doc=action.doc)
|
||||||
|
|
||||||
|
|
||||||
|
class JobsCollectionResource(JobsBaseResource):
|
||||||
|
"""
|
||||||
|
Handler for endpoint: /v1/jobs
|
||||||
|
"""
|
||||||
|
|
||||||
|
@policy.enforce('jobs:get_all')
|
||||||
|
def on_get(self, req, resp, project_id):
|
||||||
|
# GET /v1/jobs(?limit,offset) Lists jobs
|
||||||
|
user_id = req.get_header('X-User-ID')
|
||||||
|
offset = req.get_param_as_int('offset', min=0) or 0
|
||||||
|
limit = req.get_param_as_int('limit', min=1) or 10
|
||||||
|
search = self.json_body(req)
|
||||||
|
obj_list = self.db.search_job(project_id=project_id,
|
||||||
|
user_id=user_id, offset=offset,
|
||||||
|
limit=limit, search=search)
|
||||||
|
resp.body = {'jobs': obj_list}
|
||||||
|
|
||||||
|
@policy.enforce('jobs:create')
|
||||||
|
def on_post(self, req, resp, project_id):
|
||||||
|
# POST /v1/jobs Creates job entry
|
||||||
|
try:
|
||||||
|
job = Job(self.json_body(req))
|
||||||
|
except KeyError:
|
||||||
|
raise freezer_api_exc.BadDataFormat(
|
||||||
|
message='Missing request body')
|
||||||
|
|
||||||
|
user_id = req.get_header('X-User-ID')
|
||||||
|
self.update_actions_in_job(project_id, user_id, job.doc)
|
||||||
|
job_id = self.db.add_job(project_id=project_id,
|
||||||
|
user_id=user_id, doc=job.doc)
|
||||||
|
resp.status = falcon.HTTP_201
|
||||||
|
resp.body = {'job_id': job_id}
|
||||||
|
|
||||||
|
|
||||||
|
class JobsResource(JobsBaseResource):
|
||||||
|
"""
|
||||||
|
Handler for endpoint: /v1/jobs/{job_id}
|
||||||
|
"""
|
||||||
|
|
||||||
|
@policy.enforce('jobs:get')
|
||||||
|
def on_get(self, req, resp, project_id, job_id):
|
||||||
|
# GET /v1/jobs/{job_id} retrieves the specified job
|
||||||
|
# search in body
|
||||||
|
user_id = req.get_header('X-User-ID') or ''
|
||||||
|
obj = self.db.get_job(project_id=project_id,
|
||||||
|
user_id=user_id, job_id=job_id)
|
||||||
|
if obj:
|
||||||
|
resp.body = obj
|
||||||
|
else:
|
||||||
|
resp.status = falcon.HTTP_404
|
||||||
|
|
||||||
|
@policy.enforce('jobs:delete')
|
||||||
|
def on_delete(self, req, resp, project_id, job_id):
|
||||||
|
# DELETE /v1/jobs/{job_id} Deletes the specified job
|
||||||
|
user_id = req.get_header('X-User-ID')
|
||||||
|
self.db.delete_job(project_id=project_id,
|
||||||
|
user_id=user_id, job_id=job_id)
|
||||||
|
resp.body = {'job_id': job_id}
|
||||||
|
resp.status = falcon.HTTP_204
|
||||||
|
|
||||||
|
@policy.enforce('jobs:update')
|
||||||
|
def on_patch(self, req, resp, project_id, job_id):
|
||||||
|
# PATCH /v1/jobs/{job_id} updates the specified job
|
||||||
|
user_id = req.get_header('X-User-ID') or ''
|
||||||
|
job = Job(self.json_body(req))
|
||||||
|
self.update_actions_in_job(project_id, user_id, job.doc)
|
||||||
|
new_version = self.db.update_job(project_id=project_id,
|
||||||
|
user_id=user_id,
|
||||||
|
job_id=job_id,
|
||||||
|
patch_doc=job.doc)
|
||||||
|
resp.body = {'job_id': job_id, 'version': new_version}
|
||||||
|
|
||||||
|
@policy.enforce('jobs:create')
|
||||||
|
def on_post(self, req, resp, project_id, job_id):
|
||||||
|
# PUT /v1/jobs/{job_id} creates/replaces the specified job
|
||||||
|
user_id = req.get_header('X-User-ID') or ''
|
||||||
|
job = Job(self.json_body(req))
|
||||||
|
self.update_actions_in_job(project_id, user_id, job.doc)
|
||||||
|
new_version = self.db.replace_job(project_id=project_id,
|
||||||
|
user_id=user_id,
|
||||||
|
job_id=job_id,
|
||||||
|
doc=job.doc)
|
||||||
|
resp.status = falcon.HTTP_201
|
||||||
|
resp.body = {'job_id': job_id, 'version': new_version}
|
||||||
|
|
||||||
|
|
||||||
|
class JobsEvent(resource.BaseResource):
|
||||||
|
"""
|
||||||
|
Handler for endpoint: /v1/jobs/{job_id}/event
|
||||||
|
|
||||||
|
Actions are passed in the body, for example:
|
||||||
|
{
|
||||||
|
"start": null
|
||||||
|
}
|
||||||
|
"""
|
||||||
|
def __init__(self, storage_driver):
|
||||||
|
self.db = storage_driver
|
||||||
|
|
||||||
|
@policy.enforce('jobs:event:create')
|
||||||
|
def on_post(self, req, resp, project_id, job_id):
|
||||||
|
# POST /v1/jobs/{job_id}/event
|
||||||
|
# requests an event on the specified job
|
||||||
|
|
||||||
|
user_id = req.get_header('X-User-ID') or ''
|
||||||
|
doc = self.json_body(req)
|
||||||
|
|
||||||
|
try:
|
||||||
|
event, params = next(six.iteritems(doc))
|
||||||
|
except Exception:
|
||||||
|
raise freezer_api_exc.BadDataFormat("Bad event request format")
|
||||||
|
|
||||||
|
job_doc = self.db.get_job(project_id=project_id,
|
||||||
|
user_id=user_id,
|
||||||
|
job_id=job_id)
|
||||||
|
job = Job(job_doc)
|
||||||
|
result = job.execute_event(event, params)
|
||||||
|
|
||||||
|
if job.need_update:
|
||||||
|
self.db.replace_job(project_id=project_id,
|
||||||
|
user_id=user_id,
|
||||||
|
job_id=job_id,
|
||||||
|
doc=job.doc)
|
||||||
|
resp.status = falcon.HTTP_202
|
||||||
|
resp.body = {'result': result}
|
||||||
|
|
||||||
|
|
||||||
|
class Action(object):
|
||||||
|
def __init__(self, doc):
|
||||||
|
self.doc = doc
|
||||||
|
|
||||||
|
@property
|
||||||
|
def action_id(self):
|
||||||
|
return self.doc.get('action_id', '')
|
||||||
|
|
||||||
|
@action_id.setter
|
||||||
|
def action_id(self, value):
|
||||||
|
self.doc['action_id'] = value
|
||||||
|
|
||||||
|
def create_new_action_id(self):
|
||||||
|
self.doc['action_id'] = uuid.uuid4().hex
|
||||||
|
|
||||||
|
def __eq__(self, other):
|
||||||
|
# return self.doc == other.doc
|
||||||
|
dont_care_keys = ['_version', 'user_id']
|
||||||
|
lh = self.doc.get('freezer_action', None)
|
||||||
|
rh = other.doc.get('freezer_action', None)
|
||||||
|
diffkeys = [k for k in lh if lh[k] != rh.get(k)]
|
||||||
|
diffkeys += [k for k in rh if rh[k] != lh.get(k)]
|
||||||
|
for k in diffkeys:
|
||||||
|
if k not in dont_care_keys:
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
|
def __ne__(self, other):
|
||||||
|
return not (self.__eq__(other))
|
||||||
|
|
||||||
|
|
||||||
|
class Job(object):
|
||||||
|
"""
|
||||||
|
A class with knowledge of the inner working of a job data structure.
|
||||||
|
|
||||||
|
Responibilities:
|
||||||
|
- manage the events that can be sent to a job. The result of handling
|
||||||
|
an event is a modification of the information contained in the
|
||||||
|
job document
|
||||||
|
- extract actions from a job (usage example: to be used to create actions)
|
||||||
|
"""
|
||||||
|
def __init__(self, doc):
|
||||||
|
self.doc = doc
|
||||||
|
if self.doc.get("action_defaults") is not None:
|
||||||
|
self.expand_default_properties()
|
||||||
|
self.event_result = ''
|
||||||
|
self.need_update = False
|
||||||
|
if 'job_schedule' not in doc:
|
||||||
|
doc['job_schedule'] = {}
|
||||||
|
self.job_schedule = doc['job_schedule']
|
||||||
|
self.event_handlers = {'start': self.start,
|
||||||
|
'stop': self.stop,
|
||||||
|
'abort': self.abort}
|
||||||
|
|
||||||
|
def execute_event(self, event, params):
|
||||||
|
handler = self.event_handlers.get(event, None)
|
||||||
|
if not handler:
|
||||||
|
raise freezer_api_exc.BadDataFormat("Bad Action Method")
|
||||||
|
try:
|
||||||
|
self.event_result = handler(params)
|
||||||
|
except freezer_api_exc.BadDataFormat:
|
||||||
|
raise
|
||||||
|
except Exception as e:
|
||||||
|
raise freezer_api_exc.FreezerAPIException(e)
|
||||||
|
return self.event_result
|
||||||
|
|
||||||
|
@property
|
||||||
|
def job_status(self):
|
||||||
|
return self.job_schedule.get('status', '')
|
||||||
|
|
||||||
|
@job_status.setter
|
||||||
|
def job_status(self, value):
|
||||||
|
self.job_schedule['status'] = value
|
||||||
|
|
||||||
|
def start(self, params=None):
|
||||||
|
if self.job_schedule.get('event') != 'start':
|
||||||
|
self.job_schedule['event'] = 'start'
|
||||||
|
self.need_update = True
|
||||||
|
return 'success'
|
||||||
|
return 'start already requested'
|
||||||
|
|
||||||
|
def stop(self, params=None):
|
||||||
|
if self.job_schedule.get('event') != 'stop':
|
||||||
|
self.job_schedule['event'] = 'stop'
|
||||||
|
self.need_update = True
|
||||||
|
return 'success'
|
||||||
|
return 'stop already requested'
|
||||||
|
|
||||||
|
def abort(self, params=None):
|
||||||
|
if self.job_schedule.get('event') != 'abort':
|
||||||
|
self.job_schedule['event'] = 'abort'
|
||||||
|
self.need_update = True
|
||||||
|
return 'success'
|
||||||
|
return 'abort already requested'
|
||||||
|
|
||||||
|
def actions(self):
|
||||||
|
"""
|
||||||
|
Generator to iterate over the actions contained in a job
|
||||||
|
|
||||||
|
:return: yields Action objects
|
||||||
|
"""
|
||||||
|
for action_doc in self.doc.get('job_actions', []):
|
||||||
|
yield Action(action_doc)
|
||||||
|
|
||||||
|
def expand_default_properties(self):
|
||||||
|
action_defaults = self.doc.pop("action_defaults")
|
||||||
|
if isinstance(action_defaults, dict):
|
||||||
|
for key, val in six.iteritems(action_defaults):
|
||||||
|
for action in self.doc.get("job_actions"):
|
||||||
|
if action["freezer_action"].get(key) is None:
|
||||||
|
action["freezer_action"][key] = val
|
||||||
|
else:
|
||||||
|
raise freezer_api_exc.BadDataFormat(
|
||||||
|
message="action_defaults shouldbe a dictionary"
|
||||||
|
)
|
|
@ -0,0 +1,375 @@
|
||||||
|
"""
|
||||||
|
Copyright 2015 Hewlett-Packard
|
||||||
|
|
||||||
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
you may not use this file except in compliance with the License.
|
||||||
|
You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing, software
|
||||||
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
See the License for the specific language governing permissions and
|
||||||
|
limitations under the License.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
import time
|
||||||
|
|
||||||
|
import falcon
|
||||||
|
import six
|
||||||
|
|
||||||
|
from freezer_api.api.common import resource
|
||||||
|
from freezer_api.common import exceptions as freezer_api_exc
|
||||||
|
from freezer_api import policy
|
||||||
|
|
||||||
|
|
||||||
|
class SessionsCollectionResource(resource.BaseResource):
|
||||||
|
"""
|
||||||
|
Handler for endpoint: /v1/sessions
|
||||||
|
"""
|
||||||
|
def __init__(self, storage_driver):
|
||||||
|
self.db = storage_driver
|
||||||
|
|
||||||
|
@policy.enforce('sessions:get_all')
|
||||||
|
def on_get(self, req, resp, project_id):
|
||||||
|
# GET /v1/sessions(?limit,offset) Lists sessions
|
||||||
|
user_id = req.get_header('X-User-ID')
|
||||||
|
offset = req.get_param_as_int('offset', min=0) or 0
|
||||||
|
limit = req.get_param_as_int('limit', min=1) or 10
|
||||||
|
search = self.json_body(req)
|
||||||
|
obj_list = self.db.search_session(project_id=project_id,
|
||||||
|
user_id=user_id, offset=offset,
|
||||||
|
limit=limit, search=search)
|
||||||
|
resp.body = {'sessions': obj_list}
|
||||||
|
|
||||||
|
@policy.enforce('sessions:create')
|
||||||
|
def on_post(self, req, resp, project_id):
|
||||||
|
# POST /v1/sessions Creates session entry
|
||||||
|
doc = self.json_body(req)
|
||||||
|
if not doc:
|
||||||
|
raise freezer_api_exc.BadDataFormat(
|
||||||
|
message='Missing request body')
|
||||||
|
user_id = req.get_header('X-User-ID')
|
||||||
|
session_id = self.db.add_session(project_id=project_id,
|
||||||
|
user_id=user_id, doc=doc)
|
||||||
|
resp.status = falcon.HTTP_201
|
||||||
|
resp.body = {'session_id': session_id}
|
||||||
|
|
||||||
|
|
||||||
|
class SessionsResource(resource.BaseResource):
|
||||||
|
"""
|
||||||
|
Handler for endpoint: /v1/sessions/{session_id}
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, storage_driver):
|
||||||
|
self.db = storage_driver
|
||||||
|
|
||||||
|
@policy.enforce('sessions:get')
|
||||||
|
def on_get(self, req, resp, project_id, session_id):
|
||||||
|
# GET /v1/sessions/{session_id} retrieves the specified session
|
||||||
|
# search in body
|
||||||
|
user_id = req.get_header('X-User-ID') or ''
|
||||||
|
obj = self.db.get_session(project_id=project_id,
|
||||||
|
user_id=user_id, session_id=session_id)
|
||||||
|
if obj:
|
||||||
|
resp.body = obj
|
||||||
|
else:
|
||||||
|
resp.status = falcon.HTTP_404
|
||||||
|
|
||||||
|
@policy.enforce('sessions:delete')
|
||||||
|
def on_delete(self, req, resp, project_id, session_id):
|
||||||
|
# DELETE /v1/sessions/{session_id} Deletes the specified session
|
||||||
|
user_id = req.get_header('X-User-ID')
|
||||||
|
self.db.delete_session(project_id=project_id,
|
||||||
|
user_id=user_id, session_id=session_id)
|
||||||
|
resp.body = {'session_id': session_id}
|
||||||
|
resp.status = falcon.HTTP_204
|
||||||
|
|
||||||
|
@policy.enforce('sessions:update')
|
||||||
|
def on_patch(self, req, resp, project_id, session_id):
|
||||||
|
# PATCH /v1/sessions/{session_id} updates the specified session
|
||||||
|
user_id = req.get_header('X-User-ID') or ''
|
||||||
|
doc = self.json_body(req)
|
||||||
|
new_version = self.db.update_session(project_id=project_id,
|
||||||
|
user_id=user_id,
|
||||||
|
session_id=session_id,
|
||||||
|
patch_doc=doc)
|
||||||
|
resp.body = {'session_id': session_id, 'version': new_version}
|
||||||
|
|
||||||
|
@policy.enforce('sessions:replace')
|
||||||
|
def on_post(self, req, resp, project_id, session_id):
|
||||||
|
# PUT /v1/sessions/{session_id} creates/replaces the specified session
|
||||||
|
user_id = req.get_header('X-User-ID') or ''
|
||||||
|
doc = self.json_body(req)
|
||||||
|
if not doc:
|
||||||
|
raise freezer_api_exc.BadDataFormat(
|
||||||
|
message='Missing request body')
|
||||||
|
new_version = self.db.replace_session(project_id=project_id,
|
||||||
|
user_id=user_id,
|
||||||
|
session_id=session_id,
|
||||||
|
doc=doc)
|
||||||
|
resp.status = falcon.HTTP_201
|
||||||
|
resp.body = {'session_id': session_id, 'version': new_version}
|
||||||
|
|
||||||
|
|
||||||
|
class SessionsAction(resource.BaseResource):
|
||||||
|
"""
|
||||||
|
Handler for endpoint: /v1/sessions/{session_id}/action
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, storage_driver):
|
||||||
|
self.db = storage_driver
|
||||||
|
|
||||||
|
@policy.enforce('sessions:action:create')
|
||||||
|
def on_post(self, req, resp, project_id, session_id):
|
||||||
|
# POST /v1/sessions/{session_id}/action
|
||||||
|
# executes an action on the specified session
|
||||||
|
|
||||||
|
user_id = req.get_header('X-User-ID') or ''
|
||||||
|
doc = self.json_body(req)
|
||||||
|
|
||||||
|
try:
|
||||||
|
action, params = next(six.iteritems(doc))
|
||||||
|
except Exception:
|
||||||
|
raise freezer_api_exc.BadDataFormat("Bad action request format")
|
||||||
|
|
||||||
|
session_doc = self.db.get_session(project_id=project_id,
|
||||||
|
user_id=user_id,
|
||||||
|
session_id=session_id)
|
||||||
|
session = Session(session_doc)
|
||||||
|
session.execute_action(action, params)
|
||||||
|
|
||||||
|
if session.need_update:
|
||||||
|
self.db.update_session(project_id=project_id,
|
||||||
|
user_id=user_id,
|
||||||
|
session_id=session_id,
|
||||||
|
patch_doc=session.doc)
|
||||||
|
resp.status = falcon.HTTP_202
|
||||||
|
resp.body = {'result': session.action_result,
|
||||||
|
'session_tag': session.session_tag}
|
||||||
|
|
||||||
|
|
||||||
|
class Session(resource.BaseResource):
|
||||||
|
"""
|
||||||
|
A class to manage the actions that can be taken upon a
|
||||||
|
Session data structure.
|
||||||
|
It modifies information contained in its document
|
||||||
|
in accordance to the requested action
|
||||||
|
"""
|
||||||
|
def __init__(self, doc):
|
||||||
|
self.doc = doc
|
||||||
|
self.action_result = ''
|
||||||
|
self.need_update = False
|
||||||
|
|
||||||
|
@property
|
||||||
|
def session_tag(self):
|
||||||
|
return int(self.doc.get('session_tag', 0))
|
||||||
|
|
||||||
|
@session_tag.setter
|
||||||
|
def session_tag(self, value):
|
||||||
|
self.doc['session_tag'] = int(value)
|
||||||
|
|
||||||
|
def execute_action(self, action, params):
|
||||||
|
if action == 'start':
|
||||||
|
try:
|
||||||
|
self.start(params['job_id'], params['current_tag'])
|
||||||
|
except freezer_api_exc.BadDataFormat:
|
||||||
|
raise
|
||||||
|
except Exception as e:
|
||||||
|
raise freezer_api_exc.FreezerAPIException(e)
|
||||||
|
elif action == 'end':
|
||||||
|
try:
|
||||||
|
self.end(params['job_id'], params['result'])
|
||||||
|
except freezer_api_exc.BadDataFormat:
|
||||||
|
raise
|
||||||
|
except Exception as e:
|
||||||
|
raise freezer_api_exc.FreezerAPIException(e)
|
||||||
|
else:
|
||||||
|
raise freezer_api_exc.MethodNotImplemented("Bad Action Method")
|
||||||
|
|
||||||
|
def end(self, job_id, result):
|
||||||
|
"""
|
||||||
|
Apply the 'end' action to the session object
|
||||||
|
If the request can be accepted it modifies the relevant fields
|
||||||
|
and sets the need_update member to notify that the stored
|
||||||
|
document needs to be updated
|
||||||
|
"""
|
||||||
|
now = int(time.time())
|
||||||
|
self.set_job_end(job_id, result, now)
|
||||||
|
new_result = self.get_job_overall_result()
|
||||||
|
if self.doc.get('status', '') != 'completed':
|
||||||
|
if new_result in ['fail', 'success']:
|
||||||
|
self.doc['time_end'] = now
|
||||||
|
self.doc['result'] = new_result
|
||||||
|
self.doc['status'] = 'completed'
|
||||||
|
self.action_result = 'success'
|
||||||
|
self.need_update = True
|
||||||
|
|
||||||
|
def start(self, job_id, job_tag):
|
||||||
|
"""
|
||||||
|
Apply the 'start' action to the session object
|
||||||
|
If the request can be accepted it modifies the relevant fields
|
||||||
|
and sets the need_update member to notify that the stored
|
||||||
|
document needs to be updated
|
||||||
|
"""
|
||||||
|
job_tag = int(job_tag)
|
||||||
|
self.session_tag = int(self.session_tag)
|
||||||
|
now = int(time.time())
|
||||||
|
time_since_last_start = now - self.doc.get('time_start', 0)
|
||||||
|
|
||||||
|
if job_tag > self.session_tag:
|
||||||
|
raise freezer_api_exc.BadDataFormat(
|
||||||
|
'requested tag value too high. Session Tag: {0} '
|
||||||
|
'Job Tag: {1}'.format(self.session_tag, job_tag))
|
||||||
|
|
||||||
|
if time_since_last_start <= self.doc.get('hold_off', 60):
|
||||||
|
# session has been started not so long ago
|
||||||
|
# tag increments are not allowed during hold_off
|
||||||
|
if job_tag < self.session_tag:
|
||||||
|
self.action_result = 'success'
|
||||||
|
self.set_job_start(job_id, now)
|
||||||
|
self.need_update = True
|
||||||
|
else:
|
||||||
|
self.action_result = 'hold-off'
|
||||||
|
self.need_update = False
|
||||||
|
elif time_since_last_start > self.doc.get('hold_off', 60):
|
||||||
|
# out of hold_off window:
|
||||||
|
# - ok to trigger new action start (job_tag == session_tag)
|
||||||
|
# if job_tag < session_tag client is probably out-of-sync
|
||||||
|
if self.session_tag == job_tag:
|
||||||
|
self.session_tag += 1
|
||||||
|
self.doc['time_start'] = now
|
||||||
|
self.doc['status'] = 'running'
|
||||||
|
self.doc['result'] = ''
|
||||||
|
self.action_result = 'success'
|
||||||
|
self.set_job_start(job_id, now)
|
||||||
|
self.need_update = True
|
||||||
|
else:
|
||||||
|
self.action_result = 'out-of-sync'
|
||||||
|
self.need_update = False
|
||||||
|
|
||||||
|
def get_job_overall_result(self):
|
||||||
|
"""
|
||||||
|
check the status of all the jobs and return the overall session result
|
||||||
|
"""
|
||||||
|
for job in self.doc['jobs'].values():
|
||||||
|
if job['status'] != 'completed':
|
||||||
|
return 'running'
|
||||||
|
if job['result'] != 'success':
|
||||||
|
return 'fail'
|
||||||
|
return 'success'
|
||||||
|
|
||||||
|
def set_job_end(self, job_id, result, timestamp):
|
||||||
|
try:
|
||||||
|
job = self.doc['jobs'][job_id]
|
||||||
|
except Exception:
|
||||||
|
raise freezer_api_exc.BadDataFormat('job_id not found in session')
|
||||||
|
job['status'] = 'completed'
|
||||||
|
job['result'] = result
|
||||||
|
job['time_ended'] = timestamp
|
||||||
|
|
||||||
|
def set_job_start(self, job_id, timestamp):
|
||||||
|
try:
|
||||||
|
job = self.doc['jobs'][job_id]
|
||||||
|
except Exception:
|
||||||
|
raise freezer_api_exc.BadDataFormat('job_id not found in session')
|
||||||
|
job['status'] = 'running'
|
||||||
|
job['result'] = ''
|
||||||
|
job['time_started'] = timestamp
|
||||||
|
|
||||||
|
|
||||||
|
class SessionsJob(resource.BaseResource):
|
||||||
|
"""
|
||||||
|
Handler for endpoint: /v1/sessions/{session_id}/jobs/{job_id}
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, storage_driver):
|
||||||
|
self.db = storage_driver
|
||||||
|
|
||||||
|
@policy.enforce('sessions:job:add')
|
||||||
|
def on_put(self, req, resp, project_id, session_id, job_id):
|
||||||
|
"""
|
||||||
|
add a job to a session
|
||||||
|
|
||||||
|
:param req:
|
||||||
|
:param resp:
|
||||||
|
:param session_id:
|
||||||
|
:param job_id:
|
||||||
|
:return:
|
||||||
|
"""
|
||||||
|
|
||||||
|
user_id = req.get_header('X-User-ID', '')
|
||||||
|
|
||||||
|
# --- update session object
|
||||||
|
job_doc = self.db.get_job(project_id=project_id,
|
||||||
|
user_id=user_id, job_id=job_id)
|
||||||
|
|
||||||
|
job_schedule = job_doc.get('job_schedule', {})
|
||||||
|
session_update_doc = {
|
||||||
|
'jobs': {
|
||||||
|
job_id: {
|
||||||
|
'client_id': job_doc['client_id'],
|
||||||
|
'status': job_schedule.get('status', ''),
|
||||||
|
'result': job_schedule.get('result', ''),
|
||||||
|
'time_started': job_schedule.get('time_started', ''),
|
||||||
|
'time_ended': job_schedule.get('time_ended', '')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
self.db.update_session(project_id=project_id,
|
||||||
|
user_id=user_id,
|
||||||
|
session_id=session_id,
|
||||||
|
patch_doc=session_update_doc)
|
||||||
|
# --- update job object
|
||||||
|
session_doc = self.db.get_session(user_id=user_id,
|
||||||
|
session_id=session_id)
|
||||||
|
job_update_doc = {
|
||||||
|
'session_id': session_id,
|
||||||
|
'session_tag': session_doc['session_tag'],
|
||||||
|
'job_schedule': session_doc['schedule']
|
||||||
|
}
|
||||||
|
self.db.update_job(project_id=project_id,
|
||||||
|
user_id=user_id,
|
||||||
|
job_id=job_id,
|
||||||
|
patch_doc=job_update_doc)
|
||||||
|
resp.status = falcon.HTTP_204
|
||||||
|
|
||||||
|
@policy.enforce('sessions:job:remove')
|
||||||
|
def on_delete(self, req, resp, project_id, session_id, job_id):
|
||||||
|
"""
|
||||||
|
remove a job from the session
|
||||||
|
|
||||||
|
:param req:
|
||||||
|
:param resp:
|
||||||
|
:param session_id:
|
||||||
|
:param job_id:
|
||||||
|
:return:
|
||||||
|
"""
|
||||||
|
|
||||||
|
user_id = req.get_header('X-User-ID') or ''
|
||||||
|
|
||||||
|
session_doc = self.db.get_session(project_id=project_id,
|
||||||
|
user_id=user_id,
|
||||||
|
session_id=session_id)
|
||||||
|
session_doc['jobs'].pop(job_id, None)
|
||||||
|
|
||||||
|
# when replacing, db might raise a VersionConflictEngineException
|
||||||
|
self.db.replace_session(project_id=project_id,
|
||||||
|
user_id=user_id,
|
||||||
|
session_id=session_id,
|
||||||
|
doc=session_doc)
|
||||||
|
job_update_doc = {
|
||||||
|
'session_id': '',
|
||||||
|
'session_tag': 0,
|
||||||
|
'job_schedule': {
|
||||||
|
'event': 'stop'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
self.db.update_job(project_id=project_id,
|
||||||
|
user_id=user_id,
|
||||||
|
job_id=job_id,
|
||||||
|
patch_doc=job_update_doc)
|
||||||
|
resp.status = falcon.HTTP_204
|
|
@ -22,12 +22,15 @@ from oslo_log import log
|
||||||
|
|
||||||
from freezer_api.api.common import middleware
|
from freezer_api.api.common import middleware
|
||||||
from freezer_api.api import v1
|
from freezer_api.api import v1
|
||||||
|
from freezer_api.api import v2
|
||||||
|
|
||||||
|
|
||||||
LOG = log.getLogger(__name__)
|
LOG = log.getLogger(__name__)
|
||||||
|
|
||||||
VERSIONS = {
|
VERSIONS = {
|
||||||
'versions': [
|
'versions': [
|
||||||
v1.VERSION
|
v1.VERSION,
|
||||||
|
v2.VERSION
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -46,11 +49,17 @@ def api_versions(conf=None):
|
||||||
|
|
||||||
class Resource(object):
|
class Resource(object):
|
||||||
|
|
||||||
def __init__(self):
|
def _build_versions(self, host_url):
|
||||||
self.versions = json.dumps(VERSIONS, ensure_ascii=False)
|
updated_versions = {'versions': []}
|
||||||
|
for version in VERSIONS['versions']:
|
||||||
|
version['links'][0]['href'] = version['links'][0]['href'].format(
|
||||||
|
host_url
|
||||||
|
)
|
||||||
|
updated_versions['versions'].append(version)
|
||||||
|
return json.dumps(updated_versions, ensure_ascii=False)
|
||||||
|
|
||||||
def on_get(self, req, resp):
|
def on_get(self, req, resp):
|
||||||
resp.data = self.versions
|
resp.data = self._build_versions(req.url)
|
||||||
|
|
||||||
resp.status = falcon.HTTP_300
|
resp.status = falcon.HTTP_300
|
||||||
|
|
||||||
|
|
|
@ -25,11 +25,11 @@ from oslo_config import cfg
|
||||||
from oslo_log import log
|
from oslo_log import log
|
||||||
from paste import deploy
|
from paste import deploy
|
||||||
from paste import httpserver
|
from paste import httpserver
|
||||||
from paste import urlmap
|
|
||||||
|
|
||||||
from freezer_api.api.common import middleware
|
from freezer_api.api.common import middleware
|
||||||
from freezer_api.api.common import utils
|
from freezer_api.api.common import utils
|
||||||
from freezer_api.api import v1
|
from freezer_api.api import v1
|
||||||
|
from freezer_api.api import v2
|
||||||
from freezer_api.common import _i18n
|
from freezer_api.common import _i18n
|
||||||
from freezer_api.common import config
|
from freezer_api.common import config
|
||||||
from freezer_api.common import exceptions as freezer_api_exc
|
from freezer_api.common import exceptions as freezer_api_exc
|
||||||
|
@ -49,7 +49,9 @@ def configure_app(app, db=None):
|
||||||
:return:
|
:return:
|
||||||
"""
|
"""
|
||||||
if not db:
|
if not db:
|
||||||
db = driver.get_db()
|
db = driver.get_db(
|
||||||
|
driver='freezer_api.storage.elastic.ElasticSearchEngine'
|
||||||
|
)
|
||||||
|
|
||||||
# setup freezer policy
|
# setup freezer policy
|
||||||
policy.setup_policy(CONF)
|
policy.setup_policy(CONF)
|
||||||
|
@ -113,11 +115,36 @@ def build_app_v1():
|
||||||
return app
|
return app
|
||||||
|
|
||||||
|
|
||||||
def root_app_factory(loader, global_conf, **local_conf):
|
def build_app_v2():
|
||||||
"""Allows freezer to launch multiple applications at a time.
|
"""Building routes and forming the root freezer-api app
|
||||||
It will allow freezer to manage multiple versions.
|
This uses the 'middleware' named argument to specify middleware for falcon
|
||||||
|
instead of the 'before' and 'after' hooks that were removed after 0.3.0
|
||||||
|
(both approaches were available for versions 0.2.0 - 0.3.0)
|
||||||
|
:return: falcon WSGI app
|
||||||
"""
|
"""
|
||||||
return urlmap.urlmap_factory(loader, global_conf, **local_conf)
|
# injecting FreezerContext & hooks
|
||||||
|
middleware_list = [utils.FuncMiddleware(hook) for hook in
|
||||||
|
utils.before_hooks()]
|
||||||
|
middleware_list.append(middleware.RequireJSON())
|
||||||
|
middleware_list.append(middleware.JSONTranslator())
|
||||||
|
|
||||||
|
app = falcon.API(middleware=middleware_list)
|
||||||
|
db = driver.get_db()
|
||||||
|
|
||||||
|
# setup freezer policy
|
||||||
|
policy.setup_policy(CONF)
|
||||||
|
|
||||||
|
for exception_class in freezer_api_exc.exception_handlers_catalog:
|
||||||
|
app.add_error_handler(exception_class, exception_class.handle)
|
||||||
|
|
||||||
|
endpoint_catalog = [
|
||||||
|
('', v2.public_endpoints(db))
|
||||||
|
]
|
||||||
|
for version_path, endpoints in endpoint_catalog:
|
||||||
|
for route, resource in endpoints:
|
||||||
|
app.add_route(version_path + route, resource)
|
||||||
|
|
||||||
|
return app
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
|
|
|
@ -41,6 +41,10 @@ clients_mapping = {
|
||||||
"index": "not_analyzed",
|
"index": "not_analyzed",
|
||||||
"type": "string"
|
"type": "string"
|
||||||
},
|
},
|
||||||
|
"project_id": {
|
||||||
|
"index": "not_analyzed",
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
"uuid": {
|
"uuid": {
|
||||||
"index": "not_analyzed",
|
"index": "not_analyzed",
|
||||||
"type": "string"
|
"type": "string"
|
||||||
|
@ -217,6 +221,10 @@ backups_mapping = {
|
||||||
"job_id": {
|
"job_id": {
|
||||||
"index": "not_analyzed",
|
"index": "not_analyzed",
|
||||||
"type": "string"
|
"type": "string"
|
||||||
|
},
|
||||||
|
"project_id": {
|
||||||
|
"index": "not_analyzed",
|
||||||
|
"type": "string"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -239,6 +247,10 @@ jobs_mapping = {
|
||||||
"index": "not_analyzed",
|
"index": "not_analyzed",
|
||||||
"type": "string"
|
"type": "string"
|
||||||
},
|
},
|
||||||
|
"project_id": {
|
||||||
|
"index": "not_analyzed",
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
"freezer_action": {
|
"freezer_action": {
|
||||||
"properties": {
|
"properties": {
|
||||||
"action": {
|
"action": {
|
||||||
|
@ -400,6 +412,10 @@ jobs_mapping = {
|
||||||
"user_id": {
|
"user_id": {
|
||||||
"index": "not_analyzed",
|
"index": "not_analyzed",
|
||||||
"type": "string"
|
"type": "string"
|
||||||
|
},
|
||||||
|
"project_id": {
|
||||||
|
"index": "not_analyzed",
|
||||||
|
"type": "string"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -0,0 +1,241 @@
|
||||||
|
"""
|
||||||
|
(c) Copyright 2014,2015 Hewlett-Packard Development Company, L.P.
|
||||||
|
|
||||||
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
you may not use this file except in compliance with the License.
|
||||||
|
You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing, software
|
||||||
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
See the License for the specific language governing permissions and
|
||||||
|
limitations under the License.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import time
|
||||||
|
import uuid
|
||||||
|
|
||||||
|
import jsonschema
|
||||||
|
|
||||||
|
from freezer_api.common import exceptions as freezer_api_exc
|
||||||
|
from freezer_api.common import json_schemas
|
||||||
|
|
||||||
|
|
||||||
|
class BackupMetadataDoc(object):
|
||||||
|
"""
|
||||||
|
Wraps a backup_metadata dict and adds some utility methods,
|
||||||
|
and fields
|
||||||
|
"""
|
||||||
|
def __init__(self, project_id='', user_id='', user_name='', data={}):
|
||||||
|
self.project_id = project_id
|
||||||
|
self.user_id = user_id
|
||||||
|
self.user_name = user_name
|
||||||
|
self.backup_id = uuid.uuid4().hex
|
||||||
|
self.data = data
|
||||||
|
|
||||||
|
def is_valid(self):
|
||||||
|
try:
|
||||||
|
assert (self.project_id is not '')
|
||||||
|
assert (self.backup_id is not '')
|
||||||
|
assert (self.user_id is not '')
|
||||||
|
assert (self.data['container'] is not '')
|
||||||
|
assert (self.data['hostname'] is not '')
|
||||||
|
assert (self.data['backup_name'] is not '')
|
||||||
|
except Exception:
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
|
def serialize(self):
|
||||||
|
return {'backup_id': self.backup_id,
|
||||||
|
'user_id': self.user_id,
|
||||||
|
'project_id': self.project_id,
|
||||||
|
'user_name': self.user_name,
|
||||||
|
'backup_metadata': self.data}
|
||||||
|
|
||||||
|
|
||||||
|
class JobDoc(object):
|
||||||
|
job_doc_validator = jsonschema.Draft4Validator(
|
||||||
|
schema=json_schemas.job_schema)
|
||||||
|
job_patch_validator = jsonschema.Draft4Validator(
|
||||||
|
schema=json_schemas.job_patch_schema)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def validate(doc):
|
||||||
|
try:
|
||||||
|
JobDoc.job_doc_validator.validate(doc)
|
||||||
|
except Exception as e:
|
||||||
|
raise freezer_api_exc.BadDataFormat(str(e).splitlines()[0])
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def validate_patch(doc):
|
||||||
|
try:
|
||||||
|
JobDoc.job_patch_validator.validate(doc)
|
||||||
|
except Exception as e:
|
||||||
|
raise freezer_api_exc.BadDataFormat(str(e).splitlines()[0])
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def create_patch(doc):
|
||||||
|
# changes in user_id or job_id are not allowed
|
||||||
|
doc.pop('user_id', None)
|
||||||
|
doc.pop('job_id', None)
|
||||||
|
JobDoc.validate_patch(doc)
|
||||||
|
return doc
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def create(doc, project_id, user_id):
|
||||||
|
job_schedule = doc.get('job_schedule', {})
|
||||||
|
job_schedule.update({
|
||||||
|
'time_created': int(time.time()),
|
||||||
|
'time_started': -1,
|
||||||
|
'time_ended': -1
|
||||||
|
})
|
||||||
|
doc.update({
|
||||||
|
'user_id': user_id,
|
||||||
|
'project_id': project_id,
|
||||||
|
'job_id': uuid.uuid4().hex,
|
||||||
|
'job_schedule': job_schedule
|
||||||
|
})
|
||||||
|
JobDoc.validate(doc)
|
||||||
|
return doc
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def update(doc, project_id, user_id, job_id):
|
||||||
|
doc.update({
|
||||||
|
'user_id': user_id,
|
||||||
|
'job_id': job_id,
|
||||||
|
'project_id': project_id
|
||||||
|
})
|
||||||
|
JobDoc.validate(doc)
|
||||||
|
return doc
|
||||||
|
|
||||||
|
|
||||||
|
class ActionDoc(object):
|
||||||
|
action_doc_validator = jsonschema.Draft4Validator(
|
||||||
|
schema=json_schemas.action_schema)
|
||||||
|
action_patch_validator = jsonschema.Draft4Validator(
|
||||||
|
schema=json_schemas.action_patch_schema)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def validate(doc):
|
||||||
|
try:
|
||||||
|
ActionDoc.action_doc_validator.validate(doc)
|
||||||
|
except Exception as e:
|
||||||
|
raise freezer_api_exc.BadDataFormat(str(e).splitlines()[0])
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def validate_patch(doc):
|
||||||
|
try:
|
||||||
|
ActionDoc.action_patch_validator.validate(doc)
|
||||||
|
except Exception as e:
|
||||||
|
raise freezer_api_exc.BadDataFormat(str(e).splitlines()[0])
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def create_patch(doc):
|
||||||
|
# changes in user_id or action_id are not allowed
|
||||||
|
doc.pop('user_id', None)
|
||||||
|
doc.pop('action_id', None)
|
||||||
|
ActionDoc.validate_patch(doc)
|
||||||
|
return doc
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def create(doc, user_id, project_id):
|
||||||
|
action_id = doc.get('action_id', uuid.uuid4().hex)
|
||||||
|
doc.update({
|
||||||
|
'user_id': user_id,
|
||||||
|
'project_id': project_id,
|
||||||
|
'action_id': action_id,
|
||||||
|
})
|
||||||
|
ActionDoc.validate(doc)
|
||||||
|
return doc
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def update(doc, user_id, action_id, project_id):
|
||||||
|
doc.update({
|
||||||
|
'user_id': user_id,
|
||||||
|
'project_id': project_id,
|
||||||
|
'action_id': action_id,
|
||||||
|
})
|
||||||
|
ActionDoc.validate(doc)
|
||||||
|
return doc
|
||||||
|
|
||||||
|
|
||||||
|
class SessionDoc(object):
|
||||||
|
session_doc_validator = jsonschema.Draft4Validator(
|
||||||
|
schema=json_schemas.session_schema)
|
||||||
|
session_patch_validator = jsonschema.Draft4Validator(
|
||||||
|
schema=json_schemas.session_patch_schema)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def validate(doc):
|
||||||
|
try:
|
||||||
|
SessionDoc.session_doc_validator.validate(doc)
|
||||||
|
except Exception as e:
|
||||||
|
raise freezer_api_exc.BadDataFormat(str(e).splitlines()[0])
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def validate_patch(doc):
|
||||||
|
try:
|
||||||
|
SessionDoc.session_patch_validator.validate(doc)
|
||||||
|
except Exception as e:
|
||||||
|
raise freezer_api_exc.BadDataFormat(str(e).splitlines()[0])
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def create_patch(doc):
|
||||||
|
# changes in user_id or session_id are not allowed
|
||||||
|
doc.pop('user_id', None)
|
||||||
|
doc.pop('session_id', None)
|
||||||
|
SessionDoc.validate_patch(doc)
|
||||||
|
return doc
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def create(doc, user_id, hold_off=30, project_id=None):
|
||||||
|
doc.update({
|
||||||
|
'user_id': user_id,
|
||||||
|
'project_id': project_id,
|
||||||
|
'session_id': uuid.uuid4().hex,
|
||||||
|
'session_tag': doc.get('session_tag', 0),
|
||||||
|
'status': 'active',
|
||||||
|
'last_start': '',
|
||||||
|
'jobs': []
|
||||||
|
})
|
||||||
|
doc['hold_off'] = doc.get('hold_off', hold_off)
|
||||||
|
SessionDoc.validate(doc)
|
||||||
|
return doc
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def update(doc, user_id, session_id, project_id):
|
||||||
|
doc.update({
|
||||||
|
'user_id': user_id,
|
||||||
|
'project_id': project_id,
|
||||||
|
'session_id': session_id,
|
||||||
|
})
|
||||||
|
SessionDoc.validate(doc)
|
||||||
|
return doc
|
||||||
|
|
||||||
|
|
||||||
|
class ClientDoc(object):
|
||||||
|
client_doc_validator = jsonschema.Draft4Validator(
|
||||||
|
schema=json_schemas.client_schema)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def validate(doc):
|
||||||
|
try:
|
||||||
|
ClientDoc.client_doc_validator.validate(doc)
|
||||||
|
except Exception as e:
|
||||||
|
raise freezer_api_exc.BadDataFormat(str(e).splitlines()[0])
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def create(doc, project_id, user_id):
|
||||||
|
if 'uuid' not in doc:
|
||||||
|
doc.update({
|
||||||
|
'uuid': uuid.uuid4().hex
|
||||||
|
})
|
||||||
|
doc = {
|
||||||
|
'client': doc,
|
||||||
|
'user_id': user_id,
|
||||||
|
'project_id': project_id
|
||||||
|
}
|
||||||
|
ClientDoc.validate(doc)
|
||||||
|
return doc
|
|
@ -218,6 +218,11 @@ job_schema = {
|
||||||
"pattern": "^[\w-]+$",
|
"pattern": "^[\w-]+$",
|
||||||
"type": "string"
|
"type": "string"
|
||||||
},
|
},
|
||||||
|
"project_id": {
|
||||||
|
"id": "project_id",
|
||||||
|
"pattern": "^[\w-]+$",
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
"description": {
|
"description": {
|
||||||
"id": "description",
|
"id": "description",
|
||||||
"type": "string"
|
"type": "string"
|
||||||
|
@ -309,6 +314,11 @@ job_patch_schema = {
|
||||||
"pattern": "^[\w-]+$",
|
"pattern": "^[\w-]+$",
|
||||||
"type": "string"
|
"type": "string"
|
||||||
},
|
},
|
||||||
|
"project_id": {
|
||||||
|
"id": "project_id",
|
||||||
|
"pattern": "^[\w-]+$",
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
"description": {
|
"description": {
|
||||||
"id": "description",
|
"id": "description",
|
||||||
"type": "string"
|
"type": "string"
|
||||||
|
@ -336,6 +346,11 @@ additional_action_properties = {
|
||||||
"pattern": "^[\w-]+$",
|
"pattern": "^[\w-]+$",
|
||||||
"type": "string"
|
"type": "string"
|
||||||
},
|
},
|
||||||
|
"project_id": {
|
||||||
|
"id": "project_id",
|
||||||
|
"pattern": "^[\w-]+$",
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@ -384,6 +399,11 @@ session_schema = {
|
||||||
"pattern": "^[\w-]+$",
|
"pattern": "^[\w-]+$",
|
||||||
"type": "string"
|
"type": "string"
|
||||||
},
|
},
|
||||||
|
"project_id": {
|
||||||
|
"id": "project_id",
|
||||||
|
"pattern": "^[\w-]+$",
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
"session_tag": {
|
"session_tag": {
|
||||||
"id": "session_tag",
|
"id": "session_tag",
|
||||||
"type": "integer"
|
"type": "integer"
|
||||||
|
@ -425,6 +445,11 @@ session_patch_schema = {
|
||||||
"pattern": "^[\w-]+$",
|
"pattern": "^[\w-]+$",
|
||||||
"type": "string"
|
"type": "string"
|
||||||
},
|
},
|
||||||
|
"project_id": {
|
||||||
|
"id": "project_id",
|
||||||
|
"pattern": "^[\w-]+$",
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
"session_tag": {
|
"session_tag": {
|
||||||
"id": "session_tag",
|
"id": "session_tag",
|
||||||
"type": "integer"
|
"type": "integer"
|
||||||
|
@ -479,6 +504,11 @@ client_schema = {
|
||||||
"id": "user_id",
|
"id": "user_id",
|
||||||
"pattern": "^[\w-]+$",
|
"pattern": "^[\w-]+$",
|
||||||
"type": "string"
|
"type": "string"
|
||||||
|
},
|
||||||
|
"project_id": {
|
||||||
|
"id": "project_id",
|
||||||
|
"pattern": "^[\w-]+$",
|
||||||
|
"type": "string"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"additionalProperties": True,
|
"additionalProperties": True,
|
||||||
|
|
|
@ -100,7 +100,7 @@ class JobDoc(object):
|
||||||
def update(doc, user_id, job_id):
|
def update(doc, user_id, job_id):
|
||||||
doc.update({
|
doc.update({
|
||||||
'user_id': user_id,
|
'user_id': user_id,
|
||||||
'job_id': job_id,
|
'job_id': job_id
|
||||||
})
|
})
|
||||||
JobDoc.validate(doc)
|
JobDoc.validate(doc)
|
||||||
return doc
|
return doc
|
||||||
|
|
|
@ -18,6 +18,7 @@ import sys
|
||||||
|
|
||||||
import falcon
|
import falcon
|
||||||
from paste import deploy
|
from paste import deploy
|
||||||
|
from paste import urlmap
|
||||||
import pkg_resources
|
import pkg_resources
|
||||||
|
|
||||||
from freezer_api.cmd import api
|
from freezer_api.cmd import api
|
||||||
|
@ -29,7 +30,14 @@ from freezer_api.common import config
|
||||||
FALCON_MINVERSION_MIDDLEWARE = pkg_resources.parse_version('0.2.0b1')
|
FALCON_MINVERSION_MIDDLEWARE = pkg_resources.parse_version('0.2.0b1')
|
||||||
|
|
||||||
|
|
||||||
def freezer_app_factory(global_conf, **local_conf):
|
def root_app_factory(loader, global_conf, **local_conf):
|
||||||
|
"""Allows freezer to launch multiple applications at a time.
|
||||||
|
It will allow freezer to manage multiple versions.
|
||||||
|
"""
|
||||||
|
return urlmap.urlmap_factory(loader, global_conf, **local_conf)
|
||||||
|
|
||||||
|
|
||||||
|
def freezer_appv1_factory(global_conf, **local_conf):
|
||||||
current_version = pkg_resources.parse_version(
|
current_version = pkg_resources.parse_version(
|
||||||
falcon.__version__ if hasattr(falcon,
|
falcon.__version__ if hasattr(falcon,
|
||||||
'__version__') else falcon.version)
|
'__version__') else falcon.version)
|
||||||
|
@ -42,6 +50,10 @@ def freezer_app_factory(global_conf, **local_conf):
|
||||||
return api.build_app_v1()
|
return api.build_app_v1()
|
||||||
|
|
||||||
|
|
||||||
|
def freezer_appv2_factory(global_conf, **local_conf):
|
||||||
|
return api.build_app_v2()
|
||||||
|
|
||||||
|
|
||||||
def initialize_app(conf=None, name='main'):
|
def initialize_app(conf=None, name='main'):
|
||||||
""" initializing app for paste to deploy it """
|
""" initializing app for paste to deploy it """
|
||||||
|
|
||||||
|
|
|
@ -48,13 +48,16 @@ def register_storage_opts():
|
||||||
CONF.register_opts(_OPTS, group=opt_group)
|
CONF.register_opts(_OPTS, group=opt_group)
|
||||||
|
|
||||||
|
|
||||||
def get_db():
|
def get_db(driver=None):
|
||||||
"""Automatically loads the database driver to be used."""
|
"""Automatically loads the database driver to be used."""
|
||||||
storage = CONF.get('storage')
|
storage = CONF.get('storage')
|
||||||
|
if not driver:
|
||||||
|
driver = storage['driver']
|
||||||
driver_instance = importutils.import_object(
|
driver_instance = importutils.import_object(
|
||||||
storage['driver'],
|
driver,
|
||||||
backend=storage['backend']
|
backend=storage['backend']
|
||||||
)
|
)
|
||||||
|
|
||||||
return driver_instance
|
return driver_instance
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,640 @@
|
||||||
|
"""
|
||||||
|
Copyright 2015 Hewlett-Packard
|
||||||
|
|
||||||
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
you may not use this file except in compliance with the License.
|
||||||
|
You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing, software
|
||||||
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
See the License for the specific language governing permissions and
|
||||||
|
limitations under the License.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
import elasticsearch
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
|
||||||
|
from freezer_api.common import elasticv2_utils as utils
|
||||||
|
from freezer_api.common import exceptions as freezer_api_exc
|
||||||
|
|
||||||
|
from oslo_config import cfg
|
||||||
|
from oslo_log import log
|
||||||
|
|
||||||
|
CONF = cfg.CONF
|
||||||
|
LOG = log.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class TypeManagerV2(object):
|
||||||
|
def __init__(self, es, doc_type, index):
|
||||||
|
self.es = es
|
||||||
|
self.index = index
|
||||||
|
self.doc_type = doc_type
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def get_base_search_filter(project_id, user_id=None, search=None):
|
||||||
|
search = search or {}
|
||||||
|
project_id_filter = {"term": {"project_id": project_id}}
|
||||||
|
base_filter = [project_id_filter]
|
||||||
|
if user_id:
|
||||||
|
user_id_filter = {"term": {"user_id": user_id}}
|
||||||
|
base_filter.append(user_id_filter)
|
||||||
|
|
||||||
|
match_list = [{"match": m} for m in search.get('match', [])]
|
||||||
|
match_not_list = [{"match": m} for m in search.get('match_not', [])]
|
||||||
|
base_filter.append({"query": {"bool": {"must": match_list,
|
||||||
|
"must_not": match_not_list}}})
|
||||||
|
return base_filter
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def get_search_query(project_id, doc_id, user_id=None, search=None):
|
||||||
|
search = search or {}
|
||||||
|
try:
|
||||||
|
base_filter = TypeManagerV2.get_base_search_filter(
|
||||||
|
project_id=project_id,
|
||||||
|
user_id=user_id,
|
||||||
|
search=search
|
||||||
|
)
|
||||||
|
query_filter = {"filter": {"bool": {"must": base_filter}}}
|
||||||
|
return {'query': {'filtered': query_filter}}
|
||||||
|
except Exception:
|
||||||
|
raise freezer_api_exc.StorageEngineError(
|
||||||
|
message='search operation failed: query not valid')
|
||||||
|
|
||||||
|
def get(self, project_id, doc_id, user_id=None):
|
||||||
|
try:
|
||||||
|
res = self.es.get(index=self.index,
|
||||||
|
doc_type=self.doc_type,
|
||||||
|
id=doc_id)
|
||||||
|
doc = res['_source']
|
||||||
|
except elasticsearch.TransportError:
|
||||||
|
raise freezer_api_exc.DocumentNotFound(
|
||||||
|
message='No document found with ID:{0}'.format(doc_id))
|
||||||
|
except Exception as e:
|
||||||
|
raise freezer_api_exc.StorageEngineError(
|
||||||
|
message='Get operation failed: {}'.format(e))
|
||||||
|
if doc['project_id'] != project_id:
|
||||||
|
raise freezer_api_exc.AccessForbidden("You are not allowed to"
|
||||||
|
" access")
|
||||||
|
if user_id:
|
||||||
|
if doc['user_id'] != user_id:
|
||||||
|
raise freezer_api_exc.AccessForbidden(
|
||||||
|
"Document access forbidden"
|
||||||
|
)
|
||||||
|
if '_version' in res:
|
||||||
|
doc['_version'] = res['_version']
|
||||||
|
return doc
|
||||||
|
|
||||||
|
def search(self, project_id, user_id=None, doc_id=None, search=None,
|
||||||
|
offset=0, limit=10):
|
||||||
|
search = search or {}
|
||||||
|
query_dsl = self.get_search_query(
|
||||||
|
project_id=project_id,
|
||||||
|
user_id=user_id,
|
||||||
|
doc_id=doc_id,
|
||||||
|
search=search
|
||||||
|
)
|
||||||
|
try:
|
||||||
|
res = self.es.search(index=self.index, doc_type=self.doc_type,
|
||||||
|
size=limit, from_=offset, body=query_dsl)
|
||||||
|
except elasticsearch.ConnectionError:
|
||||||
|
raise freezer_api_exc.StorageEngineError(
|
||||||
|
message='unable to connect to db server')
|
||||||
|
except Exception as e:
|
||||||
|
raise freezer_api_exc.StorageEngineError(
|
||||||
|
message='search operation failed: {0}'.format(e))
|
||||||
|
hit_list = res['hits']['hits']
|
||||||
|
return [x['_source'] for x in hit_list]
|
||||||
|
|
||||||
|
def insert(self, doc, doc_id=None):
|
||||||
|
try:
|
||||||
|
# remove _version from the document
|
||||||
|
doc.pop('_version', None)
|
||||||
|
res = self.es.index(index=self.index, doc_type=self.doc_type,
|
||||||
|
body=doc, id=doc_id)
|
||||||
|
created = res['created']
|
||||||
|
version = res['_version']
|
||||||
|
self.es.indices.refresh(index=self.index)
|
||||||
|
except elasticsearch.TransportError as e:
|
||||||
|
if e.status_code == 409:
|
||||||
|
raise freezer_api_exc.DocumentExists(message=e.error)
|
||||||
|
raise freezer_api_exc.StorageEngineError(
|
||||||
|
message='index operation failed {0}'.format(e))
|
||||||
|
except Exception as e:
|
||||||
|
raise freezer_api_exc.StorageEngineError(
|
||||||
|
message='index operation failed {0}'.format(e))
|
||||||
|
return created, version
|
||||||
|
|
||||||
|
def delete(self, project_id, doc_id, user_id=None):
|
||||||
|
query_dsl = self.get_search_query(
|
||||||
|
project_id=project_id,
|
||||||
|
user_id=user_id,
|
||||||
|
doc_id=doc_id
|
||||||
|
)
|
||||||
|
try:
|
||||||
|
results = self.es.search(index=self.index,
|
||||||
|
doc_type=self.doc_type,
|
||||||
|
body=query_dsl)
|
||||||
|
results = results['hits']['hits']
|
||||||
|
except Exception as e:
|
||||||
|
raise freezer_api_exc.StorageEngineError(
|
||||||
|
message='Scan operation failed: {0}'.format(e))
|
||||||
|
id = None
|
||||||
|
for res in results:
|
||||||
|
id = res.get('_id')
|
||||||
|
try:
|
||||||
|
self.es.delete(index=self.index, doc_type=self.doc_type, id=id)
|
||||||
|
self.es.indices.refresh(index=self.index)
|
||||||
|
except Exception as e:
|
||||||
|
raise freezer_api_exc.StorageEngineError(
|
||||||
|
message='Delete operation failed: {0}'.format(e))
|
||||||
|
return id
|
||||||
|
|
||||||
|
|
||||||
|
class BackupTypeManagerV2(TypeManagerV2):
|
||||||
|
def __init__(self, es, doc_type, index='freezer'):
|
||||||
|
TypeManagerV2.__init__(self, es, doc_type, index=index)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def get_search_query(project_id, doc_id, user_id=None, search=None):
|
||||||
|
search = search or {}
|
||||||
|
base_filter = TypeManagerV2.get_base_search_filter(
|
||||||
|
project_id=project_id,
|
||||||
|
user_id=user_id,
|
||||||
|
search=search
|
||||||
|
)
|
||||||
|
if doc_id is not None:
|
||||||
|
base_filter.append({"term": {"backup_id": doc_id}})
|
||||||
|
|
||||||
|
if 'time_after' in search:
|
||||||
|
base_filter.append(
|
||||||
|
{"range": {"timestamp": {"gte": int(search['time_after'])}}}
|
||||||
|
)
|
||||||
|
|
||||||
|
if 'time_before' in search:
|
||||||
|
base_filter.append(
|
||||||
|
{"range": {"timestamp": {"lte": int(search['time_before'])}}}
|
||||||
|
)
|
||||||
|
query_filter = {"filter": {"bool": {"must": base_filter}}}
|
||||||
|
return {'query': {'filtered': query_filter}}
|
||||||
|
|
||||||
|
|
||||||
|
class ClientTypeManagerV2(TypeManagerV2):
|
||||||
|
def __init__(self, es, doc_type, index='freezer'):
|
||||||
|
TypeManagerV2.__init__(self, es, doc_type, index=index)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def get_search_query(project_id, doc_id, user_id=None, search=None):
|
||||||
|
search = search or {}
|
||||||
|
base_filter = TypeManagerV2.get_base_search_filter(
|
||||||
|
project_id=project_id,
|
||||||
|
user_id=user_id,
|
||||||
|
search=search
|
||||||
|
)
|
||||||
|
if doc_id is not None:
|
||||||
|
base_filter.append({"term": {"client.client_id": doc_id}})
|
||||||
|
query_filter = {"filter": {"bool": {"must": base_filter}}}
|
||||||
|
return {'query': {'filtered': query_filter}}
|
||||||
|
|
||||||
|
|
||||||
|
class JobTypeManagerV2(TypeManagerV2):
|
||||||
|
def __init__(self, es, doc_type, index='freezer'):
|
||||||
|
TypeManagerV2.__init__(self, es, doc_type, index=index)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def get_search_query(project_id, doc_id, user_id=None, search=None):
|
||||||
|
search = search or {}
|
||||||
|
base_filter = TypeManagerV2.get_base_search_filter(
|
||||||
|
project_id=project_id,
|
||||||
|
user_id=user_id,
|
||||||
|
search=search
|
||||||
|
)
|
||||||
|
if doc_id is not None:
|
||||||
|
base_filter.append({"term": {"job_id": doc_id}})
|
||||||
|
query_filter = {"filter": {"bool": {"must": base_filter}}}
|
||||||
|
return {'query': {'filtered': query_filter}}
|
||||||
|
|
||||||
|
def update(self, job_id, job_update_doc):
|
||||||
|
# remove _version from the document
|
||||||
|
job_update_doc.pop('_version', 0)
|
||||||
|
update_doc = {"doc": job_update_doc}
|
||||||
|
try:
|
||||||
|
res = self.es.update(index=self.index, doc_type=self.doc_type,
|
||||||
|
id=job_id, body=update_doc)
|
||||||
|
version = res['_version']
|
||||||
|
self.es.indices.refresh(index=self.index)
|
||||||
|
except elasticsearch.TransportError as e:
|
||||||
|
if e.status_code == 409:
|
||||||
|
raise freezer_api_exc.DocumentExists(message=e.error)
|
||||||
|
raise freezer_api_exc.DocumentNotFound(
|
||||||
|
message='Unable to find job to update with id'
|
||||||
|
' {0} {1}'.format(job_id, e))
|
||||||
|
except Exception:
|
||||||
|
raise freezer_api_exc.StorageEngineError(
|
||||||
|
message='Unable to update job with id {0}'.format(job_id))
|
||||||
|
return version
|
||||||
|
|
||||||
|
|
||||||
|
class ActionTypeManagerV2(TypeManagerV2):
|
||||||
|
def __init__(self, es, doc_type, index='freezer'):
|
||||||
|
TypeManagerV2.__init__(self, es, doc_type, index=index)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def get_search_query(project_id, doc_id, user_id=None, search=None):
|
||||||
|
search = search or {}
|
||||||
|
base_filter = TypeManagerV2.get_base_search_filter(
|
||||||
|
project_id=project_id,
|
||||||
|
user_id=user_id,
|
||||||
|
search=search
|
||||||
|
)
|
||||||
|
if doc_id is not None:
|
||||||
|
base_filter.append({"term": {"action_id": doc_id}})
|
||||||
|
query_filter = {"filter": {"bool": {"must": base_filter}}}
|
||||||
|
return {'query': {'filtered': query_filter}}
|
||||||
|
|
||||||
|
def update(self, action_id, action_update_doc):
|
||||||
|
# remove _version from the document
|
||||||
|
action_update_doc.pop('_version', 0)
|
||||||
|
update_doc = {"doc": action_update_doc}
|
||||||
|
try:
|
||||||
|
res = self.es.update(index=self.index, doc_type=self.doc_type,
|
||||||
|
id=action_id, body=update_doc)
|
||||||
|
version = res['_version']
|
||||||
|
self.es.indices.refresh(index=self.index)
|
||||||
|
except elasticsearch.TransportError as e:
|
||||||
|
if e.status_code == 409:
|
||||||
|
raise freezer_api_exc.DocumentExists(message=e.error)
|
||||||
|
raise freezer_api_exc.DocumentNotFound(
|
||||||
|
message='Unable to find action to update with id'
|
||||||
|
' {0}'.format(action_id))
|
||||||
|
except Exception:
|
||||||
|
raise freezer_api_exc.StorageEngineError(
|
||||||
|
message='Unable to update action with id'
|
||||||
|
' {0}'.format(action_id))
|
||||||
|
return version
|
||||||
|
|
||||||
|
|
||||||
|
class SessionTypeManagerV2(TypeManagerV2):
|
||||||
|
def __init__(self, es, doc_type, index='freezer'):
|
||||||
|
TypeManagerV2.__init__(self, es, doc_type, index=index)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def get_search_query(project_id, doc_id, user_id=None, search=None):
|
||||||
|
search = search or {}
|
||||||
|
base_filter = TypeManagerV2.get_base_search_filter(
|
||||||
|
project_id=project_id,
|
||||||
|
user_id=user_id,
|
||||||
|
search=search
|
||||||
|
)
|
||||||
|
if doc_id is not None:
|
||||||
|
base_filter.append({"term": {"session_id": doc_id}})
|
||||||
|
query_filter = {"filter": {"bool": {"must": base_filter}}}
|
||||||
|
return {'query': {'filtered': query_filter}}
|
||||||
|
|
||||||
|
def update(self, session_id, session_update_doc):
|
||||||
|
# remove _version from the document
|
||||||
|
session_update_doc.pop('_version', 0)
|
||||||
|
update_doc = {"doc": session_update_doc}
|
||||||
|
try:
|
||||||
|
res = self.es.update(index=self.index, doc_type=self.doc_type,
|
||||||
|
id=session_id, body=update_doc)
|
||||||
|
version = res['_version']
|
||||||
|
self.es.indices.refresh(index=self.index)
|
||||||
|
except elasticsearch.TransportError as e:
|
||||||
|
if e.status_code == 409:
|
||||||
|
raise freezer_api_exc.DocumentExists(message=e.error)
|
||||||
|
raise freezer_api_exc.DocumentNotFound(
|
||||||
|
message='Unable to update session ID: {0}, '
|
||||||
|
'Error: {1}'.format(session_id, e))
|
||||||
|
except Exception:
|
||||||
|
raise freezer_api_exc.StorageEngineError(
|
||||||
|
message='Unable to update session with id'
|
||||||
|
' {0}'.format(session_id))
|
||||||
|
return version
|
||||||
|
|
||||||
|
|
||||||
|
class ElasticSearchEngineV2(object):
|
||||||
|
|
||||||
|
_OPTS = [
|
||||||
|
cfg.StrOpt('hosts',
|
||||||
|
default='http://localhost:9200',
|
||||||
|
deprecated_name='endpoint',
|
||||||
|
help='specify the storage hosts'),
|
||||||
|
cfg.StrOpt('index',
|
||||||
|
default='freezer',
|
||||||
|
help='specify the name of the elasticsearch index'),
|
||||||
|
cfg.IntOpt('timeout',
|
||||||
|
default=60,
|
||||||
|
help='specify the connection timeout'),
|
||||||
|
cfg.IntOpt('retries',
|
||||||
|
default=20,
|
||||||
|
help='number of retries to allow before raising and error'),
|
||||||
|
cfg.BoolOpt('use_ssl',
|
||||||
|
default=False,
|
||||||
|
help='explicitly turn on SSL'),
|
||||||
|
cfg.BoolOpt('verify_certs',
|
||||||
|
default=False,
|
||||||
|
help='turn on SSL certs verification'),
|
||||||
|
cfg.StrOpt('ca_certs',
|
||||||
|
help='path to CA certs on disk'),
|
||||||
|
cfg.IntOpt('number_of_replicas',
|
||||||
|
default=0,
|
||||||
|
help='Number of replicas for elk cluster. Default is 0. '
|
||||||
|
'Use 0 for no replicas. This should be set to (number '
|
||||||
|
'of node in the ES cluter -1).')
|
||||||
|
]
|
||||||
|
|
||||||
|
def __init__(self, backend):
|
||||||
|
"""backend: name of the section in the config file to load
|
||||||
|
elasticsearch opts
|
||||||
|
"""
|
||||||
|
self.index = None
|
||||||
|
self.es = None
|
||||||
|
self.backup_manager = None
|
||||||
|
self.client_manager = None
|
||||||
|
self.job_manager = None
|
||||||
|
self.action_manager = None
|
||||||
|
self.session_manager = None
|
||||||
|
# register elasticsearch opts
|
||||||
|
CONF.register_opts(self._OPTS, group=backend)
|
||||||
|
self.conf = dict(CONF.get(backend))
|
||||||
|
self.conf['hosts'] = self.conf['hosts'].split(',')
|
||||||
|
self.backend = backend
|
||||||
|
self._validate_opts()
|
||||||
|
self.init(**self.conf)
|
||||||
|
|
||||||
|
def _validate_opts(self):
|
||||||
|
if not 'hosts' or 'endpoint' in self.conf.keys():
|
||||||
|
raise ValueError("Couldn't find hosts in {0} section".format(
|
||||||
|
self.backend)
|
||||||
|
)
|
||||||
|
if self.conf.get('ca_certs'):
|
||||||
|
if not os.path.isfile(self.conf.get('ca_certs')):
|
||||||
|
raise Exception("File not found: ca_certs file ({0}) not "
|
||||||
|
"found".format(self.conf.get('ca_certs')))
|
||||||
|
|
||||||
|
def get_opts(self):
|
||||||
|
return self._OPTS
|
||||||
|
|
||||||
|
def init(self, index='freezer', **kwargs):
|
||||||
|
self.index = index
|
||||||
|
self.es = elasticsearch.Elasticsearch(**kwargs)
|
||||||
|
logging.info('Storage backend: Elasticsearch at'
|
||||||
|
' {0}'.format(kwargs['hosts']))
|
||||||
|
self.backup_manager = BackupTypeManagerV2(self.es, 'backups')
|
||||||
|
self.client_manager = ClientTypeManagerV2(self.es, 'clients')
|
||||||
|
self.job_manager = JobTypeManagerV2(self.es, 'jobs')
|
||||||
|
self.action_manager = ActionTypeManagerV2(self.es, 'actions')
|
||||||
|
self.session_manager = SessionTypeManagerV2(self.es, 'sessions')
|
||||||
|
|
||||||
|
def get_backup(self, user_id, backup_id, project_id=None):
|
||||||
|
return self.backup_manager.get(
|
||||||
|
project_id=project_id,
|
||||||
|
user_id=user_id,
|
||||||
|
doc_id=backup_id
|
||||||
|
)
|
||||||
|
|
||||||
|
def search_backup(self, user_id, offset=0, limit=10, search=None,
|
||||||
|
project_id=None):
|
||||||
|
search = search or {}
|
||||||
|
return self.backup_manager.search(project_id=project_id,
|
||||||
|
user_id=user_id,
|
||||||
|
search=search,
|
||||||
|
offset=offset,
|
||||||
|
limit=limit)
|
||||||
|
|
||||||
|
def add_backup(self, project_id, user_id, user_name, doc):
|
||||||
|
# raises if data is malformed (HTTP_400) or already present (HTTP_409)
|
||||||
|
backup_metadata_doc = utils.BackupMetadataDoc(
|
||||||
|
project_id,
|
||||||
|
user_id,
|
||||||
|
user_name,
|
||||||
|
doc
|
||||||
|
)
|
||||||
|
if not backup_metadata_doc.is_valid():
|
||||||
|
raise freezer_api_exc.BadDataFormat(
|
||||||
|
message='Bad Data Format')
|
||||||
|
backup_id = backup_metadata_doc.backup_id
|
||||||
|
self.backup_manager.insert(backup_metadata_doc.serialize(), backup_id)
|
||||||
|
return backup_id
|
||||||
|
|
||||||
|
def delete_backup(self, project_id, user_id, backup_id):
|
||||||
|
return self.backup_manager.delete(project_id, user_id, backup_id)
|
||||||
|
|
||||||
|
def get_client(self, project_id, user_id, client_id=None,
|
||||||
|
offset=0, limit=10, search=None):
|
||||||
|
search = search or {}
|
||||||
|
return self.client_manager.search(project_id=project_id,
|
||||||
|
user_id=user_id,
|
||||||
|
doc_id=client_id,
|
||||||
|
search=search,
|
||||||
|
offset=offset,
|
||||||
|
limit=limit)
|
||||||
|
|
||||||
|
def add_client(self, project_id, user_id, doc):
|
||||||
|
client_doc = utils.ClientDoc.create(doc, project_id, user_id)
|
||||||
|
client_id = client_doc['client']['client_id']
|
||||||
|
existing = self.client_manager.search(
|
||||||
|
project_id=project_id,
|
||||||
|
user_id=user_id,
|
||||||
|
doc_id=client_id
|
||||||
|
)
|
||||||
|
if existing:
|
||||||
|
raise freezer_api_exc.DocumentExists(
|
||||||
|
message='Client already registered with ID'
|
||||||
|
' {0}'.format(client_id))
|
||||||
|
self.client_manager.insert(client_doc)
|
||||||
|
logging.info('Client registered, client_id: {0}'.format(client_id))
|
||||||
|
return client_id
|
||||||
|
|
||||||
|
def delete_client(self, project_id, user_id, client_id):
|
||||||
|
return self.client_manager.delete(
|
||||||
|
project_id=project_id,
|
||||||
|
user_id=user_id,
|
||||||
|
doc_id=client_id)
|
||||||
|
|
||||||
|
def get_job(self, project_id, user_id, job_id):
|
||||||
|
return self.job_manager.get(
|
||||||
|
project_id=project_id,
|
||||||
|
user_id=user_id,
|
||||||
|
doc_id=job_id
|
||||||
|
)
|
||||||
|
|
||||||
|
def search_job(self, project_id, user_id, offset=0, limit=10, search=None):
|
||||||
|
search = search or {}
|
||||||
|
return self.job_manager.search(project_id=project_id,
|
||||||
|
user_id=user_id,
|
||||||
|
search=search,
|
||||||
|
offset=offset,
|
||||||
|
limit=limit)
|
||||||
|
|
||||||
|
def add_job(self, user_id, doc, project_id):
|
||||||
|
jobdoc = utils.JobDoc.create(doc, project_id, user_id)
|
||||||
|
job_id = jobdoc['job_id']
|
||||||
|
self.job_manager.insert(jobdoc, job_id)
|
||||||
|
logging.info('Job registered, job id: {0}'.format(job_id))
|
||||||
|
return job_id
|
||||||
|
|
||||||
|
def delete_job(self, user_id, job_id, project_id):
|
||||||
|
return self.job_manager.delete(user_id=user_id,
|
||||||
|
doc_id=job_id,
|
||||||
|
project_id=project_id)
|
||||||
|
|
||||||
|
def update_job(self, user_id, job_id, patch_doc, project_id):
|
||||||
|
valid_patch = utils.JobDoc.create_patch(patch_doc)
|
||||||
|
|
||||||
|
# check that document exists
|
||||||
|
assert (self.job_manager.get(user_id=user_id,
|
||||||
|
doc_id=job_id,
|
||||||
|
project_id=project_id
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
version = self.job_manager.update(job_id, valid_patch)
|
||||||
|
logging.info('Job id {0} updated to version'
|
||||||
|
' {1}'.format(job_id, version))
|
||||||
|
return version
|
||||||
|
|
||||||
|
def replace_job(self, user_id, job_id, doc, project_id):
|
||||||
|
# check that no document exists with
|
||||||
|
# same job_id and different user_id
|
||||||
|
try:
|
||||||
|
self.job_manager.get(user_id=user_id,
|
||||||
|
doc_id=job_id,
|
||||||
|
project_id=project_id)
|
||||||
|
except freezer_api_exc.DocumentNotFound:
|
||||||
|
pass
|
||||||
|
|
||||||
|
valid_doc = utils.JobDoc.update(doc, project_id, user_id, job_id)
|
||||||
|
|
||||||
|
(created, version) = self.job_manager.insert(valid_doc, job_id)
|
||||||
|
if created:
|
||||||
|
logging.info('Job {0} created'.format(job_id))
|
||||||
|
else:
|
||||||
|
logging.info('Job {0} replaced with version'
|
||||||
|
' {1}'.format(job_id, version))
|
||||||
|
return version
|
||||||
|
|
||||||
|
def get_action(self, user_id, action_id, project_id):
|
||||||
|
return self.action_manager.get(user_id=user_id,
|
||||||
|
doc_id=action_id,
|
||||||
|
project_id=project_id
|
||||||
|
)
|
||||||
|
|
||||||
|
def search_action(self, user_id, offset=0, limit=10, search=None,
|
||||||
|
project_id=None):
|
||||||
|
search = search or {}
|
||||||
|
return self.action_manager.search(project_id=project_id,
|
||||||
|
user_id=user_id,
|
||||||
|
search=search,
|
||||||
|
offset=offset,
|
||||||
|
limit=limit)
|
||||||
|
|
||||||
|
def add_action(self, user_id, doc, project_id):
|
||||||
|
actiondoc = utils.ActionDoc.create(doc, user_id, project_id)
|
||||||
|
action_id = actiondoc['action_id']
|
||||||
|
self.action_manager.insert(actiondoc, action_id)
|
||||||
|
logging.info('Action registered, action id: {0}'.format(action_id))
|
||||||
|
return action_id
|
||||||
|
|
||||||
|
def delete_action(self, user_id, action_id, project_id):
|
||||||
|
return self.action_manager.delete(user_id=user_id,
|
||||||
|
doc_id=action_id,
|
||||||
|
project_id=project_id
|
||||||
|
)
|
||||||
|
|
||||||
|
def update_action(self, user_id, action_id, patch_doc, project_id):
|
||||||
|
valid_patch = utils.ActionDoc.create_patch(patch_doc)
|
||||||
|
|
||||||
|
# check that document exists
|
||||||
|
assert (self.action_manager.get(project_id=project_id,
|
||||||
|
user_id=user_id,
|
||||||
|
doc_id=action_id))
|
||||||
|
|
||||||
|
version = self.action_manager.update(action_id, valid_patch)
|
||||||
|
logging.info('Action {0} updated to version'
|
||||||
|
' {1}'.format(action_id, version))
|
||||||
|
return version
|
||||||
|
|
||||||
|
def replace_action(self, user_id, action_id, doc, project_id):
|
||||||
|
# check that no document exists with
|
||||||
|
# same action_id and different user_id
|
||||||
|
try:
|
||||||
|
self.action_manager.get(user_id=user_id,
|
||||||
|
doc_id=action_id,
|
||||||
|
project_id=project_id
|
||||||
|
)
|
||||||
|
except freezer_api_exc.DocumentNotFound:
|
||||||
|
pass
|
||||||
|
|
||||||
|
valid_doc = utils.ActionDoc.update(doc, user_id, action_id, project_id)
|
||||||
|
|
||||||
|
(created, version) = self.action_manager.insert(valid_doc, action_id)
|
||||||
|
if created:
|
||||||
|
logging.info('Action {0} created'.format(action_id))
|
||||||
|
else:
|
||||||
|
logging.info('Action {0} replaced with version'
|
||||||
|
' {1}'.format(action_id, version))
|
||||||
|
return version
|
||||||
|
|
||||||
|
def get_session(self, user_id, session_id, project_id):
|
||||||
|
return self.session_manager.get(user_id=user_id,
|
||||||
|
doc_id=session_id,
|
||||||
|
project_id=project_id)
|
||||||
|
|
||||||
|
def search_session(self, user_id, offset=0, limit=10, search=None,
|
||||||
|
project_id=None):
|
||||||
|
search = search or {}
|
||||||
|
return self.session_manager.search(user_id=user_id,
|
||||||
|
project_id=project_id,
|
||||||
|
search=search,
|
||||||
|
offset=offset,
|
||||||
|
limit=limit)
|
||||||
|
|
||||||
|
def add_session(self, user_id, doc, project_id):
|
||||||
|
session_doc = utils.SessionDoc.create(doc, user_id, project_id)
|
||||||
|
session_id = session_doc['session_id']
|
||||||
|
self.session_manager.insert(session_doc, session_id)
|
||||||
|
logging.info('Session registered, session id: {0}'.format(session_id))
|
||||||
|
return session_id
|
||||||
|
|
||||||
|
def delete_session(self, user_id, session_id, project_id):
|
||||||
|
return self.session_manager.delete(user_id=user_id,
|
||||||
|
doc_id=session_id,
|
||||||
|
project_id=project_id)
|
||||||
|
|
||||||
|
def update_session(self, user_id, session_id, patch_doc, project_id):
|
||||||
|
valid_patch = utils.SessionDoc.create_patch(patch_doc)
|
||||||
|
|
||||||
|
# check that document exists
|
||||||
|
assert (self.session_manager.get(user_id=user_id, doc_id=session_id,
|
||||||
|
project_id=project_id))
|
||||||
|
|
||||||
|
version = self.session_manager.update(session_id, valid_patch)
|
||||||
|
logging.info('Session {0} updated to version'
|
||||||
|
' {1}'.format(session_id, version))
|
||||||
|
return version
|
||||||
|
|
||||||
|
def replace_session(self, user_id, session_id, doc, project_id):
|
||||||
|
# check that no document exists with
|
||||||
|
# same session_id and different user_id
|
||||||
|
try:
|
||||||
|
self.session_manager.get(user_id=user_id, doc_id=session_id,
|
||||||
|
project_id=project_id)
|
||||||
|
except freezer_api_exc.DocumentNotFound:
|
||||||
|
pass
|
||||||
|
|
||||||
|
valid_doc = utils.SessionDoc.update(doc, user_id, session_id,
|
||||||
|
project_id)
|
||||||
|
|
||||||
|
(created, version) = self.session_manager.insert(valid_doc, session_id)
|
||||||
|
if created:
|
||||||
|
logging.info('Session {0} created'.format(session_id))
|
||||||
|
else:
|
||||||
|
logging.info('Session {0} replaced with version'
|
||||||
|
' {1}'.format(session_id, version))
|
||||||
|
return version
|
|
@ -40,6 +40,11 @@ class FreezerApiClient(rest_client.RestClient):
|
||||||
resp, response_body = self.get('/v1')
|
resp, response_body = self.get('/v1')
|
||||||
return resp, response_body
|
return resp, response_body
|
||||||
|
|
||||||
|
def get_version_v2(self):
|
||||||
|
|
||||||
|
resp, response_body = self.get('/v2')
|
||||||
|
return resp, response_body
|
||||||
|
|
||||||
def get_backups(self, backup_id=None, **params):
|
def get_backups(self, backup_id=None, **params):
|
||||||
|
|
||||||
if backup_id is None:
|
if backup_id is None:
|
||||||
|
|
|
@ -38,12 +38,12 @@ class TestFreezerApiVersion(base.BaseFreezerApiTest):
|
||||||
current_version = resp_body_json['versions'][0]
|
current_version = resp_body_json['versions'][0]
|
||||||
self.assertEqual(len(current_version), 4)
|
self.assertEqual(len(current_version), 4)
|
||||||
self.assertIn('id', current_version)
|
self.assertIn('id', current_version)
|
||||||
self.assertEqual(current_version['id'], '1')
|
self.assertEqual(current_version['id'], 'v1')
|
||||||
self.assertIn('links', current_version)
|
self.assertIn('links', current_version)
|
||||||
links = current_version['links'][0]
|
links = current_version['links'][0]
|
||||||
self.assertIn('href', links)
|
self.assertIn('href', links)
|
||||||
href = links['href']
|
href = links['href']
|
||||||
self.assertEqual('/v1/', href)
|
self.assertIn('/v1/', href)
|
||||||
self.assertIn('rel', links)
|
self.assertIn('rel', links)
|
||||||
rel = links['rel']
|
rel = links['rel']
|
||||||
self.assertEqual('self', rel)
|
self.assertEqual('self', rel)
|
||||||
|
@ -78,3 +78,33 @@ class TestFreezerApiVersion(base.BaseFreezerApiTest):
|
||||||
self.assertIn('formats', hints)
|
self.assertIn('formats', hints)
|
||||||
formats = hints['formats']
|
formats = hints['formats']
|
||||||
self.assertIn('application/json', formats)
|
self.assertIn('application/json', formats)
|
||||||
|
|
||||||
|
@test.attr(type="gate")
|
||||||
|
def test_api_version_v2(self):
|
||||||
|
resp, response_body = self.freezer_api_client.get_version_v2()
|
||||||
|
self.assertEqual(200, resp.status)
|
||||||
|
|
||||||
|
response_body_jason = json.loads(response_body)
|
||||||
|
self.assertIn('resources', response_body_jason)
|
||||||
|
resource = response_body_jason['resources']
|
||||||
|
self.assertIn('rel/backups', resource)
|
||||||
|
rel_backups = resource['rel/backups']
|
||||||
|
self.assertIn('href-template', rel_backups)
|
||||||
|
href_template = rel_backups['href-template']
|
||||||
|
self.assertEqual('/v2/{project_id}/backups/{backup_id}', href_template)
|
||||||
|
self.assertIn('href-vars', rel_backups)
|
||||||
|
href_vars = rel_backups['href-vars']
|
||||||
|
self.assertIn('backup_id', href_vars)
|
||||||
|
self.assertIn('project_id', href_vars)
|
||||||
|
backup_id = href_vars['backup_id']
|
||||||
|
self.assertEqual('param/backup_id', backup_id)
|
||||||
|
project_id = href_vars['project_id']
|
||||||
|
self.assertEqual('param/project_id', project_id)
|
||||||
|
self.assertIn('hints', rel_backups)
|
||||||
|
hints = rel_backups['hints']
|
||||||
|
self.assertIn('allow', hints)
|
||||||
|
allow = hints['allow']
|
||||||
|
self.assertEqual('GET', allow[0])
|
||||||
|
self.assertIn('formats', hints)
|
||||||
|
formats = hints['formats']
|
||||||
|
self.assertIn('application/json', formats)
|
||||||
|
|
|
@ -52,7 +52,7 @@ class TestService(unittest.TestCase):
|
||||||
with patch('falcon.' + version_attribute, version_string):
|
with patch('falcon.' + version_attribute, version_string):
|
||||||
# Attempt to invoke a mocked version of falcon to see what args
|
# Attempt to invoke a mocked version of falcon to see what args
|
||||||
# it was called with
|
# it was called with
|
||||||
service.freezer_app_factory(None)
|
service.freezer_appv1_factory(None)
|
||||||
|
|
||||||
# Check kwargs to see if the correct arguments are being passed
|
# Check kwargs to see if the correct arguments are being passed
|
||||||
_, named_args = mock_falcon.API.call_args
|
_, named_args = mock_falcon.API.call_args
|
||||||
|
@ -84,7 +84,7 @@ class TestService(unittest.TestCase):
|
||||||
with patch('falcon.' + version_attribute, version_string):
|
with patch('falcon.' + version_attribute, version_string):
|
||||||
# Attempt to invoke a mocked version of falcon to see what args
|
# Attempt to invoke a mocked version of falcon to see what args
|
||||||
# it was called with
|
# it was called with
|
||||||
service.freezer_app_factory(None)
|
service.freezer_appv1_factory(None)
|
||||||
|
|
||||||
# Check kwargs to see if the correct arguments are being passed
|
# Check kwargs to see if the correct arguments are being passed
|
||||||
_, kwargs = mock_falcon.API.call_args
|
_, kwargs = mock_falcon.API.call_args
|
||||||
|
@ -94,3 +94,35 @@ class TestService(unittest.TestCase):
|
||||||
self.assertNotIn('before', named_args)
|
self.assertNotIn('before', named_args)
|
||||||
self.assertNotIn('after', named_args)
|
self.assertNotIn('after', named_args)
|
||||||
self.assertIn('middleware', named_args)
|
self.assertIn('middleware', named_args)
|
||||||
|
|
||||||
|
@patch('freezer_api.cmd.api.v2')
|
||||||
|
@patch('freezer_api.cmd.api.driver')
|
||||||
|
@patch('freezer_api.cmd.api.falcon')
|
||||||
|
def test_on_old_falcon_builds_v2(self, mock_falcon, mock_driver, mock_v2):
|
||||||
|
"""Test that falcon versions that should use old middleware syntax do so
|
||||||
|
|
||||||
|
:param mock_falcon: The falcon import freezer-api will try to
|
||||||
|
start up
|
||||||
|
:param mock_driver: Database driver
|
||||||
|
:param mock_v1: List of endpoints for v1 for the freezer API.
|
||||||
|
"""
|
||||||
|
mock_driver.get_db.return_value = None
|
||||||
|
mock_v2.endpoints = []
|
||||||
|
|
||||||
|
# Iterate through all of the versions of falcon that should be using
|
||||||
|
# the old before=,after= invocation and ensure that freezer-api isn't
|
||||||
|
# trying to invoke it in the old style.
|
||||||
|
for version_string in self.falcon_versions_hooks:
|
||||||
|
version_attribute = '__version__' if hasattr(
|
||||||
|
falcon, '__version__') else 'version'
|
||||||
|
with patch('falcon.' + version_attribute, version_string):
|
||||||
|
# Attempt to invoke a mocked version of falcon to see what args
|
||||||
|
# it was called with
|
||||||
|
service.freezer_appv2_factory(None)
|
||||||
|
|
||||||
|
# Check kwargs to see if the correct arguments are being passed
|
||||||
|
_, named_args = mock_falcon.API.call_args
|
||||||
|
|
||||||
|
self.assertNotIn('before', named_args)
|
||||||
|
self.assertNotIn('after', named_args)
|
||||||
|
self.assertIn('middleware', named_args)
|
||||||
|
|
|
@ -23,6 +23,7 @@ import falcon
|
||||||
import mock
|
import mock
|
||||||
|
|
||||||
from freezer_api.api import v1
|
from freezer_api.api import v1
|
||||||
|
from freezer_api.api import v2
|
||||||
from freezer_api.api import versions
|
from freezer_api.api import versions
|
||||||
|
|
||||||
|
|
||||||
|
@ -35,5 +36,5 @@ class TestVersionResource(unittest.TestCase):
|
||||||
def test_on_get_return_versions(self):
|
def test_on_get_return_versions(self):
|
||||||
self.resource.on_get(self.req, self.req)
|
self.resource.on_get(self.req, self.req)
|
||||||
self.assertEqual(self.req.status, falcon.HTTP_300)
|
self.assertEqual(self.req.status, falcon.HTTP_300)
|
||||||
expected_result = json.dumps({'versions': [v1.VERSION]})
|
expected_result = json.dumps({'versions': [v1.VERSION, v2.VERSION]})
|
||||||
self.assertEqual(self.req.data, expected_result)
|
self.assertEqual(self.req.data, expected_result)
|
||||||
|
|
|
@ -0,0 +1,35 @@
|
||||||
|
---
|
||||||
|
prelude: >
|
||||||
|
Currently freezer-api v1 doesn't fully support multi-tenancy. In previous
|
||||||
|
releases, we implemented oslo.policy to support multi-tenancy, but we need
|
||||||
|
to add a new parameter to freezer-api which is the project_id. We need to
|
||||||
|
modify the structure of freezer-api url so it accepts project_id parameter.
|
||||||
|
Example of the old structure::
|
||||||
|
http://host_ip_address:9090/v1/jobs
|
||||||
|
|
||||||
|
Example of the new structure::
|
||||||
|
http://host_ip_address:9090/v2/project_id/jobs
|
||||||
|
|
||||||
|
features:
|
||||||
|
- |
|
||||||
|
Added Freezer-api v2 which changes the structure of freezer-api urls to
|
||||||
|
accept the newly added parameter, project_id.
|
||||||
|
|
||||||
|
Fully support role based multi-tenancy with api v2.
|
||||||
|
|
||||||
|
|
||||||
|
issues:
|
||||||
|
- |
|
||||||
|
It might break the backward compatibility.
|
||||||
|
|
||||||
|
upgrade:
|
||||||
|
- |
|
||||||
|
List upgrade notes here, or remove this section. All of the list items in
|
||||||
|
this section are combined when the release notes are rendered, so the text
|
||||||
|
needs to be worded so that it does not depend on any information only
|
||||||
|
available in another section, such as the prelude. This may mean repeating
|
||||||
|
some details.
|
||||||
|
deprecations:
|
||||||
|
- |
|
||||||
|
Freezer api v1 will be deprecated with Pike release. It will be supported
|
||||||
|
for another release, then will be removed completely.
|
Loading…
Reference in New Issue