Migration from falcon to pecan
Persistent tasks
Devstack installation scripts
Link with sticks project (ticket creation)

Change-Id: I91246b23c7c39ee81a366dc099d4de1329d3d689
.gitignore (vendored): 5 changes
@@ -15,8 +15,6 @@ var
sdist
develop-eggs
.installed.cfg
-lib
-lib64

# Installer logs
pip-log.txt
@@ -49,4 +47,5 @@ ChangeLog
# Editors
*~
.*.swp
-.*sw?
+.*sw?
+.idea
||||
@@ -13,6 +13,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
+import eventlet
+
+eventlet.monkey_patch()

import pbr.version

@@ -16,8 +16,9 @@

from pecan import rest

from cerberus.api.v1.controllers import alerts as alerts_api
from cerberus.api.v1.controllers import plugins as plugins_api
from cerberus.api.v1.controllers import security_alarms as \
    security_alarms_api
from cerberus.api.v1.controllers import security_reports as \
    security_reports_api
from cerberus.api.v1.controllers import tasks as tasks_api
@@ -25,7 +26,7 @@ from cerberus.api.v1.controllers import tasks as tasks_api


class V1Controller(rest.RestController):
    """API version 1 controller. """
    alerts = alerts_api.AlertsController()
    plugins = plugins_api.PluginsController()
    security_alarms = security_alarms_api.SecurityAlarmsController()
    security_reports = security_reports_api.SecurityReportsController()
    tasks = tasks_api.TasksController()
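With pecan, URL routing is object dispatch: each attribute on V1Controller above becomes a path segment under /v1, so no explicit route table is needed. A minimal, self-contained sketch of that mechanism (controller and app names here are illustrative, not part of the commit):

import pecan
from pecan import expose


class EchoController(object):
    @expose('json')
    def index(self):
        # GET /echo is routed by attribute lookup, no route table needed.
        return {'version': 'v1'}


class RootController(object):
    echo = EchoController()


# pecan walks RootController's attributes to resolve each request path.
app = pecan.make_app(RootController())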
@@ -28,6 +28,7 @@ LOG = log.getLogger(__name__)

class BaseController(rest.RestController):

    def __init__(self):
        super(BaseController, self).__init__()
        transport = messaging.get_transport(cfg.CONF)
        target = messaging.Target(topic='test_rpc', server='server1')
        self.client = messaging.RPCClient(transport, target)
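BaseController gives every API controller an oslo.messaging RPCClient bound to the 'test_rpc' topic; the Cerberus manager is expected to run the matching RPC server. A hedged sketch of that server side (only the topic and the 'get_plugins' method name come from this diff, the rest is illustrative oslo.messaging usage):

from oslo.config import cfg
from oslo import messaging


class ManagerEndpoint(object):
    def get_plugins(self, context):
        # In Cerberus this would return JSON-serialized plugin descriptions.
        return []


transport = messaging.get_transport(cfg.CONF)
target = messaging.Target(topic='test_rpc', server='server1')
server = messaging.get_rpc_server(transport, target, [ManagerEndpoint()],
                                  executor='eventlet')
# server.start() would begin consuming the controllers' RPC calls.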
@@ -1,51 +0,0 @@
#
# Copyright (c) 2014 EUROGICIEL
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import json

JSON_HOME = {
    'resources': {
        # -----------------------------------------------------------------
        # Plugins
        # -----------------------------------------------------------------
        'rel/plugins': {
            'href-template': '/v1/plugins{?id}',
            'href-vars': {
                'method_name': 'param/method_name',
                'task_name': 'param/task_name',
                'task_type': 'param/task_type',
                'task_period': 'param/task_period',
            },
            'hints': {
                'allow': ['GET', 'POST'],
                'formats': {
                    'application/json': {},
                },
            },
        }
    }
}


class Resource(object):
    def __init__(self):
        document = json.dumps(JSON_HOME, ensure_ascii=False, indent=4)
        self.document_utf8 = document.encode('utf-8')

    def on_get(self, req, resp):
        resp.data = self.document_utf8
        resp.content_type = 'application/json-home'
        resp.cache_control = ['max-age=86400']
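The deleted module above is the falcon style this commit removes: plain resource classes with on_get() responders plus hand-maintained routing and a hand-built JSON-home document. For contrast, a minimal stand-alone example of that old pattern (the route and payload are illustrative, not from the original wiring):

import falcon


class HomeResource(object):
    def on_get(self, req, resp):
        # falcon hands the raw request/response objects to the responder.
        resp.content_type = 'application/json-home'
        resp.data = b'{"resources": {}}'


# Every resource had to be registered against an explicit path.
api = falcon.API()
api.add_route('/', HomeResource())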
@@ -13,14 +13,19 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
#

import json
import pecan
from webob import exc
from wsme import types as wtypes
import wsmeext.pecan as wsme_pecan

from oslo import messaging

from cerberus.api.v1.controllers import base
from cerberus.api.v1.datamodels import plugin as plugin_models
from cerberus.common import errors
from cerberus import db
from cerberus.db.sqlalchemy import models
@@ -49,36 +54,44 @@ class PluginsController(base.BaseController):
        except Exception as e:
            LOG.exception(e)
            raise
-        plugins_info = []
+        plugins_info = {}
        for plugin_info in db_plugins_info:
-            plugins_info.append(models.PluginInfoJsonSerializer().
-                                serialize(plugin_info))
-        plugins_full_info = []
-        for plugin in plugins:
-            for plugin_info in plugins_info:
-                if (plugin.get('name') == plugin_info.get('name')):
-                    plugins_full_info.append(dict(plugin.items()
-                                             + plugin_info.items()))
-        return plugins_full_info
+            plugins_info[plugin_info.name] = models.\
+                PluginInfoJsonSerializer().serialize(plugin_info)
+
+        for key in plugins:
+            if key in plugins_info:
+                if isinstance(plugins_info[key], dict) and isinstance(
+                        plugins[key], dict):
+                    plugins_info[key].update(plugins[key])
+
+        pluginResources = []
+
+        for k, v in plugins_info.items():
+            pluginResources.append(
+                plugin_models.PluginResource(v))
+
+        return plugin_models.PluginResourceCollection(plugins=pluginResources)

    def _plugins(self):
-        """ Get a list of plugins loaded by Cerberus Manager """
+        """ Get a dict of plugins loaded by Cerberus Manager """
        ctx = pecan.request.context.to_dict()
        try:
            plugins = self.client.call(ctx, 'get_plugins')
        except messaging.RemoteError as e:
            LOG.exception(e)
            raise
-        plugins_ = []
+        plugins_ = {}
        for plugin in plugins:
            plugin_ = json.loads(plugin)
-            plugins_.append(plugin_)
+            plugins_[plugin_['name']] = plugin_
        return plugins_

-    @pecan.expose("json")
+    @wsme_pecan.wsexpose(plugin_models.PluginResourceCollection)
    def get_all(self):
        """ Get a list of plugins loaded by Cerberus manager
-        :return: a list of plugins loaded by Cerberus manager
+        :return: PluginResourceCollection : a list of plugins loaded by
+        Cerberus manager
        :raises:
            HTTPServiceUnavailable: an error occurred in Cerberus Manager or
            the service is unavailable
@@ -93,7 +106,7 @@ class PluginsController(base.BaseController):
        except Exception as e:
            LOG.exception(e)
            raise exc.HTTPNotFound()
-        return {'plugins': plugins}
+        return plugins

    def get_plugin(self, uuid):
        """ Get information about plugin loaded by Cerberus"""
@@ -108,10 +121,12 @@ class PluginsController(base.BaseController):
            db_plugin_info = db.plugin_info_get_from_uuid(uuid)
            plugin_info = models.PluginInfoJsonSerializer().\
                serialize(db_plugin_info)
+
+            plugin_info.update(plugin)
        except Exception as e:
            LOG.exception(e)
            raise
-        return dict(plugin_info.items() + plugin.items())
+        return plugin_models.PluginResource(plugin_info)

    def _plugin(self, uuid):
        """ Get a specific plugin thanks to its identifier """
@@ -127,7 +142,8 @@ class PluginsController(base.BaseController):
            raise errors.PluginNotFound(uuid)
        return json.loads(plugin)

-    @pecan.expose("json")
+    @wsme_pecan.wsexpose(plugin_models.PluginResource,
+                         wtypes.text)
    def get_one(self, uuid):
        """ Get details of a specific plugin whose identifier is uuid
        :param uuid: the identifier of the plugin
@@ -146,4 +162,4 @@ class PluginsController(base.BaseController):
        except Exception as e:
            LOG.exception(e)
            raise exc.HTTPNotFound()
-        return {'plugin': plugin}
+        return plugin
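The recurring change in this controller is swapping @pecan.expose("json") plus hand-built dicts for @wsme_pecan.wsexpose with typed resources, so validation and JSON serialization happen in one declared place. A minimal sketch of that pattern with stand-in types (not the project's datamodels):

from pecan import rest
from wsme import types as wtypes
import wsmeext.pecan as wsme_pecan


class Widget(wtypes.Base):
    name = wtypes.text


class WidgetCollection(wtypes.Base):
    widgets = [Widget]


class WidgetsController(rest.RestController):
    @wsme_pecan.wsexpose(WidgetCollection)
    def get_all(self):
        # WSME validates the typed result and serializes it to JSON;
        # the method never touches the raw request or response body.
        return WidgetCollection(widgets=[Widget(name='example')])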
cerberus/api/v1/controllers/security_alarms.py (new file, 116 lines)
@@ -0,0 +1,116 @@
#
# Copyright (c) 2015 EUROGICIEL
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import pecan
from webob import exc
from wsme import types as wtypes
import wsmeext.pecan as wsme_pecan

from cerberus.api.v1.controllers import base
from cerberus.api.v1.datamodels import security_alarm as alarm_models
from cerberus.common import errors
from cerberus import db
from cerberus.db.sqlalchemy import models
from cerberus.openstack.common import log

LOG = log.getLogger(__name__)


class SecurityAlarmsController(base.BaseController):

    @pecan.expose()
    def _lookup(self, alarm_id, *remainder):
        return SecurityAlarmController(alarm_id), remainder

    def list_security_alarms(self):
        """ List all the security alarms of all projects or just one. """
        try:
            security_alarms = db.security_alarm_get_all()
        except Exception as e:
            LOG.exception(e)
            raise errors.DbError(
                "Security alarms could not be retrieved"
            )
        return security_alarms

    @wsme_pecan.wsexpose(alarm_models.SecurityAlarmResourceCollection)
    def get_all(self):
        """ Get stored security alarms.
        :return: list of security alarms for one or all projects depending on
        context of the token.
        """
        try:
            security_alarms = self.list_security_alarms()
        except errors.DbError:
            raise exc.HTTPNotFound()

        alarms_resource = []
        # todo(eglamn3) : no need to serialize here
        for security_alarm in security_alarms:
            alarms_resource.append(
                alarm_models.SecurityAlarmResource(
                    models.SecurityAlarmJsonSerializer().
                    serialize(security_alarm)))

        return alarm_models.SecurityAlarmResourceCollection(
            security_alarms=alarms_resource)


class SecurityAlarmController(base.BaseController):

    _custom_actions = {
        'tickets': ['PUT']
    }

    def __init__(self, alarm_id):
        super(SecurityAlarmController, self).__init__()
        pecan.request.context['alarm_id'] = alarm_id
        self._uuid = alarm_id

    def get_security_alarm(self, alarm_id):
        try:
            security_alarm = db.security_alarm_get(alarm_id)
        except Exception as e:
            LOG.exception(e)
            raise errors.DbError(
                "Security alarm %s could not be retrieved" % alarm_id
            )
        return security_alarm

    @wsme_pecan.wsexpose(alarm_models.SecurityAlarmResource,
                         wtypes.text)
    def get(self):
        """Get security alarm in db"""
        try:
            security_alarm = self.get_security_alarm(self._uuid)
        except errors.DbError:
            raise exc.HTTPNotFound()
        s_alarm = models.SecurityAlarmJsonSerializer().\
            serialize(security_alarm)

        return alarm_models.SecurityAlarmResource(initial_data=s_alarm)

    @pecan.expose("json")
    def tickets(self, ticket_id):
        """Modify the ticket id associated to a security alarm in db.

        :param ticket_id: the ticket_id to store in db.

        """
        try:
            db.security_alarm_update_ticket_id(self._uuid, ticket_id)
        except Exception:
            raise exc.HTTPNotFound()
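SecurityAlarmsController relies on two pecan idioms: _lookup() pops the next path segment and delegates the rest of the URL to a per-alarm controller, and _custom_actions maps the extra 'tickets' segment to a PUT handler, so PUT /v1/security_alarms/<alarm_id>/tickets/<ticket_id> reaches tickets(). A generic, runnable sketch of the same dispatch (names are stand-ins):

import pecan
from pecan import rest


class ItemController(rest.RestController):
    _custom_actions = {'tickets': ['PUT']}

    def __init__(self, item_id):
        self._id = item_id

    @pecan.expose('json')
    def tickets(self, ticket_id):
        # Reached via PUT /items/<item_id>/tickets/<ticket_id>.
        return {'item': self._id, 'ticket': ticket_id}


class ItemsController(rest.RestController):
    @pecan.expose()
    def _lookup(self, item_id, *remainder):
        # pecan consumes '<item_id>' and routes the remainder of the
        # path against the per-item controller returned here.
        return ItemController(item_id), remainder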
@@ -16,8 +16,11 @@

import pecan
from webob import exc
from wsme import types as wtypes
import wsmeext.pecan as wsme_pecan

from cerberus.api.v1.controllers import base
from cerberus.api.v1.datamodels import security_report as report_models
from cerberus.common import errors
from cerberus import db
from cerberus.db.sqlalchemy import models
@@ -28,6 +31,10 @@ LOG = log.getLogger(__name__)

class SecurityReportsController(base.BaseController):

+    @pecan.expose()
+    def _lookup(self, report_id, *remainder):
+        return SecurityReportController(report_id), remainder
+
    def list_security_reports(self, project_id=None):
        """ List all the security reports of all projects or just one. """
        try:
@@ -40,7 +47,7 @@ class SecurityReportsController(base.BaseController):
            )
        return security_reports

-    @pecan.expose("json")
+    @wsme_pecan.wsexpose(report_models.SecurityAlarmResourceCollection)
    def get_all(self):
        """ Get stored security reports.
        :return: list of security reports for one or all projects depending on
@@ -54,11 +61,33 @@ class SecurityReportsController(base.BaseController):
            security_reports = self.list_security_reports(ctx.tenant_id)
        except errors.DbError:
            raise exc.HTTPNotFound()
-        json_security_reports = []
+
+        reports_resource = []
+        # todo(eglamn3) : no need to serialize here
        for security_report in security_reports:
-            json_security_reports.append(models.SecurityReportJsonSerializer().
-                                         serialize(security_report))
-        return {'security_reports': json_security_reports}
+            reports_resource.append(
+                report_models.SecurityAlarmResource(
+                    models.SecurityReportJsonSerializer().
+                    serialize(security_report)))
+
+        return report_models.SecurityAlarmResourceCollection(
+            security_reports=reports_resource)
+
+
+class SecurityReportController(base.BaseController):
+
+    _custom_actions = {
+        'tickets': ['PUT']
+    }
+
+    def __init__(self, report_id):
+        super(SecurityReportController, self).__init__()
+        pecan.request.context['report_id'] = report_id
+        try:
+            self._id = int(report_id)
+        except ValueError:
+            raise exc.HTTPBadRequest(
+                explanation='Security report id must be an integer')

    def get_security_report(self, id):
        try:
@@ -70,19 +99,27 @@ class SecurityReportsController(base.BaseController):
            )
        return security_report

-    @pecan.expose("json")
-    def get_one(self, id):
-        """
-        Get security reports in db
-        :param req: the HTTP request
-        :param resp: the HTTP response
-        :return:
-        """
+    @wsme_pecan.wsexpose(report_models.SecurityAlarmResource,
+                         wtypes.text)
+    def get(self):
+        """Get security report in db. """
        try:
-            security_report = self.get_security_report(id)
+            security_report = self.get_security_report(self._id)
        except errors.DbError:
            raise exc.HTTPNotFound()
        s_report = models.SecurityReportJsonSerializer().\
            serialize(security_report)

-        return {'security_report': s_report}
+        return report_models.SecurityAlarmResource(initial_data=s_report)

+    @pecan.expose("json")
+    def tickets(self, ticket_id):
+        """Modify the ticket id associated to a security report in db.
+
+        :param ticket_id: the ticket_id to store in db.
+
+        """
+        try:
+            db.security_report_update_ticket_id(self._id, ticket_id)
+        except Exception:
+            raise exc.HTTPNotFound()
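Unlike the alarm controller, SecurityReportController casts the path id to int in __init__, so a malformed URL fails fast with a 400 before any RPC or database work is done. The guard in isolation (the function name is illustrative):

from webob import exc


def parse_report_id(raw):
    # Reject non-numeric ids up front, mirroring the controller's __init__.
    try:
        return int(raw)
    except ValueError:
        raise exc.HTTPBadRequest(
            explanation='Security report id must be an integer')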
@@ -17,12 +17,14 @@
import json
import pecan
from webob import exc
import wsme
from wsme import types as wtypes
import wsmeext.pecan as wsme_pecan

from oslo.messaging import rpc

from cerberus.api.v1.controllers import base
from cerberus.common import errors
from cerberus.api.v1.datamodels import task as task_models
from cerberus.openstack.common import log

@@ -33,21 +35,73 @@ action_kind = ["stop", "restart", "force_delete"]
action_kind_enum = wtypes.Enum(str, *action_kind)


-class Task(wtypes.Base):
-    """ Representation of a task.
-    """
-    name = wtypes.text
-    period = wtypes.IntegerType()
-    method = wtypes.text
-    plugin_id = wtypes.text
-    type = wtypes.text
+class ActionController(base.BaseController):
+    _custom_actions = {
+        'stop': ['POST'],
+        'force_delete': ['POST'],
+        'restart': ['POST'],
+    }
+
+    @wsme_pecan.wsexpose(None, wtypes.text)
+    def stop(self, task_id):
+        """Stop task"""
+        try:
+            self.stop_task(task_id)
+        except rpc.RemoteError:
+            raise exc.HTTPBadRequest(
+                explanation="Task can not be stopped")
+
+    @wsme_pecan.wsexpose(None, wtypes.text)
+    def force_delete(self, task_id):
+        """Force delete task"""
+        try:
+            self.force_delete_task(task_id)
+        except rpc.RemoteError as e:
+            raise exc.HTTPBadRequest(explanation=e.value)
+
+    @wsme_pecan.wsexpose(None, wtypes.text)
+    def restart(self, task_id):
+        """Restart task"""
+        try:
+            self.restart_task(task_id)
+        except rpc.RemoteError as e:
+            raise exc.HTTPBadRequest(explanation=e.value)
+
+    def stop_task(self, task_id):
+        ctx = pecan.request.context.to_dict()
+        try:
+            self.client.call(ctx, 'stop_task', task_id=task_id)
+        except rpc.RemoteError as e:
+            LOG.exception(e)
+            raise
+
+    def force_delete_task(self, task_id):
+        ctx = pecan.request.context.to_dict()
+        try:
+            self.client.call(ctx,
+                             'force_delete_recurrent_task',
+                             task_id=task_id)
+        except rpc.RemoteError as e:
+            LOG.exception(e)
+            raise
+
+    def restart_task(self, task_id):
+        ctx = pecan.request.context.to_dict()
+        try:
+            self.client.call(ctx,
+                             'restart_recurrent_task',
+                             task_id=task_id)
+        except rpc.RemoteError as e:
+            LOG.exception(e)
+            raise


class TasksController(base.BaseController):

+    @pecan.expose()
+    def _lookup(self, task_id, *remainder):
+        return TaskController(task_id), remainder
+    action = ActionController()

    def __init__(self):
        super(TasksController, self).__init__()

    def list_tasks(self):
        ctx = pecan.request.context.to_dict()
@@ -56,14 +110,15 @@ class TasksController(base.BaseController):
        except rpc.RemoteError as e:
            LOG.exception(e)
            raise
-        tasks_ = []
+        tasks_resource = []
        for task in tasks:
-            task_ = json.loads(task)
-            tasks_.append(task_)
-        return tasks_
+            tasks_resource.append(
+                task_models.TaskResource(json.loads(task)))

-    @pecan.expose("json")
-    def get(self):
+        return task_models.TaskResourceCollection(tasks=tasks_resource)
+
+    @wsme_pecan.wsexpose(task_models.TaskResourceCollection)
+    def get_all(self):
        """ List tasks
        :return: list of tasks
        :raises:
@@ -73,36 +128,44 @@ class TasksController(base.BaseController):
            tasks = self.list_tasks()
        except rpc.RemoteError:
            raise exc.HTTPServiceUnavailable()
-        return {'tasks': tasks}
-
-    def create_task(self, body):
+        return tasks
+
+    def get_task(self, task_id):
        ctx = pecan.request.context.to_dict()
-
-        task = body.get('task', None)
-        if task is None:
-            LOG.exception("Task object not provided in request")
-            raise errors.TaskObjectNotProvided()
-
-        plugin_id = task.get('plugin_id', None)
-        if plugin_id is None:
-            LOG.exception("Plugin id not provided in request")
-            raise errors.PluginIdNotProvided()
-
-        method_ = task.get('method', None)
-        if method_ is None:
-            LOG.exception("Method not provided in request")
-            raise errors.MethodNotProvided()
-
        try:
-            task['id'] = self.client.call(
+            task = self.client.call(ctx, 'get_task', task_id=task_id)
        except rpc.RemoteError as e:
            LOG.exception(e)
            raise
+        return json.loads(task)
+
+    @wsme_pecan.wsexpose(task_models.TaskResource,
+                         wtypes.text)
+    def get(self, task_id):
+        """ Get details of a task"""
+        try:
+            task = self.get_task(task_id)
+        except rpc.RemoteError:
+            raise exc.HTTPNotFound()
+        except Exception as e:
+            LOG.exception(e)
+            raise
+        return task_models.TaskResource(initial_data=task)
+
+    def create_task(self, task):
+        ctx = pecan.request.context.to_dict()
+        try:
+            if task.period is wsme.Unset:
+                task.period = None
+            task.id = self.client.call(
                ctx,
-                'add_task',
-                uuid=plugin_id,
-                method_=method_,
-                task_period=task.get('period', None),
-                task_name=task.get('name', "unknown"),
-                task_type=task.get('type', "unique")
+                'create_task',
+                plugin_id=task.plugin_id,
+                method_=task.method,
+                task_period=task.period,
+                task_name=task.name,
+                task_type=task.type,
+                persistent=task.persistent
            )
        except rpc.RemoteError as e:
            LOG.exception(e)
@@ -110,203 +173,36 @@ class TasksController(base.BaseController):

        return task

-    @pecan.expose("json")
-    def post(self):
-        """Ask Cerberus Manager to call a function of a plugin whose identifier
-        is uuid, either once or periodically.
-        :return:
-        :raises:
-            HTTPBadRequest: the request is not correct
-        """
-        body_ = pecan.request.body
+    @wsme_pecan.wsexpose(task_models.TaskResource,
+                         body=task_models.TaskResource)
+    def post(self, task):
+        """Create a task"""

        try:
-            body = json.loads(body_.decode('utf-8'))
-        except (ValueError, UnicodeDecodeError) as e:
-            LOG.exception(e)
-            raise exc.HTTPBadRequest()
-        try:
-            task = self.create_task(body)
-        except errors.TaskObjectNotProvided:
-            raise exc.HTTPBadRequest(
-                explanation='The task object is required.')
-        except errors.PluginIdNotProvided:
-            raise exc.HTTPBadRequest(
-                explanation='Plugin id must be provided as a string')
-        except errors.MethodNotProvided:
-            raise exc.HTTPBadRequest(
-                explanation='Method must be provided as a string')
+            task = self.create_task(task)
        except rpc.RemoteError as e:
            LOG.exception(e)
            raise exc.HTTPBadRequest(explanation=e.value)
        except Exception as e:
            LOG.exception(e)
            raise exc.HTTPBadRequest()
-        return {'task': task}
+        return task


class TaskController(base.BaseController):
    """Manages operation on a single task."""

-    _custom_actions = {
-        'action': ['POST']
-    }
-
    def __init__(self, task_id):
        super(TaskController, self).__init__()
        pecan.request.context['task_id'] = task_id
+
+    @wsme_pecan.wsexpose(None, wtypes.text)
+    def delete(self, task_id):
+        """Delete a task"""
        try:
-            self._id = int(task_id)
-        except ValueError:
-            raise exc.HTTPBadRequest(
-                explanation='Task id must be an integer')
-
-    def get_task(self, id):
-        ctx = pecan.request.context.to_dict()
-        try:
-            task = self.client.call(ctx, 'get_task', id=int(id))
-        except ValueError as e:
-            LOG.exception(e)
-            raise
-        except rpc.RemoteError as e:
-            LOG.exception(e)
-            raise
-        return json.loads(task)
-
-    @pecan.expose("json")
-    def get(self):
-        """ Get details of a task whose id is id
-        :param id: the id of the task
-        :return:
-        :raises:
-            HTTPBadRequest
-        """
-        try:
-            task = self.get_task(self._id)
-        except ValueError:
-            raise exc.HTTPBadRequest(
-                explanation='Task id must be an integer')
-        except rpc.RemoteError:
-            raise exc.HTTPNotFound()
-        except Exception as e:
-            LOG.exception(e)
-            raise
-        return {'task': task}
-
-    @pecan.expose("json")
-    def post(self):
-        """
-        Enable to perform certain actions on a specific task (e.g. stop it)
-        :param req: the HTTP request, including the action to perform
-        :param resp: the HTTP response, including a description and the task id
-        :param id: the identifier of the task on which an action has to be
-        performed
-        :return:
-        :raises:
-            HTTPError: Incorrect JSON or not UTF-8 encoded
-            HTTPBadRequest: id not integer or task does not exist
-        """
-        body_ = pecan.request.body
-        try:
-            body = json.loads(body_.decode('utf-8'))
-        except (ValueError, UnicodeDecodeError) as e:
-            LOG.exception(e)
-            raise exc.HTTPBadRequest()
-
-        if 'stop' in body:
-            try:
-                self.stop_task(self._id)
-            except ValueError:
-                raise exc.HTTPBadRequest(
-                    explanation="Task id must be an integer")
-            except rpc.RemoteError:
-                raise exc.HTTPBadRequest(
-                    explanation="Task can not be stopped")
-        elif 'forceDelete' in body:
-            try:
-                self.force_delete(self._id)
-            except ValueError:
-                raise exc.HTTPBadRequest(
-                    explanation="Task id must be an integer")
-            except rpc.RemoteError as e:
-                raise exc.HTTPBadRequest(explanation=e.value)
-
-        elif 'restart' in body:
-            try:
-                self.restart(self._id)
-            except ValueError:
-                raise exc.HTTPBadRequest(
-                    explanation="Task id must be an integer")
-            except rpc.RemoteError as e:
-                raise exc.HTTPBadRequest(explanation=e.value)
-        else:
-            raise exc.HTTPBadRequest()
-
-    def stop_task(self, id):
-        ctx = pecan.request.context.to_dict()
-        try:
-            self.client.call(ctx, 'stop_task', id=int(id))
-        except ValueError as e:
-            LOG.exception(e)
-            raise
-        except rpc.RemoteError as e:
-            LOG.exception(e)
-            raise
-
-    def force_delete(self, id):
-        ctx = pecan.request.context.to_dict()
-        try:
-            self.client.call(ctx,
-                             'force_delete_recurrent_task',
-                             id=int(id))
-        except ValueError as e:
-            LOG.exception(e)
-            raise
-        except rpc.RemoteError as e:
-            LOG.exception(e)
-            raise
-
-    def restart(self, id):
-        ctx = pecan.request.context.to_dict()
-        try:
-            self.client.call(ctx,
-                             'restart_recurrent_task',
-                             id=int(id))
-        except ValueError as e:
-            LOG.exception(e)
-            raise
-        except rpc.RemoteError as e:
-            LOG.exception(e)
-            raise
-
-    def delete_task(self, id):
-        ctx = pecan.request.context.to_dict()
-        try:
-            self.client.call(ctx, 'delete_recurrent_task', id=int(id))
-        except ValueError as e:
-            LOG.exception(e)
-            raise
-        except rpc.RemoteError as e:
-            LOG.exception(e)
-            raise
-
-    @pecan.expose("json")
-    def delete(self):
-        """
-        Delete a task specified by its identifier. If the task is running, it
-        has to be stopped.
-        :param req: the HTTP request
-        :param resp: the HTTP response, including a description and the task id
-        :param id: the identifier of the task to be deleted
-        :return:
-        :raises:
-            HTTPBadRequest: id not an integer or task can't be deleted
-        """
-        try:
-            self.delete_task(self._id)
-        except ValueError:
-            raise exc.HTTPBadRequest(explanation="Task id must be an integer")
+            self.delete_task(task_id)
        except rpc.RemoteError as e:
            raise exc.HTTPBadRequest(explanation=e.value)
        except Exception as e:
            LOG.exception(e)
            raise

+    def delete_task(self, task_id):
+        ctx = pecan.request.context.to_dict()
+        try:
+            self.client.call(ctx, 'delete_recurrent_task', task_id=task_id)
+        except rpc.RemoteError as e:
+            LOG.exception(e)
+            raise
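The new ActionController methods use wsme_pecan.wsexpose(None, wtypes.text): no declared return type (an empty success body) and a single text argument bound from the URL, replacing the old hand-parsed JSON action body. A stripped-down version of the pattern:

import pecan
from pecan import rest
from wsme import types as wtypes
import wsmeext.pecan as wsme_pecan


class ActionController(rest.RestController):
    _custom_actions = {'stop': ['POST']}

    @wsme_pecan.wsexpose(None, wtypes.text)
    def stop(self, task_id):
        # POST .../action/stop/<task_id>; returning nothing yields an
        # empty body, and WSME maps raised exceptions to HTTP errors.
        pecan.request.context['task_id'] = task_id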
@@ -1,5 +1,5 @@
-# Copyright 2011 OpenStack Foundation.
-# All Rights Reserved.
#
+# Copyright (c) 2015 EUROGICIEL
+#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
@@ -9,13 +9,18 @@
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-NOTIFICATIONS = []
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import wsme
+from wsme import types as wtypes


-def notify(_context, message):
-    """Test notifier, stores notifications in memory for unittests."""
-    NOTIFICATIONS.append(message)
+class Base(wtypes.Base):
+
+    def as_dict_from_keys(self, keys):
+        return dict((k, getattr(self, k))
+                    for k in keys
+                    if hasattr(self, k) and
+                    getattr(self, k) != wsme.Unset)
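The point of as_dict_from_keys() is that WSME attributes default to wsme.Unset, and unset fields should simply drop out of serialized output. A runnable sketch using the same logic as the Base class above (the Sample type is illustrative):

import wsme
from wsme import types as wtypes


class Base(wtypes.Base):
    def as_dict_from_keys(self, keys):
        # Keep only attributes that were actually assigned a value.
        return dict((k, getattr(self, k))
                    for k in keys
                    if hasattr(self, k) and
                    getattr(self, k) != wsme.Unset)


class Sample(Base):
    name = wtypes.text
    state = wtypes.text


s = Sample(name='demo')  # 'state' is never set, so it stays wsme.Unset
assert s.as_dict_from_keys(['name', 'state']) == {'name': 'demo'}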
cerberus/api/v1/datamodels/plugin.py (new file, 72 lines)
@@ -0,0 +1,72 @@
#
# Copyright (c) 2015 EUROGICIEL
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#

from cerberus.api.v1.datamodels import base
from wsme import types as wtypes


class PluginResource(base.Base):
    """Type describing a plugin.

    """

    name = wtypes.text
    """Name of the plugin."""

    id = wtypes.IntegerType()
    """Id of the plugin."""

    uuid = wtypes.text
    """Uuid of the plugin."""

    methods = [wtypes.text]
    """Hook methods."""

    version = wtypes.text
    """Version of the plugin."""

    provider = wtypes.text
    """Provider of the plugin."""

    subscribed_events = [wtypes.text]
    """Subscribed events of the plugin."""

    type = wtypes.text
    """Type of the plugin."""

    tool_name = wtypes.text
    """Tool name of the plugin."""

    description = wtypes.text
    """Description of the plugin."""

    def as_dict(self):
        return self.as_dict_from_keys(['name', 'id', 'uuid', 'methods',
                                       'version', 'provider',
                                       'subscribed_events', 'type',
                                       'tool_name', 'description'])

    def __init__(self, initial_data):
        super(PluginResource, self).__init__()
        for key in initial_data:
            setattr(self, key, initial_data[key])


class PluginResourceCollection(base.Base):
    """A list of Plugins."""

    plugins = [PluginResource]
cerberus/api/v1/datamodels/security_alarm.py (new file, 77 lines)
@@ -0,0 +1,77 @@
#
# Copyright (c) 2015 EUROGICIEL
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import datetime

from cerberus.api.v1.datamodels import base
from wsme import types as wtypes


class SecurityAlarmResource(base.Base):
    """ Representation of a security alarm.
    """

    id = wtypes.IntegerType()
    """Security alarm id."""

    plugin_id = wtypes.wsattr(wtypes.text)
    """Associated plugin id."""

    alarm_id = wtypes.wsattr(wtypes.text)
    """Associated alarm id."""

    timestamp = datetime.datetime
    """creation date."""

    status = wtypes.wsattr(wtypes.text)
    """Status."""

    severity = wtypes.wsattr(wtypes.text)
    """Severity."""

    project_id = wtypes.wsattr(wtypes.text)
    """Associated project id."""

    component_id = wtypes.wsattr(wtypes.text)
    """Component id."""

    summary = wtypes.wsattr(wtypes.text)
    """Summary."""

    description = wtypes.wsattr(wtypes.text)
    """Description."""

    ticket_id = wtypes.wsattr(wtypes.text)
    """Associated ticket id."""

    def as_dict(self):
        return self.as_dict_from_keys(
            ['id', 'plugin_id', 'alarm_id', 'timestamp',
             'status', 'severity', 'component_id', 'project_id',
             'summary', 'description', 'ticket_id']
        )

    def __init__(self, initial_data=None):
        super(SecurityAlarmResource, self).__init__()
        if initial_data is not None:
            for key in initial_data:
                setattr(self, key, initial_data[key])


class SecurityAlarmResourceCollection(base.Base):
    """A list of Security alarms."""

    security_alarms = [SecurityAlarmResource]
cerberus/api/v1/datamodels/security_report.py (new file, 88 lines)
@@ -0,0 +1,88 @@
#
# Copyright (c) 2015 EUROGICIEL
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import datetime

from cerberus.api.v1.datamodels import base
from wsme import types as wtypes


class SecurityAlarmResource(base.Base):
    """ Representation of a security report.
    """

    id = wtypes.IntegerType()
    """Security report id."""

    plugin_id = wtypes.wsattr(wtypes.text)
    """Associated plugin id."""

    report_id = wtypes.wsattr(wtypes.text)
    """Associated report id."""

    component_id = wtypes.wsattr(wtypes.text)
    """Associated component id."""

    component_type = wtypes.wsattr(wtypes.text)
    """Component type."""

    component_name = wtypes.wsattr(wtypes.text)
    """Component name."""

    project_id = wtypes.wsattr(wtypes.text)
    """Associated project id."""

    title = wtypes.wsattr(wtypes.text)
    """Title of report."""

    description = wtypes.wsattr(wtypes.text)
    """Description."""

    security_rating = float
    """Security rating."""

    vulnerabilities = wtypes.wsattr(wtypes.text)
    """Associated report id."""

    vulnerabilities_number = wtypes.IntegerType()
    """Total of Vulnerabilities."""

    last_report_date = datetime.datetime
    """Last report date."""

    ticket_id = wtypes.wsattr(wtypes.text, mandatory=True)
    """Associated ticket id."""

    def as_dict(self):
        return self.as_dict_from_keys(
            ['id', 'plugin_id', 'report_id', 'component_id',
             'component_type', 'component_name', 'project_id',
             'title', 'description', 'security_rating',
             'vulnerabilities', 'vulnerabilities_number',
             'last_report_date', 'ticket_id']
        )

    def __init__(self, initial_data=None):
        super(SecurityAlarmResource, self).__init__()
        if initial_data is not None:
            for key in initial_data:
                setattr(self, key, initial_data[key])


class SecurityAlarmResourceCollection(base.Base):
    """A list of Security reports."""

    security_reports = [SecurityAlarmResource]
cerberus/api/v1/datamodels/task.py (new file, 63 lines)
@@ -0,0 +1,63 @@
#
# Copyright (c) 2015 EUROGICIEL
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from cerberus.api.v1.datamodels import base
from wsme import types as wtypes


class TaskResource(base.Base):
    """ Representation of a task.
    """
    name = wtypes.wsattr(wtypes.text, default="unknown")
    """Name of the task."""

    period = wtypes.IntegerType()
    """Period if periodic."""

    method = wtypes.wsattr(wtypes.text, mandatory=True)
    """Hook methods."""

    state = wtypes.wsattr(wtypes.text)
    """Running or not."""

    id = wtypes.IntegerType()
    """Associated task id."""

    plugin_id = wtypes.wsattr(wtypes.text, mandatory=True)
    """Associated plugin id."""

    type = wtypes.wsattr(wtypes.text, default="unique")
    """Type of the task."""

    persistent = wtypes.wsattr(wtypes.text, default="false")
    """If task must persist."""

    def as_dict(self):
        return self.as_dict_from_keys(['name', 'period', 'method', 'state',
                                       'id', 'plugin_id', 'type',
                                       'persistent'])

    def __init__(self, initial_data=None):
        super(TaskResource, self).__init__()
        if initial_data is not None:
            for key in initial_data:
                setattr(self, key, initial_data[key])


class TaskResourceCollection(base.Base):
    """A list of Tasks."""

    tasks = [TaskResource]
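Given the field declarations above, a task-creation request body would look roughly like the following; method and plugin_id are mandatory, the rest fall back to their declared defaults. All values here are made up for illustration:

task_body = {
    "name": "acl_scan",                 # defaults to "unknown" if omitted
    "method": "process_notification",   # mandatory
    "plugin_id": "example-plugin-uuid", # mandatory
    "period": 60,                       # only meaningful for recurrent tasks
    "type": "recurrent",                # defaults to "unique"
    "persistent": "true",               # stored so the task survives restarts
}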
@@ -14,7 +14,6 @@
# limitations under the License.
#

-import eventlet
import sys

from oslo.config import cfg
@@ -24,9 +23,6 @@ from cerberus import manager
from cerberus.openstack.common import log
from cerberus.openstack.common import service

-
-eventlet.monkey_patch()
-
LOG = log.getLogger(__name__)

cerberus/common/cerberus_impl_rabbit.py (new file, 147 lines)
@@ -0,0 +1,147 @@
#
# Copyright (c) 2015 EUROGICIEL
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import functools
import json
import kombu
import logging

from oslo.messaging._drivers import amqp as rpc_amqp
from oslo.messaging._drivers import amqpdriver
from oslo.messaging._drivers import common as rpc_common
from oslo.messaging._drivers import impl_rabbit
from oslo.messaging.openstack.common.gettextutils import _  # noqa


LOG = logging.getLogger(__name__)


def _get_queue_arguments(conf):
    """Construct the arguments for declaring a queue.

    If the rabbit_ha_queues option is set, we declare a mirrored queue
    as described here:

    http://www.rabbitmq.com/ha.html

    Setting x-ha-policy to all means that the queue will be mirrored
    to all nodes in the cluster.
    """
    return {'x-ha-policy': 'all'} if conf.rabbit_ha_queues else {}


class CerberusRabbitMessage(dict):

    def __init__(self, raw_message):
        if isinstance(raw_message.payload, unicode):
            message = rpc_common.deserialize_msg(
                json.loads(raw_message.payload))
        else:
            message = rpc_common.deserialize_msg(raw_message.payload)
        super(CerberusRabbitMessage, self).__init__(message)
        self._raw_message = raw_message

    def acknowledge(self):
        self._raw_message.ack()

    def requeue(self):
        self._raw_message.requeue()


class CerberusConsumerBase(impl_rabbit.ConsumerBase):

    def _callback_handler(self, message, callback):
        """Call callback with deserialized message.

        Messages that are processed and ack'ed.
        """

        try:
            callback(CerberusRabbitMessage(message))
        except Exception:
            LOG.exception(_("Failed to process message"
                            " ... skipping it."))
            message.ack()


class CerberusTopicConsumer(CerberusConsumerBase):
    """Consumer class for 'topic'."""

    def __init__(self, conf, channel, topic, callback, tag, exchange_name,
                 name=None, **kwargs):
        """Init a 'topic' queue.

        :param channel: the amqp channel to use
        :param topic: the topic to listen on
        :paramtype topic: str
        :param callback: the callback to call when messages are received
        :param tag: a unique ID for the consumer on the channel
        :param exchange_name: the exchange name to use
        :param name: optional queue name, defaults to topic
        :paramtype name: str

        Other kombu options may be passed as keyword arguments
        """
        # Default options
        options = {'durable': conf.amqp_durable_queues,
                   'queue_arguments': _get_queue_arguments(conf),
                   'auto_delete': conf.amqp_auto_delete,
                   'exclusive': False}
        options.update(kwargs)
        exchange = kombu.entity.Exchange(name=exchange_name,
                                         type='topic',
                                         durable=options['durable'],
                                         auto_delete=options['auto_delete'])
        super(CerberusTopicConsumer, self).__init__(channel,
                                                    callback,
                                                    tag,
                                                    name=name or topic,
                                                    exchange=exchange,
                                                    routing_key=topic,
                                                    **options)


class CerberusConnection(impl_rabbit.Connection):

    def __init__(self, conf, url):
        super(CerberusConnection, self).__init__(conf, url)

    def declare_topic_consumer(self, exchange_name, topic, callback=None,
                               queue_name=None):
        """Create a 'topic' consumer."""
        self.declare_consumer(functools.partial(CerberusTopicConsumer,
                                                name=queue_name,
                                                exchange_name=exchange_name,
                                                ),
                              topic, callback)


class CerberusRabbitDriver(amqpdriver.AMQPDriverBase):

    def __init__(self, conf, url,
                 default_exchange=None,
                 allowed_remote_exmods=None):
        conf.register_opts(impl_rabbit.rabbit_opts)
        conf.register_opts(rpc_amqp.amqp_opts)

        connection_pool = rpc_amqp.get_connection_pool(conf,
                                                       url,
                                                       CerberusConnection)

        super(CerberusRabbitDriver, self).__init__(conf, url,
                                                   connection_pool,
                                                   default_exchange,
                                                   allowed_remote_exmods)
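CerberusRabbitMessage wraps a raw kombu message so the deserialized payload reads like a dict while ack/requeue stay available to the consumer. A dependency-free sketch of that wrapper contract, with a stub standing in for the kombu message (the real class also runs oslo's deserialize_msg and handles unicode payloads):

import json


class StubRawMessage(object):
    """Minimal stand-in for a kombu message in this sketch."""

    def __init__(self, payload):
        self.payload = payload
        self.acked = False

    def ack(self):
        self.acked = True


class Message(dict):
    def __init__(self, raw_message):
        payload = raw_message.payload
        if isinstance(payload, str):
            payload = json.loads(payload)
        super(Message, self).__init__(payload)
        self._raw = raw_message

    def acknowledge(self):
        # Acking goes through to the broker-side message.
        self._raw.ack()


raw = StubRawMessage(json.dumps({'event_type': 'image.update'}))
msg = Message(raw)
msg.acknowledge()
assert msg['event_type'] == 'image.update' and raw.acked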
@@ -142,13 +142,25 @@ class CerberusException(Exception):
        return self.args[0]


-class AlertExists(CerberusException):
+class DBException(CerberusException):
+    msg_fmt = _("Database error.")
+
+
+class AlertExists(DBException):
    msg_fmt = _("Alert %(alert_id)s already exists.")


-class ReportExists(CerberusException):
+class ReportExists(DBException):
    msg_fmt = _("Report %(report_id)s already exists.")


-class PluginInfoExists(CerberusException):
+class PluginInfoExists(DBException):
    msg_fmt = _("Plugin info %(id)s already exists.")


+class AlarmExists(DBException):
+    msg_fmt = _("Alarm %(alarm_id)s already exists.")
+
+
+class TaskExists(DBException):
+    msg_fmt = _("Task %(task_id)s already exists.")
cerberus/common/loopingcall.py (new file, 66 lines)
@@ -0,0 +1,66 @@
#
# Copyright (c) 2015 EUROGICIEL
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys

from eventlet import event
from eventlet import greenthread

from cerberus.openstack.common.gettextutils import _LE, _LW  # noqa
from cerberus.openstack.common import log as logging
from cerberus.openstack.common import loopingcall
from cerberus.openstack.common import timeutils


LOG = logging.getLogger(__name__)


class CerberusFixedIntervalLoopingCall(loopingcall.FixedIntervalLoopingCall):
    """A fixed interval looping call."""

    def start(self, interval, initial_delay=None):
        self._running = True
        done = event.Event()

        def _inner():
            if initial_delay:
                greenthread.sleep(initial_delay)

            try:
                while self._running:
                    start = timeutils.utcnow()
                    self.f(*self.args, **self.kw)
                    end = timeutils.utcnow()
                    if not self._running:
                        break
                    delay = interval - timeutils.delta_seconds(start, end)
                    if delay <= 0:
                        LOG.warn(_LW('task run outlasted interval by %s sec') %
                                 -delay)
                    greenthread.sleep(delay if delay > 0 else 0)
            except loopingcall.LoopingCallDone as e:
                self.stop()
                done.send(e.retvalue)
            except Exception:
                LOG.exception(_LE('in fixed duration looping call'))
                done.send_exception(*sys.exc_info())
                return
            else:
                done.send(True)

        self.done = done

        self.gt = greenthread.spawn(_inner)
        return self.done
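The behavior being preserved here: f() fires every `interval` seconds measured from iteration start, an overlong iteration only logs a warning, and raising LoopingCallDone ends the loop and resolves the event. A toy re-creation of that scheduling rule with plain time calls (no eventlet), purely for illustration:

import time


class LoopDone(Exception):
    """Stand-in for loopingcall.LoopingCallDone."""

    def __init__(self, retvalue=True):
        self.retvalue = retvalue


def run_fixed_interval(f, interval, ticks):
    for _ in range(ticks):
        start = time.time()
        try:
            f()
        except LoopDone:
            return  # the callback asked the loop to stop
        # Sleep only for what is left of the interval; if the call
        # overran, run the next iteration immediately.
        delay = interval - (time.time() - start)
        if delay > 0:
            time.sleep(delay)


run_fixed_interval(lambda: None, interval=0.01, ticks=3)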
cerberus/common/service.py (new file, 32 lines)
@@ -0,0 +1,32 @@
#
# Copyright (c) 2015 EUROGICIEL
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from cerberus.common import threadgroup
from cerberus.openstack.common import service


class CerberusService(service.Service):

    def __init__(self, threads=1000):
        super(CerberusService, self).__init__(threads)
        self.tg = threadgroup.CerberusThreadGroup(threads)


class CerberusServices(service.Services):

    def __init__(self):
        super(CerberusServices, self).__init__()
        self.tg = threadgroup.CerberusThreadGroup()
cerberus/common/threadgroup.py (new file, 60 lines)
@@ -0,0 +1,60 @@
#
# Copyright (c) 2015 EUROGICIEL
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from cerberus.common import loopingcall
from cerberus.db.sqlalchemy import api as db_api
from cerberus.openstack.common import threadgroup


class CerberusThread(threadgroup.Thread):
    def __init__(self, f, thread, group, *args, **kwargs):
        super(CerberusThread, self).__init__(thread, group)
        self.f = f
        self.args = args
        self.kw = kwargs


class CerberusThreadGroup(threadgroup.ThreadGroup):

    def add_stopped_timer(self, callback, *args, **kwargs):
        pulse = loopingcall.CerberusFixedIntervalLoopingCall(callback,
                                                             *args,
                                                             **kwargs)
        self.timers.append(pulse)
        return pulse

    def add_timer(self, interval, callback, initial_delay=None,
                  *args, **kwargs):
        pulse = loopingcall.CerberusFixedIntervalLoopingCall(callback,
                                                             *args,
                                                             **kwargs)
        pulse.start(interval=interval,
                    initial_delay=initial_delay)
        self.timers.append(pulse)
        return pulse

    def add_thread(self, callback, *args, **kwargs):
        gt = self.pool.spawn(callback, *args, **kwargs)
        th = CerberusThread(callback, gt, self, *args, **kwargs)
        self.threads.append(th)
        return th

    def thread_done(self, thread):
        self.threads.remove(thread)
        try:
            db_api.delete_task(thread.kw.get('task_id'))
        except Exception:
            raise
@@ -51,16 +51,6 @@ def db_sync(engine, version=None):
    return IMPL.db_sync(engine, version=version)


-def alert_create(values):
-    """Create an instance from the values dictionary."""
-    return IMPL.alert_create(values)
-
-
-def alert_get_all():
-    """Get all alerts"""
-    return IMPL.alert_get_all()
-
-
def security_report_create(values):
    """Create an instance from the values dictionary."""
    return IMPL.security_report_create(values)
@@ -71,6 +61,11 @@ def security_report_update_last_report_date(id, date):
    return IMPL.security_report_update_last_report_date(id, date)


+def security_report_update_ticket_id(id, ticket_id):
+    """Create an instance from the values dictionary."""
+    return IMPL.security_report_update_ticket_id(id, ticket_id)
+
+
def security_report_get_all(project_id=None):
    """Get all alerts"""
    return IMPL.security_report_get_all(project_id=project_id)
@@ -113,3 +108,24 @@ def security_alarm_get_all():

def security_alarm_get(id):
    return IMPL.security_alarm_get(id)
+
+
+def security_alarm_update_ticket_id(alarm_id, ticket_id):
+    """Create an instance from the values dictionary."""
+    return IMPL.security_alarm_update_ticket_id(alarm_id, ticket_id)
+
+
+def create_task(values):
+    return IMPL.create_task(values)
+
+
+def delete_task(id):
+    IMPL.delete_task(id)
+
+
+def update_state_task(id, running):
+    IMPL.update_state_task(id, running)
+
+
+def get_all_tasks():
+    return IMPL.get_all_tasks()
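cerberus/db/api.py stays a thin dispatch layer: each module-level function forwards to a backend object (IMPL), which is why the new task and ticket helpers land both here and in the sqlalchemy backend below. The pattern in miniature (the backend class here is illustrative):

class _SqlalchemyImpl(object):
    """Stand-in for cerberus.db.sqlalchemy.api."""

    def get_all_tasks(self):
        return []


IMPL = _SqlalchemyImpl()


def get_all_tasks():
    # Callers import only this module; the backend stays swappable.
    return IMPL.get_all_tasks()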
@@ -14,7 +14,6 @@
# limitations under the License.
#

import sqlalchemy
import sys
import threading

@@ -108,7 +107,7 @@ def _security_report_get_all(project_id=None):
            filter(models.SecurityReport.project_id == project_id).all()
    except Exception as e:
        LOG.exception(e)
        raise e
        raise exception.DBException()


def _security_report_get(id):
@@ -131,9 +130,12 @@ def security_report_create(values):
    security_report_ref.update(values)
    try:
        security_report_ref.save()
    except sqlalchemy.exc.OperationalError as e:
    except db_exc.DBDuplicateEntry as e:
        LOG.exception(e)
        raise db_exc.ColumnError
        raise exception.ReportExists(id=values['id'])
    except Exception as e:
        LOG.exception(e)
        raise exception.DBException()
    return security_report_ref


@@ -145,9 +147,22 @@ def security_report_update_last_report_date(id, date):
    report.last_report_date = date
    try:
        report.save(session)
    except sqlalchemy.exc.OperationalError as e:
    except Exception as e:
        LOG.exception(e)
        raise db_exc.ColumnError
        raise exception.DBException()


def security_report_update_ticket_id(id, ticket_id):
    session = get_session()
    report = model_query(models.SecurityReport, read_deleted="no",
                         session=session).filter(models.SecurityReport.id
                                                 == id).first()
    report.ticket_id = ticket_id
    try:
        report.save(session)
    except Exception as e:
        LOG.exception(e)
        raise exception.DBException()


def security_report_get_all(project_id=None):
@@ -218,9 +233,9 @@ def plugin_version_update(id, version):
    plugin.version = version
    try:
        plugin.save(session)
    except sqlalchemy.exc.OperationalError as e:
    except Exception as e:
        LOG.exception(e)
        raise db_exc.ColumnError
        raise exception.DBException()


def db_sync(engine, version=None):
@@ -236,20 +251,16 @@ def db_version(engine):
def _security_alarm_get_all():

    session = get_session()
    try:
        return model_query(models.SecurityAlarm, read_deleted="no",
                           session=session).all()
    except Exception as e:
        LOG.exception(e)
        raise e
    return model_query(models.SecurityAlarm, read_deleted="no",
                       session=session).all()


def _security_alarm_get(id):
def _security_alarm_get(alarm_id):

    session = get_session()
    return model_query(models.SecurityAlarm, read_deleted="no",
                       session=session).filter(models.SecurityAlarm.
                                               id == id).first()
                                               alarm_id == alarm_id).first()


def security_alarm_create(values):
@@ -257,9 +268,8 @@ def security_alarm_create(values):
    security_alarm_ref.update(values)
    try:
        security_alarm_ref.save()
    except sqlalchemy.exc.OperationalError as e:
        LOG.exception(e)
        raise db_exc.ColumnError
    except db_exc.DBDuplicateEntry:
        raise exception.AlarmExists(id=values['id'])
    return security_alarm_ref


@@ -269,3 +279,51 @@ def security_alarm_get_all():

def security_alarm_get(id):
    return _security_alarm_get(id)


def security_alarm_update_ticket_id(alarm_id, ticket_id):
    session = get_session()
    alarm = model_query(models.SecurityAlarm, read_deleted="no",
                        session=session).filter(models.SecurityAlarm.alarm_id
                                                == alarm_id).first()
    alarm.ticket_id = ticket_id
    try:
        alarm.save(session)
    except Exception as e:
        LOG.exception(e)
        raise exception.DBException()


def create_task(values):
    task_ref = models.Task()
    task_ref.update(values)
    try:
        task_ref.save()
    except db_exc.DBDuplicateEntry:
        raise exception.TaskExists(id=values['uuid'])
    return task_ref


def delete_task(id):
    session = get_session()
    task = model_query(models.Task, read_deleted="no",
                       session=session).filter_by(uuid=id)
    task.delete()


def update_state_task(id, running):
    session = get_session()
    task = model_query(models.Task, read_deleted="no",
                       session=session).filter_by(uuid=id).first()
    task.running = running
    try:
        task.save(session)
    except Exception as e:
        LOG.exception(e)
        raise exception.DBException()


def get_all_tasks():
    session = get_session()
    return model_query(models.Task, read_deleted="no",
                       session=session).all()

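[Editor's note] A hedged usage sketch of the ticket helpers added above, as a caller such as the sticks ticket-creation integration might use them; the ticket ids are invented for illustration:

    # Hedged sketch -- illustrative only; ticket ids are invented.
    from cerberus.db.sqlalchemy import api as db_api

    # Record the ticket opened for a report, then for an alarm.
    db_api.security_report_update_ticket_id(report.id, 'TICKET-42')
    db_api.security_alarm_update_ticket_id(alarm.alarm_id, 'TICKET-43')

Both helpers load the row inside a session, set ticket_id and save, raising exception.DBException on any database error.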
@@ -18,7 +18,7 @@
SQLAlchemy models for cerberus data.
"""

from sqlalchemy import Column, String, Integer, DateTime, Float, Text
from sqlalchemy import Boolean, Column, String, Integer, DateTime, Float, Text
from sqlalchemy.ext.declarative import declarative_base

from oslo.config import cfg
@@ -46,36 +46,6 @@ class CerberusBase(models.SoftDeleteMixin,
        super(CerberusBase, self).save(session=session)


class Alert(BASE, CerberusBase):
    """Security alert"""

    __tablename__ = 'alert'
    __table_args__ = ()

    id = Column(Integer, primary_key=True)
    title = Column(String(255))
    status = Column(String(255))
    severity = Column(Integer)
    acknowledged_at = Column(DateTime)
    plugin_id = Column(String(255))
    description = Column(String(255))
    resource_id = Column(String(255))
    issue_link = Column(String(255))


class AlertJsonSerializer(serialize.JsonSerializer):
    """Alert serializer"""

    __attributes__ = ['id', 'title', 'description', 'status', 'severity',
                      'created_at', 'deleted_at', 'updated_at',
                      'acknowledged_at', 'plugin_id', 'resource_id',
                      'issue_link', 'deleted']
    __required__ = ['id', 'title']
    __attribute_serializer__ = dict(created_at='date', deleted_at='date',
                                    updated_at='date', acknowledged_at='date')
    __object_class__ = Alert


class PluginInfo(BASE, CerberusBase):
    """Plugin info"""

@@ -122,6 +92,7 @@ class SecurityReport(BASE, CerberusBase):
    vulnerabilities = Column(Text)
    vulnerabilities_number = Column(Integer)
    last_report_date = Column(DateTime)
    ticket_id = Column(String(255))


class SecurityReportJsonSerializer(serialize.JsonSerializer):
@@ -130,8 +101,9 @@ class SecurityReportJsonSerializer(serialize.JsonSerializer):
    __attributes__ = ['id', 'title', 'description', 'plugin_id', 'report_id',
                      'component_id', 'component_type', 'component_name',
                      'project_id', 'security_rating', 'vulnerabilities',
                      'vulnerabilities_number', 'last_report_date', 'deleted',
                      'created_at', 'deleted_at', 'updated_at']
                      'vulnerabilities_number', 'last_report_date',
                      'ticket_id', 'deleted', 'created_at', 'deleted_at',
                      'updated_at']
    __required__ = ['id', 'title', 'component_id']
    __attribute_serializer__ = dict(created_at='date', deleted_at='date',
                                    acknowledged_at='date')
@@ -152,20 +124,49 @@ class SecurityAlarm(BASE, CerberusBase):
    timestamp = Column(DateTime)
    status = Column(String(255))
    severity = Column(String(255))
    project_id = Column(String(255))
    component_id = Column(String(255))
    summary = Column(String(255))
    description = Column(String(255))
    ticket_id = Column(String(255))


class SecurityAlarmJsonSerializer(serialize.JsonSerializer):
    """Security alarm serializer"""

    __attributes__ = ['id', 'plugin_id', 'alarm_id', 'timestamp', 'status',
                      'severity', 'component_id', 'summary',
                      'project_id', 'security_rating', 'vulnerabilities',
                      'description', 'deleted', 'created_at', 'deleted_at',
                      'updated_at']
                      'severity', 'project_id', 'component_id', 'summary',
                      'description', 'ticket_id', 'deleted', 'created_at',
                      'deleted_at', 'updated_at']
    __required__ = ['id', 'title']
    __attribute_serializer__ = dict(created_at='date', deleted_at='date',
                                    acknowledged_at='date')
    __object_class__ = SecurityAlarm


class Task(BASE, CerberusBase):
    """Tasks for security purposes (e.g. daily scans)."""

    __tablename__ = 'task'
    __table_args__ = ()

    id = Column(Integer, primary_key=True)
    name = Column(String(255))
    method = Column(String(255))
    type = Column(String(255))
    period = Column(Integer)
    plugin_id = Column(String(255))
    running = Column(Boolean)
    uuid = Column(String(255))


class TaskJsonSerializer(serialize.JsonSerializer):
    """Task serializer"""

    __attributes__ = ['id', 'name', 'method', 'type', 'period',
                      'plugin_id', 'running', 'uuid', 'deleted', 'created_at',
                      'deleted_at', 'updated_at']
    __required__ = ['id', ]
    __attribute_serializer__ = dict(created_at='date', deleted_at='date',
                                    acknowledged_at='date')
    __object_class__ = Task

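[Editor's note] A hedged sketch of building a Task row from the columns declared above; all values are invented for illustration, and it assumes the declarative base accepts column names as keyword arguments, as plain SQLAlchemy declarative models do:

    # Hedged sketch -- illustrative values only.
    task = Task(name='daily_scan',
                method='get_security_reports',
                type='recurrent',
                period=86400,
                plugin_id='bbbe3f9f-1c07-46e3-9f54-b0e6a5b6a961',
                running=True,
                uuid='0a44bba3-5400-43b4-9b47-fa76f1a92e86')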
@@ -22,10 +22,10 @@ from oslo import messaging
from stevedore import extension

from cerberus.common import errors
from cerberus.db.sqlalchemy import api
from cerberus.common import service
from cerberus.db.sqlalchemy import api as db_api
from cerberus.openstack.common import log
from cerberus.openstack.common import loopingcall
from cerberus.openstack.common import service
from cerberus.openstack.common import threadgroup
from plugins import base

@@ -33,21 +33,14 @@ from plugins import base
LOG = log.getLogger(__name__)

OPTS = [

    cfg.MultiStrOpt('messaging_urls',
                    default=[],
                    help="Messaging URLs to listen for notifications. "
                         "Example: transport://user:pass@host1:port"
                         "[,hostN:portN]/virtual_host "
                         "(DEFAULT/transport_url is used if empty)"),
    cfg.ListOpt('notification-topics', default=['designate']),
    cfg.ListOpt('cerberus_control_exchange', default=['cerberus']),
    cfg.ListOpt('notification_topics', default=['notifications', ],
                help='AMQP topic used for OpenStack notifications'),
]

cfg.CONF.register_opts(OPTS)


class CerberusManager(service.Service):
class CerberusManager(service.CerberusService):

    TASK_NAMESPACE = 'cerberus.plugins'

@@ -59,32 +52,49 @@ class CerberusManager(service.Service):
        )

    def __init__(self):
        self.task_id = 0
        super(CerberusManager, self).__init__()

    def _register_plugin(self, extension):
        # Record plugin in database
        """Register plugin in database

        :param extension: stevedore extension containing the plugin to register
        :return:
        """

        version = extension.entry_point.dist.version
        plugin = extension.obj
        db_plugin_info = api.plugin_info_get(plugin._name)
        db_plugin_info = db_api.plugin_info_get(plugin._name)
        if db_plugin_info is None:
            db_plugin_info = api.plugin_info_create({'name': plugin._name,
                                                     'uuid': uuid.uuid4(),
                                                     'version': version,
                                                     'provider':
                                                     plugin.PROVIDER,
                                                     'type': plugin.TYPE,
                                                     'description':
                                                     plugin.DESCRIPTION,
                                                     'tool_name':
                                                     plugin.TOOL_NAME
                                                     })
            db_plugin_info = db_api.plugin_info_create({'name': plugin._name,
                                                        'uuid': uuid.uuid4(),
                                                        'version': version,
                                                        'provider':
                                                        plugin.PROVIDER,
                                                        'type': plugin.TYPE,
                                                        'description':
                                                        plugin.DESCRIPTION,
                                                        'tool_name':
                                                        plugin.TOOL_NAME})
        else:
            api.plugin_version_update(db_plugin_info.id, version)
            db_api.plugin_version_update(db_plugin_info.id, version)

        plugin._uuid = db_plugin_info.uuid

    def add_stored_tasks(self):
        """Add stored tasks when Cerberus starts"""
        tasks = db_api.get_all_tasks()
        for task in tasks:
            kwargs = {}
            kwargs['task_name'] = task.name
            kwargs['task_type'] = task.type
            kwargs['task_period'] = task.period
            kwargs['task_id'] = task.uuid
            kwargs['running'] = task.running
            kwargs['persistent'] = 'True'
            self._add_task(task.plugin_id, task.method, **kwargs)

    def start(self):
        """Start Cerberus Manager"""

        self.rpc_server = None
        self.notification_server = None
@@ -109,6 +119,8 @@ class CerberusManager(service.Service):
            targets.extend(handler.get_targets(cfg.CONF))
            plugins.append(handler)

        self.add_stored_tasks()

        transport = messaging.get_transport(cfg.CONF)

        if transport:
@@ -124,39 +136,48 @@ class CerberusManager(service.Service):
        self.rpc_server.start()
        self.notification_server.start()

    def _get_unique_task(self, id):
    def _get_unique_task(self, task_id):
        """Get a unique task (executed once) from its identifier

        :param task_id: the unique identifier of the task
        :return: the task or None if there is not any task with this id
        """

        try:
            unique_task = next(
                thread for thread in self.tg.threads
                if (thread.kw.get('task_id', None) == id))
                if (thread.kw.get('task_id', None) == task_id))
        except StopIteration:
            return None
        return unique_task

    def _get_recurrent_task(self, id):
    def _get_recurrent_task(self, task_id):
        """Get a recurrent task from its identifier

        :param task_id: the unique identifier of the task
        :return: the task or None if there is not any task with this id
        """
        try:
            recurrent_task = next(timer for timer in self.tg.timers if
                                  (timer.kw.get('task_id', None) == id))
                                  (timer.kw.get('task_id', None) == task_id))
        except StopIteration:
            return None
        return recurrent_task

    def _add_unique_task(self, callback, *args, **kwargs):
        """
        Add a simple task executing only once without delay
        """Add a unique task (executed once) without delay

        :param callback: Callable function to call when it's necessary
        :param args: list of positional arguments to call the callback with
        :param kwargs: dict of keyword arguments to call the callback with
        :return: the thread object that is created
        """
        self.tg.add_thread(callback, *args, **kwargs)
        return self.tg.add_thread(callback, *args, **kwargs)

    def _add_stopped_reccurent_task(self, callback, period, initial_delay=None,
                                    *args, **kwargs):
        """Add a recurrent task (executed periodically) without starting it

    def _add_recurrent_task(self, callback, period, initial_delay=None, *args,
                            **kwargs):
        """
        Add a recurrent task executing periodically with or without an initial
        delay
        :param callback: Callable function to call when it's necessary
        :param period: the time in seconds between two executions of the task
        :param initial_delay: the delay before the first execution of the task
@@ -164,13 +185,27 @@ class CerberusManager(service.Service):
        :param args: list of positional arguments to call the callback with
        :param kwargs: dict of keyword arguments to call the callback with
        """
        self.tg.add_timer(period, callback, initial_delay, *args, **kwargs)
        return self.tg.add_stopped_timer(callback, initial_delay,
                                         *args, **kwargs)

    def _add_recurrent_task(self, callback, period, initial_delay=None, *args,
                            **kwargs):
        """Add a recurrent task (executed periodically)

        :param callback: Callable function to call when it's necessary
        :param period: the time in seconds between two executions of the task
        :param initial_delay: the delay before the first execution of the task
                              occurs
        :param args: list of positional arguments to call the callback with
        :param kwargs: dict of keyword arguments to call the callback with
        """
        return self.tg.add_timer(period, callback, initial_delay, *args,
                                 **kwargs)

    def get_plugins(self, ctx):
        '''
        This method is designed to be called by an rpc client.
        E.g: Cerberus-api
        It is used to get information about plugins
        '''List plugins loaded by the Cerberus manager

        This method is called by the Cerberus-api rpc client
        '''
        json_plugins = []
        for extension in self.cerberus_manager:
@@ -179,10 +214,10 @@ class CerberusManager(service.Service):
            json_plugins.append(res)
        return json_plugins

    def _get_plugin_from_uuid(self, uuid):
    def _get_plugin_from_uuid(self, plugin_id):
        for extension in self.cerberus_manager:
            plugin = extension.obj
            if (plugin._uuid == uuid):
            if plugin._uuid == plugin_id:
                return plugin
        return None

@@ -193,26 +228,21 @@ class CerberusManager(service.Service):
        else:
            return None

    def add_task(self, ctx, uuid, method_, *args, **kwargs):
        '''
        This method is designed to be called by an rpc client.
        E.g: Cerberus-api
        It is used to call a method of a plugin back
        :param ctx: a request context dict supplied by client
        :param uuid: the uuid of the plugin to call the method on
    def _add_task(self, plugin_id, method_, *args, **kwargs):
        '''Add a task in the Cerberus manager

        :param plugin_id: the uuid of the plugin to call the method on
        :param method_: the method to call back
        :param task_type: the type of task to create
        :param args: some extra arguments
        :param kwargs: some extra keyword arguments
        '''
        self.task_id += 1
        kwargs['task_id'] = self.task_id
        kwargs['plugin_id'] = uuid
        kwargs['plugin_id'] = plugin_id
        task_type = kwargs.get('task_type', "unique")
        plugin = self._get_plugin_from_uuid(uuid)
        plugin = self._get_plugin_from_uuid(plugin_id)

        if plugin is None:
            raise errors.PluginNotFound(uuid)
            raise errors.PluginNotFound(plugin_id)

        if (task_type.lower() == 'recurrent'):
            try:
@@ -221,10 +251,17 @@ class CerberusManager(service.Service):
                LOG.exception(e)
                raise errors.TaskPeriodNotInteger()
            try:
                self._add_recurrent_task(getattr(plugin, method_),
                                         task_period,
                                         *args,
                                         **kwargs)
                if kwargs.get('running', True) is True:
                    task = self._add_recurrent_task(getattr(plugin, method_),
                                                    task_period,
                                                    *args,
                                                    **kwargs)
                else:
                    task = self._add_stopped_reccurent_task(
                        getattr(plugin, method_),
                        task_period,
                        *args,
                        **kwargs)
            except TypeError as e:
                LOG.exception(e)
                raise errors.MethodNotString()
@@ -235,7 +272,7 @@ class CerberusManager(service.Service):
                                               plugin.__class__.__name__)
        else:
            try:
                self._add_unique_task(
                task = self._add_unique_task(
                    getattr(plugin, method_),
                    *args,
                    **kwargs)
@@ -246,111 +283,186 @@ class CerberusManager(service.Service):
                LOG.exception(e)
                raise errors.MethodNotCallable(method_,
                                               plugin.__class__.__name__)
        return self.task_id

    def _stop_recurrent_task(self, id):
        return task

    def _store_task(self, task, method_):
        try:
            task_period_ = task.kw.get('task_period', None)
            if task_period_ is not None:
                task_period = int(task_period_)
            else:
                task_period = task_period_

            db_api.create_task({'name': task.kw.get('task_name',
                                                    'Unknown'),
                                'method': str(method_),
                                'type': task.kw['task_type'],
                                'period': task_period,
                                'plugin_id': task.kw['plugin_id'],
                                'running': True,
                                'uuid': task.kw['task_id']})

        except Exception as e:
            LOG.exception(e)
            pass

    def create_task(self, ctx, plugin_id, method_, *args, **kwargs):
        """Create a task

        This method is called by a rpc client. It adds a task in the manager
        and stores it if the task is persistent

        :param ctx: a request context dict supplied by client
        :param plugin_id: the uuid of the plugin to call the method on
        :param method_: the method to call back
        :param args: some extra arguments
        :param kwargs: some extra keyword arguments
        """
        Stop the recurrent task but does not remove it from the ThreadGroup.
        I.e, the task still exists and could be restarted
        Plus, if the task is running, wait for the end of its execution
        :param id: the id of the recurrent task to stop
        task_id = uuid.uuid4()
        try:
            task = self._add_task(plugin_id, method_, *args,
                                  task_id=str(task_id), **kwargs)
        except Exception:
            raise
        if kwargs.get('persistent', '') == 'True':
            try:
                self._store_task(task, method_)
            except Exception as e:
                LOG.exception(e)
                pass
        return str(task_id)

    def _stop_recurrent_task(self, task_id):
        """Stop the recurrent task without removing it from the ThreadGroup.

        The task still exists and could be restarted. Plus, if the task is
        running, wait for the end of its execution
        :param task_id: the id of the recurrent task to stop
        :return:
        :raises:
            StopIteration: the task is not found
        """
        recurrent_task = self._get_recurrent_task(id)
        recurrent_task = self._get_recurrent_task(task_id)
        if recurrent_task is None:
            raise errors.TaskNotFound(id)
            raise errors.TaskNotFound(task_id)
        recurrent_task.stop()
        if recurrent_task.kw.get('persistent', '') == 'True':
            try:
                db_api.update_state_task(task_id, False)
            except Exception as e:
                LOG.exception(e)
                raise e

    def _stop_unique_task(self, id):
        unique_task = self._get_unique_task(id)
    def _stop_unique_task(self, task_id):
        """Stop the task. This task is automatically deleted as it's not
        recurrent
        """
        unique_task = self._get_unique_task(task_id)
        if unique_task is None:
            raise errors.TaskNotFound(id)
            raise errors.TaskNotFound(task_id)
        unique_task.stop()
        if unique_task.kw.get('persistent', '') == 'True':
            try:
                db_api.delete_task(task_id)
            except Exception as e:
                LOG.exception(e)
                raise e

    def _stop_task(self, id):
        task = self._get_task(id)
    def _stop_task(self, task_id):
        task = self._get_task(task_id)
        if isinstance(task, loopingcall.FixedIntervalLoopingCall):
            try:
                self._stop_recurrent_task(id)
                self._stop_recurrent_task(task_id)
            except errors.InvalidOperation:
                raise
        elif isinstance(task, threadgroup.Thread):
            try:
                self._stop_unique_task(id)
                self._stop_unique_task(task_id)
            except errors.InvalidOperation:
                raise

    def stop_task(self, ctx, id):
    def stop_task(self, ctx, task_id):
        try:
            self._stop_task(id)
            self._stop_task(task_id)
        except errors.InvalidOperation:
            raise
        return id
        return task_id

    def _delete_recurrent_task(self, id):
    def _delete_recurrent_task(self, task_id):
        """
        Stop the task and delete the recurrent task from the ThreadGroup.
        If the task is running, wait for the end of its execution
        :param id: the identifier of the task to delete
        :param task_id: the identifier of the task to delete
        :return:
        """
        recurrent_task = self._get_recurrent_task(id)
        recurrent_task = self._get_recurrent_task(task_id)
        if (recurrent_task is None):
            raise errors.TaskDeletionNotAllowed(id)
            raise errors.TaskDeletionNotAllowed(task_id)
        recurrent_task.stop()
        try:
            self.tg.timers.remove(recurrent_task)
        except ValueError:
            raise
        if recurrent_task.kw.get('persistent', '') == 'True':
            try:
                db_api.delete_task(task_id)
            except Exception as e:
                LOG.exception(e)
                raise e

    def delete_recurrent_task(self, ctx, id):
    def delete_recurrent_task(self, ctx, task_id):
        '''
        This method is designed to be called by an rpc client.
        E.g: Cerberus-api
        Stop the task and delete the recurrent task from the ThreadGroup.
        If the task is running, wait for the end of its execution
        :param ctx: a request context dict supplied by client
        :param id: the identifier of the task to delete
        :param task_id: the identifier of the task to delete
        '''
        try:
            self._delete_recurrent_task(id)
            self._delete_recurrent_task(task_id)
        except errors.InvalidOperation:
            raise
        return id
        return task_id

    def _force_delete_recurrent_task(self, id):
    def _force_delete_recurrent_task(self, task_id):
        """
        Stop the task even if it is running and delete the recurrent task from
        the ThreadGroup.
        :param id: the identifier of the task to force delete
        :param task_id: the identifier of the task to force delete
        :return:
        """
        recurrent_task = self._get_recurrent_task(id)
        recurrent_task = self._get_recurrent_task(task_id)
        if (recurrent_task is None):
            raise errors.TaskDeletionNotAllowed(id)
            raise errors.TaskDeletionNotAllowed(task_id)
        recurrent_task.stop()
        recurrent_task.gt.kill()
        try:
            self.tg.timers.remove(recurrent_task)
        except ValueError:
            raise
        if recurrent_task.kw.get('persistent', '') == 'True':
            try:
                db_api.delete_task(task_id)
            except Exception as e:
                LOG.exception(e)
                raise e

    def force_delete_recurrent_task(self, ctx, id):
    def force_delete_recurrent_task(self, ctx, task_id):
        '''
        This method is designed to be called by an rpc client.
        E.g: Cerberus-api
        Stop the task even if it is running and delete the recurrent task
        from the ThreadGroup.
        :param ctx: a request context dict supplied by client
        :param id: the identifier of the task to force delete
        :param task_id: the identifier of the task to force delete
        '''
        try:
            self._force_delete_recurrent_task(id)
            self._force_delete_recurrent_task(task_id)
        except errors.InvalidOperation:
            raise
        return id
        return task_id

    def _get_tasks(self):
        tasks = []
@@ -360,11 +472,11 @@ class CerberusManager(service.Service):
                tasks.append(thread)
        return tasks

    def _get_task(self, id):
        task = self._get_unique_task(id)
        task_ = self._get_recurrent_task(id)
    def _get_task(self, task_id):
        task = self._get_unique_task(task_id)
        task_ = self._get_recurrent_task(task_id)
        if (task is None and task_ is None):
            raise errors.TaskNotFound(id)
            raise errors.TaskNotFound(task_id)
        return task if task is not None else task_

    def get_tasks(self, ctx):
@@ -381,9 +493,9 @@ class CerberusManager(service.Service):
                                      cls=base.ThreadEncoder))
        return tasks_

    def get_task(self, ctx, id):
    def get_task(self, ctx, task_id):
        try:
            task = self._get_task(id)
            task = self._get_task(task_id)
        except errors.InvalidOperation:
            raise
        if isinstance(task, loopingcall.FixedIntervalLoopingCall):
@@ -393,34 +505,37 @@ class CerberusManager(service.Service):
            return json.dumps(task,
                              cls=base.ThreadEncoder)

    def _restart_recurrent_task(self, id):
    def _restart_recurrent_task(self, task_id):
        """
        Restart the task
        :param id: the identifier of the task to restart
        :param task_id: the identifier of the task to restart
        :return:
        """
        recurrent_task = self._get_recurrent_task(id)
        recurrent_task = self._get_recurrent_task(task_id)
        if (recurrent_task is None):
            raise errors.TaskRestartNotAllowed(str(id))
            raise errors.TaskRestartNotAllowed(str(task_id))
        period = recurrent_task.kw.get("task_period", None)
        if recurrent_task._running is True:
            raise errors.TaskRestartNotPossible(str(id))
            raise errors.TaskRestartNotPossible(str(task_id))
        else:
            try:
                recurrent_task.start(int(period))
            except ValueError as e:
                if recurrent_task.kw.get('persistent', '') == 'True':
                    db_api.update_state_task(task_id, True)
            except Exception as e:
                LOG.exception(e)
                raise e

    def restart_recurrent_task(self, ctx, id):
    def restart_recurrent_task(self, ctx, task_id):
        '''
        This method is designed to be called by an rpc client.
        E.g: Cerberus-api
        Restart a recurrent task after it has been stopped
        :param ctx: a request context dict supplied by client
        :param id: the identifier of the task to restart
        :param task_id: the identifier of the task to restart
        '''
        try:
            self._restart_recurrent_task(id)
            self._restart_recurrent_task(task_id)
        except errors.InvalidOperation:
            raise
        return id
        return task_id

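[Editor's note] A hedged sketch of driving create_task from an RPC client the way the API service would; the transport, topic and server values are assumptions, not taken from this diff, and oslo.messaging's RPCClient.call signature is call(ctxt, method, **kwargs):

    # Hedged sketch -- topic/server/plugin uuid values are assumptions.
    from oslo.config import cfg
    from oslo import messaging

    transport = messaging.get_transport(cfg.CONF)
    target = messaging.Target(topic='cerberus', server='manager1')
    client = messaging.RPCClient(transport, target)

    # Schedule a persistent recurrent task; the manager returns the task uuid.
    task_id = client.call({}, 'create_task',
                          plugin_id='bbbe3f9f-1c07-46e3-9f54-b0e6a5b6a961',
                          method_='get_security_reports',
                          task_type='recurrent',
                          task_period='3600',
                          persistent='True')

Because persistent tasks are re-added by add_stored_tasks() at startup, a task created this way survives a manager restart.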
@@ -94,7 +94,7 @@ class FixedIntervalLoopingCall(LoopingCallBase):

            self.done = done

        self.gt = greenthread.spawn(_inner)
        greenthread.spawn_n(_inner)
        return self.done


@@ -1,173 +0,0 @@
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import socket
import uuid

from oslo.config import cfg

from cerberus.openstack.common import context
from cerberus.openstack.common.gettextutils import _, _LE
from cerberus.openstack.common import importutils
from cerberus.openstack.common import jsonutils
from cerberus.openstack.common import log as logging
from cerberus.openstack.common import timeutils


LOG = logging.getLogger(__name__)

notifier_opts = [
    cfg.MultiStrOpt('notification_driver',
                    default=[],
                    help='Driver or drivers to handle sending notifications'),
    cfg.StrOpt('default_notification_level',
               default='INFO',
               help='Default notification level for outgoing notifications'),
    cfg.StrOpt('default_publisher_id',
               default=None,
               help='Default publisher_id for outgoing notifications'),
]

CONF = cfg.CONF
CONF.register_opts(notifier_opts)

WARN = 'WARN'
INFO = 'INFO'
ERROR = 'ERROR'
CRITICAL = 'CRITICAL'
DEBUG = 'DEBUG'

log_levels = (DEBUG, WARN, INFO, ERROR, CRITICAL)


class BadPriorityException(Exception):
    pass


def notify_decorator(name, fn):
    """Decorator for notify which is used from utils.monkey_patch().

    :param name: name of the function
    :param function: - object of the function
    :returns: function -- decorated function

    """
    def wrapped_func(*args, **kwarg):
        body = {}
        body['args'] = []
        body['kwarg'] = {}
        for arg in args:
            body['args'].append(arg)
        for key in kwarg:
            body['kwarg'][key] = kwarg[key]

        ctxt = context.get_context_from_function_and_args(fn, args, kwarg)
        notify(ctxt,
               CONF.default_publisher_id or socket.gethostname(),
               name,
               CONF.default_notification_level,
               body)
        return fn(*args, **kwarg)
    return wrapped_func


def publisher_id(service, host=None):
    if not host:
        try:
            host = CONF.host
        except AttributeError:
            host = CONF.default_publisher_id or socket.gethostname()
    return "%s.%s" % (service, host)


def notify(context, publisher_id, event_type, priority, payload):
    """Sends a notification using the specified driver

    :param publisher_id: the source worker_type.host of the message
    :param event_type: the literal type of event (ex. Instance Creation)
    :param priority: patterned after the enumeration of Python logging
                     levels in the set (DEBUG, WARN, INFO, ERROR, CRITICAL)
    :param payload: A python dictionary of attributes

    Outgoing message format includes the above parameters, and appends the
    following:

    message_id
      a UUID representing the id for this notification

    timestamp
      the GMT timestamp the notification was sent at

    The composite message will be constructed as a dictionary of the above
    attributes, which will then be sent via the transport mechanism defined
    by the driver.

    Message example::

        {'message_id': str(uuid.uuid4()),
         'publisher_id': 'compute.host1',
         'timestamp': timeutils.utcnow(),
         'priority': 'WARN',
         'event_type': 'compute.create_instance',
         'payload': {'instance_id': 12, ... }}

    """
    if priority not in log_levels:
        raise BadPriorityException(
            _('%s not in valid priorities') % priority)

    # Ensure everything is JSON serializable.
    payload = jsonutils.to_primitive(payload, convert_instances=True)

    msg = dict(message_id=str(uuid.uuid4()),
               publisher_id=publisher_id,
               event_type=event_type,
               priority=priority,
               payload=payload,
               timestamp=str(timeutils.utcnow()))

    for driver in _get_drivers():
        try:
            driver.notify(context, msg)
        except Exception as e:
            LOG.exception(_LE("Problem '%(e)s' attempting to "
                              "send to notification system. "
                              "Payload=%(payload)s")
                          % dict(e=e, payload=payload))


_drivers = None


def _get_drivers():
    """Instantiate, cache, and return drivers based on the CONF."""
    global _drivers
    if _drivers is None:
        _drivers = {}
        for notification_driver in CONF.notification_driver:
            try:
                driver = importutils.import_module(notification_driver)
                _drivers[notification_driver] = driver
            except ImportError:
                LOG.exception(_LE("Failed to load notifier %s. "
                                  "These notifications will not be sent.") %
                              notification_driver)
    return _drivers.values()


def _reset_drivers():
    """Used by unit tests to reset the drivers."""
    global _drivers
    _drivers = None
@@ -1,37 +0,0 @@
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from oslo.config import cfg

from cerberus.openstack.common import jsonutils
from cerberus.openstack.common import log as logging


CONF = cfg.CONF


def notify(_context, message):
    """Notifies the recipient of the desired event given the model.

    Log notifications using OpenStack's default logging system.
    """

    priority = message.get('priority',
                           CONF.default_notification_level)
    priority = priority.lower()
    logger = logging.getLogger(
        'cerberus.openstack.common.notification.%s' %
        message['event_type'])
    getattr(logger, priority)(jsonutils.dumps(message))
@@ -1,19 +0,0 @@
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.


def notify(_context, message):
    """Notifies the recipient of the desired event given the model."""
    pass
@@ -1,77 +0,0 @@
# Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
A temporary helper which emulates oslo.messaging.Notifier.

This helper method allows us to do the tedious porting to the new Notifier API
as a standalone commit so that the commit which switches us to oslo.messaging
is smaller and easier to review. This file will be removed as part of that
commit.
"""

from oslo.config import cfg

from cerberus.openstack.common.notifier import api as notifier_api

CONF = cfg.CONF


class Notifier(object):

    def __init__(self, publisher_id):
        super(Notifier, self).__init__()
        self.publisher_id = publisher_id

    _marker = object()

    def prepare(self, publisher_id=_marker):
        ret = self.__class__(self.publisher_id)
        if publisher_id is not self._marker:
            ret.publisher_id = publisher_id
        return ret

    def _notify(self, ctxt, event_type, payload, priority):
        notifier_api.notify(ctxt,
                            self.publisher_id,
                            event_type,
                            priority,
                            payload)

    def audit(self, ctxt, event_type, payload):
        # No audit in old notifier.
        self._notify(ctxt, event_type, payload, 'INFO')

    def debug(self, ctxt, event_type, payload):
        self._notify(ctxt, event_type, payload, 'DEBUG')

    def info(self, ctxt, event_type, payload):
        self._notify(ctxt, event_type, payload, 'INFO')

    def warn(self, ctxt, event_type, payload):
        self._notify(ctxt, event_type, payload, 'WARN')

    warning = warn

    def error(self, ctxt, event_type, payload):
        self._notify(ctxt, event_type, payload, 'ERROR')

    def critical(self, ctxt, event_type, payload):
        self._notify(ctxt, event_type, payload, 'CRITICAL')


def get_notifier(service=None, host=None, publisher_id=None):
    if not publisher_id:
        publisher_id = "%s.%s" % (service, host or CONF.host)
    return Notifier(publisher_id)
@@ -1,47 +0,0 @@
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from oslo.config import cfg

from cerberus.openstack.common import context as req_context
from cerberus.openstack.common.gettextutils import _LE
from cerberus.openstack.common import log as logging
from cerberus.openstack.common import rpc

LOG = logging.getLogger(__name__)

notification_topic_opt = cfg.ListOpt(
    'notification_topics', default=['notifications', ],
    help='AMQP topic used for OpenStack notifications')

CONF = cfg.CONF
CONF.register_opt(notification_topic_opt)


def notify(context, message):
    """Sends a notification via RPC."""
    if not context:
        context = req_context.get_admin_context()
    priority = message.get('priority',
                           CONF.default_notification_level)
    priority = priority.lower()
    for topic in CONF.notification_topics:
        topic = '%s.%s' % (topic, priority)
        try:
            rpc.notify(context, topic, message)
        except Exception:
            LOG.exception(_LE("Could not send notification to %(topic)s. "
                              "Payload=%(message)s"),
                          {"topic": topic, "message": message})
@@ -1,53 +0,0 @@
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

'''messaging based notification driver, with message envelopes'''

from oslo.config import cfg

from cerberus.openstack.common import context as req_context
from cerberus.openstack.common.gettextutils import _LE
from cerberus.openstack.common import log as logging
from cerberus.openstack.common import rpc

LOG = logging.getLogger(__name__)

notification_topic_opt = cfg.ListOpt(
    'topics', default=['notifications', ],
    help='AMQP topic(s) used for OpenStack notifications')

opt_group = cfg.OptGroup(name='rpc_notifier2',
                         title='Options for rpc_notifier2')

CONF = cfg.CONF
CONF.register_group(opt_group)
CONF.register_opt(notification_topic_opt, opt_group)


def notify(context, message):
    """Sends a notification via RPC."""
    if not context:
        context = req_context.get_admin_context()
    priority = message.get('priority',
                           CONF.default_notification_level)
    priority = priority.lower()
    for topic in CONF.rpc_notifier2.topics:
        topic = '%s.%s' % (topic, priority)
        try:
            rpc.notify(context, topic, message, envelope=True)
        except Exception:
            LOG.exception(_LE("Could not send notification to %(topic)s. "
                              "Payload=%(message)s"),
                          {"topic": topic, "message": message})
@@ -36,9 +36,7 @@ class Thread(object):
    :class:`ThreadGroup`. The Thread will notify the :class:`ThreadGroup` when
    it has done so it can be removed from the threads list.
    """
    def __init__(self, thread, group, *args, **kwargs):
        self.args = args
        self.kw = kwargs
    def __init__(self, thread, group):
        self.thread = thread
        self.thread.link(_thread_done, group=group, thread=self)

@@ -80,7 +78,7 @@ class ThreadGroup(object):

    def add_thread(self, callback, *args, **kwargs):
        gt = self.pool.spawn(callback, *args, **kwargs)
        th = Thread(gt, self, *args, **kwargs)
        th = Thread(gt, self)
        self.threads.append(th)
        return th

@@ -140,6 +140,7 @@ class FixedIntervalLoopingCallEncoder(json.JSONEncoder):
                'period': obj.kw.get('task_period', None),
                'type': obj.kw.get('task_type', None),
                'plugin_id': obj.kw.get('plugin_id', None),
                'persistent': obj.kw.get('persistent', 'False'),
                'state': state}


@@ -151,4 +152,5 @@ class ThreadEncoder(json.JSONEncoder):
                'name': obj.kw.get('task_name', None),
                'type': obj.kw.get('task_type', None),
                'plugin_id': obj.kw.get('plugin_id', None),
                'persistent': obj.kw.get('persistent', False),
                'state': 'running'}

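[Editor's note] A hedged sketch of how the encoders extended above are used when the manager serializes tasks; it mirrors the json.dumps calls visible in the manager hunks, and `task` stands for a threadgroup.Thread or loopingcall.FixedIntervalLoopingCall:

    # Hedged sketch -- illustrative only.
    import json
    from cerberus.plugins import base

    payload = json.dumps(task, cls=base.ThreadEncoder)

The added 'persistent' key lets a consumer of this JSON tell database-backed tasks apart from purely in-memory ones.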
@@ -1,94 +0,0 @@
#
# Copyright (c) 2014 EUROGICIEL
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from oslo.config import cfg

from cerberus.client import keystone_client
from cerberus.client import neutron_client
from cerberus.client import nova_client
from cerberus.openstack.common import log
from cerberus.plugins import base
import openvas_lib


LOG = log.getLogger(__name__)


# Register options for the service
OPENVAS_OPTS = [
    cfg.StrOpt('openvas_admin',
               default='admin',
               help='The admin user for the rpc server',
               ),
    cfg.StrOpt('openvas_passwd',
               default='admin',
               help='The password for the rpc server',
               ),
    cfg.StrOpt('openvas_url',
               default='https://',
               help='Url of the rpc server',
               ),
]

opt_group = cfg.OptGroup(name='openvas',
                         title='Options for the OpenVas client')

cfg.CONF.register_group(opt_group)
cfg.CONF.register_opts(OPENVAS_OPTS, opt_group)
cfg.CONF.import_group('openvas', 'cerberus.service')

_FLOATINGIP_UPDATED = 'floatingip.update.end'
_ROLE_ASSIGNMENT_CREATED = 'identity.created.role_assignment'
_ROLE_ASSIGNMENT_DELETED = 'identity.deleted.role_assignment'
_PROJECT_DELETED = 'identity.project.deleted'


class OpenVasPlugin(base.PluginBase):

    def __init__(self):
        self.task_id = None
        super(OpenVasPlugin, self).__init__()
        self.subscribe_event(_ROLE_ASSIGNMENT_CREATED)
        self.subscribe_event(_ROLE_ASSIGNMENT_DELETED)
        self.subscribe_event(_FLOATINGIP_UPDATED)
        self.subscribe_event(_PROJECT_DELETED)
        self.kc = keystone_client.Client()
        self.nc = neutron_client.Client()
        self.nova_client = nova_client.Client()
        self.conf = cfg.CONF.openvas

    @base.PluginBase.webmethod
    def get_security_reports(self, **kwargs):
        security_reports = []
        try:
            scanner = openvas_lib.VulnscanManager(self.conf.openvas_url,
                                                  self.conf.openvas_admin,
                                                  self.conf.openvas_passwd)
            finished_scans = scanner.get_finished_scans
            for scan_key, scan_id in finished_scans.iteritems():
                report_id = scanner.get_report_id(scan_id)
                report = scanner.get_report_html(report_id)

                security_reports.append(report)

        except Exception as e:
            LOG.exception(e)
            pass
        return security_reports

    def process_notification(self, ctxt, publisher_id, event_type, payload,
                             metadata):
        pass
@@ -43,7 +43,7 @@ class TaskPlugin(base.PluginBase):
            LOG.info(str(kwargs.get('task_name', 'unknown')) + " :"
                     + str(datetime.datetime.time(datetime.datetime.now())))
            i = 0
            while(i < 3600):
            while(i < 60):
                LOG.info(str(kwargs.get('task_name', 'unknown')) + " :"
                         + str(datetime.datetime.time(datetime.datetime.now())))
                i += 1

@@ -15,7 +15,11 @@
#

import datetime
import json

from cerberus.common import exception as cerberus_exception
from cerberus.common import json_encoders
from cerberus.db import api as db_api
from cerberus.openstack.common import log
from cerberus.plugins import base

@@ -34,6 +38,120 @@ class TestPlugin(base.PluginBase):
        LOG.info(str(kwargs.get('task_name', 'unknown')) + " :"
                 + str(datetime.datetime.time(datetime.datetime.now())))

@base.PluginBase.webmethod
|
||||
def get_security_reports(self, **kwargs):
|
||||
security_reports = []
|
||||
try:
|
||||
security_report = {
|
||||
'vulns': {'443': {'ip': '192.168.100.3', 'archived': False,
|
||||
'protocol': 'tcp', 'iface_id': 329,
|
||||
'family': 'Web Servers',
|
||||
'plugin': '1.3.6.1.4.1.25623.1.0.10386',
|
||||
'service_name': 'Apache httpd 2.2.22',
|
||||
'vuln_state': 'acked', 'port': 80,
|
||||
'state': 'acked', 'service': '80/tcp',
|
||||
'service_status': None, 'host_id': 328,
|
||||
'vuln_id': 443,
|
||||
'output': "Summary: \nRemote web server does not reply with 404 error code.\n\nInsight: \nThis web server is [mis]configured in that it does not return\n '404 Not Found' error codes when a non-existent file is requested,\n perhaps returning a site map, search page or authentication page\n instead.\n \n OpenVAS enabled some counter measures for that, however they might\n be insufficient. If a great number of security holes are produced\n for this port, they might not all be accurate\n\nReferences: \nNOXREF\nCVE:NOCVE\n\n", # noqa
|
||||
'service_id': 337, 'score': 0.0, 'id': 443,
|
||||
'name': 'No 404 check'},
|
||||
'447': {'ip': '192.168.100.3', 'archived': False,
|
||||
'protocol': 'tcp', 'iface_id': 329,
|
||||
'family': 'Denial of Service',
|
||||
'plugin': '1.3.6.1.4.1.25623.1.0.121035',
|
||||
'service_name': 'OpenSSH 5.9p1 Debian',
|
||||
'vuln_state': 'acked', 'port': 22,
|
||||
'state': 'acked', 'service': '22/tcp',
|
||||
'service_status': None, 'host_id': 328,
|
||||
'vuln_id': 447,
|
||||
'output': "Summary: \nDenial of Service Vulnerability in OpenSSH\n\nInsight: \nThe sshd_config configuration file indicates connection limits:\n - MaxStartups: maximal number of unauthenticated connections (default : 10)\n - LoginGraceTime: expiration duration of unauthenticated connections (default : 2 minutes)\n\nHowever, in this default configuration, an attacker can open 10 TCP sessions on port 22/tcp, and then reopen them every 2 minutes, in order to limit the probability of a legitimate client to access to the service.\n\nNote: MaxStartups supports the 'random early drop' feature, which protects against this type of attack, but it is not enabled by default.\n\nAn unauthenticated attacker can therefore open ten connections to OpenSSH, in order to forbid the access to legitimate users.\n\nThis plugin only check OpenSSH version and not test to exploit this vulnerability.\n\nImpact: \nAttackers to cause a denial of service (connection-slot exhaustion).\n\nReferences: \nURL:http://www.openbsd.org/cgi-bin/cvsweb/src/usr.bin/ssh/sshd_config?r1=1.89#rev1.89\nURL:http://www.openbsd.org/cgi-bin/cvsweb/src/usr.bin/ssh/sshd_config.5?r1=1.156#rev1.156\nURL:http://www.openbsd.org/cgi-bin/cvsweb/src/usr.bin/ssh/servconf.c?r1=1.234#rev1.234\nURL:http://vigilance.fr/vulnerability/OpenSSH-denial-of-service-via-MaxStartups-11256\nCVE:CVE-2010-5107\n\nSolution: \nUpgrade your OpenSSH to 6.2. or modify LoginGraceTime and MaxStartups on server configuration\n\n", # noqa
|
||||
'service_id': 333, 'score': 5.0, 'id': 447,
|
||||
'name': 'Denial of Service in OpenSSH'},
|
||||
'446': {'ip': '192.168.100.3', 'archived': False,
|
||||
'protocol': 'udp', 'iface_id': 329,
|
||||
'family': 'Service detection',
|
||||
'plugin': '1.3.6.1.4.1.25623.1.0.10884',
|
||||
'service_name': 'NTP v4 (unsynchronized)',
|
||||
'vuln_state': 'new', 'port': 123,
|
||||
'state': 'new', 'service': '123/udp',
|
||||
'service_status': None, 'host_id': 328,
|
||||
'vuln_id': 446,
|
||||
'output': 'Summary: \nA NTP (Network Time Protocol) server is listening on this port.\n\nReferences: \nNOXREF\nCVE:NOCVE\n\n', # noqa
|
||||
'service_id': 335, 'score': 0.0, 'id': 446,
|
||||
'name': 'NTP read variables'},
|
||||
'445': {'ip': '192.168.100.3', 'archived': False,
|
||||
'protocol': 'tcp', 'iface_id': 329,
|
||||
'family': 'General',
|
||||
'plugin': '1.3.6.1.4.1.25623.1.0.120008',
|
||||
'service_name': 'Apache httpd 2.2.22 ',
|
||||
'vuln_state': 'acked', 'port': 443,
|
||||
'state': 'acked', 'service': '443/tcp',
|
||||
'service_status': None, 'host_id': 328,
|
||||
'vuln_id': 445,
|
||||
'output': '\nFollowing is a list of the SSL cipher suites supported when connecting to the host.\n\nSupported cipher suites (ORDER IS NOT SIGNIFICANT)\n SSLv3\n RSA_WITH_3DES_EDE_CBC_SHA\n DHE_RSA_WITH_3DES_EDE_CBC_SHA\n RSA_WITH_AES_128_CBC_SHA\n DHE_RSA_WITH_AES_128_CBC_SHA\n RSA_WITH_AES_256_CBC_SHA\n DHE_RSA_WITH_AES_256_CBC_SHA\n RSA_WITH_CAMELLIA_128_CBC_SHA\n DHE_RSA_WITH_CAMELLIA_128_CBC_SHA\n RSA_WITH_CAMELLIA_256_CBC_SHA\n DHE_RSA_WITH_CAMELLIA_256_CBC_SHA\n (TLSv1.0: idem)\n (TLSv1.1: idem)\n TLSv1.2\n RSA_WITH_3DES_EDE_CBC_SHA\n DHE_RSA_WITH_3DES_EDE_CBC_SHA\n RSA_WITH_AES_128_CBC_SHA\n DHE_RSA_WITH_AES_128_CBC_SHA\n RSA_WITH_AES_256_CBC_SHA\n DHE_RSA_WITH_AES_256_CBC_SHA\n RSA_WITH_AES_128_CBC_SHA256\n RSA_WITH_AES_256_CBC_SHA256\n RSA_WITH_CAMELLIA_128_CBC_SHA\n DHE_RSA_WITH_CAMELLIA_128_CBC_SHA\n DHE_RSA_WITH_AES_128_CBC_SHA256\n DHE_RSA_WITH_AES_256_CBC_SHA256\n RSA_WITH_CAMELLIA_256_CBC_SHA\n DHE_RSA_WITH_CAMELLIA_256_CBC_SHA\n\n', # noqa
|
||||
'service_id': 339, 'score': 0.0, 'id': 445,
|
||||
'name': 'SSL Cipher Suites Supported'},
|
||||
'444': {'ip': '192.168.100.3', 'archived': False,
|
||||
'protocol': 'tcp', 'iface_id': 329,
|
||||
'family': 'General',
|
||||
'plugin': '1.3.6.1.4.1.25623.1.0.120002',
|
||||
'service_name': 'Apache httpd 2.2.22',
|
||||
'vuln_state': 'acked', 'port': 443,
|
||||
'state': 'acked', 'service': '443/tcp',
|
||||
'service_status': None, 'host_id': 328,
|
||||
'vuln_id': 444,
|
||||
'output': '\nA vulnerability exists in SSL 3.0 and TLS 1.0 that could allow information \ndisclosure if an attacker intercepts encrypted traffic served from an affected \nsystem. It is also known as BEAST attack. \n\nCVSS Severity:\n CVSS Base Score: 4.3 (AV:N/AC:M/Au:N/C:P/I:N/A:N) \n Impact Subscore: \n Exploitability Subscore:\n\nReference:\n CVE-2011-3389\n \nSolution:\n Disable usage of CBC ciphers with SSL 3.0 and TLS 1.0 protocols.\n \nNote: \n This script detects the vulnerability in the SSLv3/TLSv1 protocol implemented \n in the server. It does not detect the BEAST attack where it exploits the \n vulnerability at HTTPS client-side.\n\n The detection at server-side does not necessarily mean your server is \n vulnerableto the BEAST attack because the attack exploits the vulnerability \n at client-side, and both SSL/TLS clients and servers can independently employ \n the split record countermeasure.\n \nSee Also:\n http://vnhacker.blogspot.com/2011/09/beast.html\n http://www.openssl.org/~bodo/tls-cbc.txt\n http://blogs.msdn.com/b/kaushal/archive/2012/01/21/fixing-the-beast.aspx\n \n \n', # noqa
|
||||
'service_id': 339, 'score': 4.3, 'id': 444,
|
||||
'name': 'BEAST Vulnerability'}},
|
||||
'host': {'archived': False, 'name': '192.168.100.3',
|
||||
'ifaces': [329], 'scan': True,
|
||||
'cpe': 'cpe:/o:canonical:ubuntu_linux', 'state': 'up',
|
||||
'cpe_title': 'Canonical Ubuntu Linux',
|
||||
'fingerprint': 'Linux Kernel', 'device': 'server',
|
||||
'id': 328},
|
||||
'stat': {'ignored': 0, 'entity_id': 328, 'medium': 2,
|
||||
'grade': 7.4, 'vulns': 2, 'archived': 0,
|
||||
'not_scanned': 0, 'high': 0, 'score': 9.3, 'hosts': 1,
|
||||
'trending': 0.0, 'scanned': 1, 'critical': 0,
|
||||
'low': 0},
|
||||
'ifaces': {'329': {'archived': False, 'ip': '192.168.100.3',
|
||||
'state': 'up',
|
||||
'services': [333, 335, 337, 339],
|
||||
'host_id': 328, 'id': 329}}}

    report_id = 1
    # Guard against reports that carry no 'stat' section before reading it.
    if security_report.get('stat', False):
        vulnerabilities_number = security_report['stat']\
            .get('vulns', None)
        try:
            db_api.security_report_create(
                {'title': 'Security report',
                 'plugin_id': self._uuid,
                 'report_id': report_id,
                 'component_id': 'a1d869a1-6ab0-4f02-9e56-f83034bacfcb',
                 'component_type': 'instance',
                 'component_name': 'openstack-test-server',
                 'project_id': '510c7f4ed14243f09df371bba2561177',
                 'description': 'openstack-test-server',
                 'security_rating': security_report['stat']['grade'],
                 'vulnerabilities': json.dumps(
                     security_report['vulns'],
                     cls=json_encoders.DateTimeEncoder),
                 'vulnerabilities_number': vulnerabilities_number}
            )
        except cerberus_exception.DBException as e:
            LOG.exception(e)
            pass
        security_reports.append(security_report)
        db_report_id = db_api.security_report_get_from_report_id(
            report_id).id
        db_api.security_report_update_last_report_date(
            db_report_id, datetime.datetime(2015, 5, 6, 16, 19, 29))
except Exception as e:
    LOG.exception(e)
    pass
return security_reports


def process_notification(self, ctxt, publisher_id, event_type, payload,
                         metadata):
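The vulnerabilities payload is serialized with a custom encoder because scanner data may embed datetime objects, which the stock json module cannot serialize. A minimal sketch of what an encoder like json_encoders.DateTimeEncoder typically looks like (illustrative only; the real cerberus implementation may differ):

import datetime
import json


class DateTimeEncoder(json.JSONEncoder):
    # Fall back to ISO 8601 strings for datetimes, defer everything else
    # to the base class (which raises TypeError for unknown types).
    def default(self, obj):
        if isinstance(obj, datetime.datetime):
            return obj.isoformat()
        return super(DateTimeEncoder, self).default(obj)


json.dumps({'seen': datetime.datetime(2015, 5, 6, 16, 19, 29)},
           cls=DateTimeEncoder)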
@@ -55,13 +55,19 @@ class TestPlugins(base.TestApiBase):
                name=PLUGIN_NAME_2
            )
        )
        self.fake_rpc_plugin = db_utils.get_rpc_plugin()
        self.fake_rpc_plugins = []
        self.fake_rpc_plugins.append(self.fake_rpc_plugin)
        self.fake_rpc_plugins.append(db_utils.get_rpc_plugin(
            name=PLUGIN_NAME_2
        ))
        self.plugins_path = '/plugins'
        self.plugin_path = '/plugins/%s' % self.fake_plugin['uuid']

    def test_list(self):

        rpc_plugins = []
        for plugin in self.fake_plugins:
        for plugin in self.fake_rpc_plugins:
            rpc_plugins.append(json.dumps(plugin))

        messaging.RPCClient.call = mock.MagicMock(
@@ -70,16 +76,19 @@ class TestPlugins(base.TestApiBase):
            return_value=self.fake_plugins_model)

        plugins = self.get_json(self.plugins_path)
        self.assertEqual({'plugins': self.fake_plugins},
                         plugins)
        expecting_sorted = sorted({'plugins': self.fake_plugins}['plugins'],
                                  key=lambda k: k['name'])
        actual_sorted = sorted(plugins['plugins'], key=lambda k: k['name'])
        self.assertEqual(expecting_sorted,
                         actual_sorted)

    def test_get(self):
        rpc_plugin = json.dumps(self.fake_plugin)
        rpc_plugin = json.dumps(self.fake_rpc_plugin)
        messaging.RPCClient.call = mock.MagicMock(return_value=rpc_plugin)
        db.plugin_info_get_from_uuid = mock.MagicMock(
            return_value=self.fake_plugin_model)
        plugin = self.get_json(self.plugin_path)
        self.assertEqual({'plugin': self.fake_plugin}, plugin)
        self.assertEqual(self.fake_plugin, plugin)

    def test_list_plugins_remote_error(self):
        messaging.RPCClient.call = mock.MagicMock(
84
cerberus/tests/api/v1/test_security_alarms.py
Normal file
@@ -0,0 +1,84 @@
#
# Copyright (c) 2015 EUROGICIEL
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import mock
from sqlalchemy import exc as sql_exc

from cerberus import db
from cerberus.tests.api import base
from cerberus.tests.db import utils as db_utils

SECURITY_ALARM_ID = 'abc123'
SECURITY_ALARM_ID_2 = 'xyz789'


class TestSecurityAlarms(base.TestApiBase):

    def setUp(self):
        super(TestSecurityAlarms, self).setUp()
        self.fake_security_alarm = db_utils.get_test_security_alarm(
            id=SECURITY_ALARM_ID
        )
        self.fake_security_alarms = []
        self.fake_security_alarms.append(self.fake_security_alarm)
        self.fake_security_alarms.append(db_utils.get_test_security_alarm(
            id=SECURITY_ALARM_ID_2
        ))
        self.fake_security_alarm_model = db_utils.get_security_alarm_model(
            id=SECURITY_ALARM_ID
        )
        self.fake_security_alarms_model = []
        self.fake_security_alarms_model.append(
            self.fake_security_alarm_model)
        self.fake_security_alarms_model.append(
            db_utils.get_security_alarm_model(
                id=SECURITY_ALARM_ID_2
            )
        )
        self.security_alarms_path = '/security_alarms'
        self.security_alarm_path = '/security_alarms/%s' \
            % self.fake_security_alarm['alarm_id']

    def test_get(self):

        db.security_alarm_get = mock.MagicMock(
            return_value=self.fake_security_alarm_model)
        security_alarm = self.get_json(self.security_alarm_path)
        self.assertEqual(self.fake_security_alarm,
                         security_alarm)

    def test_list(self):

        db.security_alarm_get_all = mock.MagicMock(
            return_value=self.fake_security_alarms_model)

        security_alarms = self.get_json(self.security_alarms_path)

        self.assertEqual({'security_alarms': self.fake_security_alarms},
                         security_alarms)

    def test_get_salarms_db_error(self):
        db.security_alarm_get_all = mock.MagicMock(
            side_effect=sql_exc.NoSuchTableError)

        res = self.get_json(self.security_alarms_path, expect_errors=True)
        self.assertEqual(404, res.status_code)

    def test_get_salarm_db_error(self):
        db.security_alarm_get = mock.MagicMock(
            side_effect=sql_exc.OperationalError)
        res = self.get_json(self.security_alarm_path, expect_errors=True)
        self.assertEqual(404, res.status_code)
@@ -21,17 +21,6 @@ from cerberus import db
from cerberus.tests.api import base
from cerberus.tests.db import utils as db_utils


def get_tasks():
    tasks = []
    return tasks


def get_task():
    task = {}
    return task


SECURITY_REPORT_ID = 'abc123'
SECURITY_REPORT_ID_2 = 'xyz789'

@@ -68,7 +57,7 @@ class TestSecurityReports(base.TestApiBase):
        db.security_report_get = mock.MagicMock(
            return_value=self.fake_security_report_model)
        security_report = self.get_json(self.security_report_path)
        self.assertEqual({'security_report': self.fake_security_report},
        self.assertEqual(self.fake_security_report,
                         security_report)

    def test_list(self):
@@ -81,6 +70,11 @@ class TestSecurityReports(base.TestApiBase):
        self.assertEqual({'security_reports': self.fake_security_reports},
                         security_reports)

    def test_update_sr_ticket_id(self):
        db.security_report_update_ticket_id = mock.MagicMock()
        res = self.put_json(self.security_report_path + '/tickets/1', None)
        self.assertEqual(200, res.status_code)

    def test_get_sreports_db_error(self):
        db.security_report_get_all = mock.MagicMock(
            side_effect=sql_exc.NoSuchTableError)

@@ -19,10 +19,26 @@ import mock

from oslo import messaging

from cerberus.api.v1.datamodels import task as task_model
from cerberus.tests.api import base
from cerberus.tests.db import utils as db_utils


class MockTask(object):
    name = None
    id = None
    period = None
    plugin_id = None
    type = None

    def __init__(self, name, period, plugin_id, type, method):
        self.name = name
        self.period = period
        self.plugin_id = plugin_id
        self.type = type
        self.method = method


class TestTasks(base.TestApiBase):

    def setUp(self):
@@ -31,13 +47,13 @@ class TestTasks(base.TestApiBase):
        self.fake_tasks = []
        self.fake_tasks.append(self.fake_task)
        self.fake_tasks.append(db_utils.get_test_task(
            task_id=2,
            task_type='recurrent',
            task_name='recurrent_task',
            task_period=20
            id=2,
            type='recurrent',
            name='recurrent_task',
            period=20
        ))
        self.tasks_path = '/tasks'
        self.task_path = '/tasks/%s' % self.fake_task['task_id']
        self.task_path = '/tasks/%s' % self.fake_task['id']

    def test_list(self):
        rpc_tasks = []
@@ -50,36 +66,36 @@ class TestTasks(base.TestApiBase):

    def test_create(self):
        task_id = 1
        task = {
            "method": "act_long",
            "name": "task1",
            "type": "recurrent",
            "period": 60,
            "plugin_id": "test"
        }
        task = task_model.TaskResource(
            initial_data={
                'method': "act_long",
                'name': "task1",
                'type': "recurrent",
                'period': 60,
                'plugin_id': "test"})

        expected_task = task
        expected_task['id'] = task_id
        expected_task.id = task_id
        messaging.RPCClient.call = mock.MagicMock(return_value=task_id)
        task = self.post_json(self.tasks_path, {'task': task})
        self.assertEqual({'task': expected_task}, task.json_body)
        task = self.post_json(self.tasks_path, task.as_dict())
        self.assertEqual(expected_task.as_dict(), task.json_body)

    def test_get(self):
        rpc_task = json.dumps(self.fake_task)
        messaging.RPCClient.call = mock.MagicMock(
            return_value=rpc_task)
        task = self.get_json(self.task_path)
        self.assertEqual({'task': self.fake_task}, task)
        task = self.get_json(self.task_path)
        self.assertEqual(self.fake_task, task)

    def test_stop(self):
        request_body = {'stop': 'null'}
        messaging.RPCClient.call = mock.MagicMock(return_value=1)
        response = self.post_json(self.task_path, request_body)
        self.assertEqual(200, response.status_code)
        response = self.post_json(self.task_path + '/action/stop', {})
        self.assertEqual(204, response.status_code)

    def test_delete(self):
        messaging.RPCClient.call = mock.MagicMock(return_value=1)
        response = self.delete(self.task_path)
        self.assertEqual(200, response.status_code)
        self.assertEqual(204, response.status_code)

    def test_list_tasks_remote_error(self):
        messaging.RPCClient.call = mock.MagicMock(
@@ -96,65 +112,56 @@ class TestTasks(base.TestApiBase):

    def test_create_recurrent_task_without_task_object(self):
        task_id = 1
        request_body = {
            "method": "act_long",
            "name": "task1",
            "type": "recurrent",
        }
        messaging.RPCClient.call = mock.MagicMock(return_value=task_id)
        response = self.post_json(self.tasks_path,
                                  request_body,
        response = self.post_json(self.tasks_path, None,
                                  expect_errors=True)
        self.assertEqual(400, response.status_code)

    def test_create_recurrent_task_without_plugin_id(self):
        task_id = 1
        task = {
            "method": "act_long",
            "name": "task1",
            "type": "recurrent",
            "period": 60,
        }
        request_body = {'task': task}
        task = task_model.TaskResource(
            initial_data={
                "method": "act_long",
                "name": "task1",
                "type": "recurrent",
                "period": 60,
            })
        messaging.RPCClient.call = mock.MagicMock(return_value=task_id)
        response = self.post_json(self.tasks_path,
                                  request_body,
                                  task.as_dict(),
                                  expect_errors=True)
        self.assertEqual(400, response.status_code)

    def test_create_recurrent_task_without_method(self):
        task_id = 1
        task = {
            "name": "task1",
            "type": "recurrent",
            "period": 60,
            "plugin_id": "plugin-test"
        }
        request_body = {'task': task}
        task = task_model.TaskResource(
            initial_data={
                "name": "task1",
                "type": "recurrent",
                "period": 60,
                "plugin_id": "plugin-test"
            })
        messaging.RPCClient.call = mock.MagicMock(return_value=task_id)
        response = self.post_json(self.tasks_path,
                                  request_body,
                                  task.as_dict(),
                                  expect_errors=True)
        self.assertEqual(400, response.status_code)

    def test_create_recurrent_task_remote_error(self):
        task = {
            "method": "act_long",
            "name": "task1",
            "type": "recurrent",
            "period": 60,
            "plugin_id": "plugin-test"
        }
        request_body = {'task': task}
        messaging.RPCClient.call = mock.MagicMock(
            side_effect=messaging.RemoteError)
        response = self.post_json(self.tasks_path,
                                  request_body,
                                  expect_errors=True)
        self.assertEqual(400, response.status_code)
        task = task_model.TaskResource(
            initial_data={
                "method": "act_long",
                "name": "task1",
                "type": "recurrent",
                "period": 60,
                "plugin_id": "plugin-test"
            })

    def test_get_task_bad_id(self):
        response = self.get_json('/tasks/toto', expect_errors=True)
        messaging.RPCClient.call = mock.MagicMock(
            side_effect=messaging.RemoteError(value="dummy"))
        response = self.post_json(self.tasks_path,
                                  task.as_dict(),
                                  expect_errors=True)
        self.assertEqual(400, response.status_code)

    def test_get_task_remote_error(self):
@@ -181,15 +188,6 @@ class TestTasks(base.TestApiBase):
                                  expect_errors=True)
        self.assertEqual(400, response.status_code)

    def test_stop_task_id_not_integer(self):
        request_body = json.dumps({
            "stop": "null"
        })
        response = self.post_json('/tasks/toto',
                                  request_body,
                                  expect_errors=True)
        self.assertEqual(400, response.status_code)

    def test_force_delete_task_wrong_id(self):
        request_body = json.dumps({
            "forceDelete": "null"
@@ -210,12 +208,8 @@ class TestTasks(base.TestApiBase):
                                  expect_errors=True)
        self.assertEqual(400, response.status_code)

    def test_delete_task_id_not_integer(self):
        response = self.delete('/tasks/toto', expect_errors=True)
        self.assertEqual(400, response.status_code)

    def test_delete_task_not_existing(self):
        messaging.RPCClient.call = mock.MagicMock(
            side_effect=messaging.RemoteError)
            side_effect=messaging.RemoteError(value="dummy"))
        response = self.delete(self.task_path, expect_errors=True)
        self.assertEqual(400, response.status_code)

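Taken together, these tests pin down the reworked HTTP contract for tasks: a task is created by POSTing its fields directly (no {'task': ...} wrapper), stopped through the new /action/stop route, and deleted with a 204 response. A hedged client sketch of that contract (the base URL and port are assumptions taken from the devstack defaults further down; keystone authentication is omitted):

import requests

BASE = 'http://127.0.0.1:8300/v1'  # assumed devstack endpoint

task = {'method': 'act_long', 'name': 'task1', 'type': 'recurrent',
        'period': 60, 'plugin_id': 'test'}

# Create: per test_create, the response body echoes the task fields
# plus the id assigned by the manager.
created = requests.post(BASE + '/tasks', json=task).json()

# Stop via the new action route, then delete; both return 204 No Content.
requests.post('%s/tasks/%s/action/stop' % (BASE, created['id']), json={})
requests.delete('%s/tasks/%s' % (BASE, created['id']))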
@@ -16,9 +16,14 @@

import datetime

from cerberus.common import loopingcall
from cerberus.db.sqlalchemy import models


def fake_function():
    pass


def get_test_security_report(**kwargs):
    return {
        'id': kwargs.get('id', 1),
@@ -34,11 +39,11 @@ def get_test_security_report(**kwargs):
        'title': kwargs.get('title', 'test-security-report'),
        'description': kwargs.get('description',
                                  'no fear, this is just a test'),
        'security_rating': kwargs.get('security_rating', 5),
        'security_rating': kwargs.get('security_rating', 5.1),
        'vulnerabilities': kwargs.get('vulnerabilities', 'vulns'),
        'vulnerabilities_number': kwargs.get('vulnerabilities_number', 1),
        'last_report_date': kwargs.get('last_report_date',
                                       '2015-01-01 00:00:00')
                                       '2015-01-01T00:00:00')
    }


@@ -61,7 +66,8 @@ def get_security_report_model(**kwargs):
    security_report.title = kwargs.get('title', 'test-security-report')
    security_report.description = kwargs.get('description',
                                             'no fear, this is just a test')
    security_report.security_rating = kwargs.get('security_rating', 5)
    security_report.security_rating = kwargs.get('security_rating',
                                                 float('5.1'))
    security_report.vulnerabilities = kwargs.get('vulnerabilities', 'vulns')
    security_report.vulnerabilities_number = kwargs.get(
        'vulnerabilities_number', 1)
@@ -98,16 +104,81 @@ def get_plugin_model(**kwargs):
    plugin.uuid = kwargs.get('uuid', '490cc562-9e60-46a7-9b5f-c7619aca2e07')
    plugin.version = kwargs.get('version', '0.1a')
    plugin.name = kwargs.get('name', 'tooly')
    plugin.subscribed_events = kwargs.get('subscribed_events',
                                          ["compute.instance.updated"])
    plugin.methods = kwargs.get('methods', [])
    return plugin


def get_rpc_plugin(**kwargs):
    return {
        'name': kwargs.get('name', 'tooly'),
        'subscribed_events': kwargs.get('subscribed_events',
                                        ["compute.instance.updated"]),
        'methods': kwargs.get('methods', [])
    }


def get_test_task(**kwargs):
    return {
        'task_id': kwargs.get('task_id', 1),
        'task_type': kwargs.get('task_type', 'unique'),
        'task_name': kwargs.get('task_name', 'No Name'),
        'task_period': kwargs.get('task_period', ''),
        'id': kwargs.get('task_id', 1),
        'type': kwargs.get('task_type', 'unique'),
        'name': kwargs.get('task_name', 'No Name'),
        'period': kwargs.get('task_period', ''),
        'persistent': 'false',
    }


def get_recurrent_task_object(**kwargs):
    return(loopingcall.CerberusFixedIntervalLoopingCall(fake_function,
                                                        **kwargs))


def get_recurrent_task_model(**kwargs):
    task = models.Task()
    task.id = kwargs.get('id', 1)
    task.name = kwargs.get('name', 'this_task')
    task.method = kwargs.get('method', 'method')
    task.type = kwargs.get('type', 'recurrent')
    task.period = kwargs.get('period', 10)
    task.plugin_id = kwargs.get('plugin_id',
                                '490cc562-9e60-46a7-9b5f-c7619aca2e07')
    task.uuid = kwargs.get('uuid', '500cc562-5c50-89t4-5fc8-c7619aca3n29')
    # Hand the model back like the other helpers in this module do.
    return task


def get_test_security_alarm(**kwargs):
    return {
        'id': kwargs.get('id', 1),
        'plugin_id': kwargs.get('plugin_id',
                                '228df8e8-d5f4-4eb9-a547-dfc649dd1017'),
        'alarm_id': kwargs.get('alarm_id', '1234'),
        'timestamp': kwargs.get('timestamp', '2015-01-01T00:00:00'),
        'status': kwargs.get('status', 'new'),
        'severity': kwargs.get('severity', 'CRITICAL'),
        'component_id': kwargs.get('component_id',
                                   '422zb9d5-c5g3-8wy9-a547-hhc885dd8548'),
        'summary': kwargs.get('summary', 'test-security-alarm'),
        'description': kwargs.get('description',
                                  'no fear, this is just a test')
    }


def get_security_alarm_model(**kwargs):
    security_alarm = models.SecurityAlarm()
    security_alarm.id = kwargs.get('id', 1)
    security_alarm.plugin_id = kwargs.get(
        'plugin_id',
        '228df8e8-d5f4-4eb9-a547-dfc649dd1017'
    )
    security_alarm.alarm_id = kwargs.get('alarm_id', '1234')
    security_alarm.timestamp = kwargs.get(
        'timestamp',
        datetime.datetime(2015, 1, 1)
    )
    security_alarm.status = kwargs.get('status', 'new')
    security_alarm.severity = kwargs.get('severity', 'CRITICAL')
    security_alarm.component_id = kwargs.get(
        'component_id',
        '422zb9d5-c5g3-8wy9-a547-hhc885dd8548')
    security_alarm.summary = kwargs.get('summary', 'test-security-alarm')
    security_alarm.description = kwargs.get('description',
                                            'no fear, this is just a test')
    return security_alarm

@@ -1,28 +0,0 @@
# -*- coding: utf-8 -*-

# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
test_cerberus
----------------------------------

Tests for `cerberus` module.
"""

from cerberus.tests import base


class TestCerberus(base.TestCase):

    def test_something(self):
        pass
@@ -20,21 +20,24 @@ test_cerberus manager

Tests for `cerberus` module.
"""
import json

from eventlet import greenpool
import json
import mock
from oslo import messaging
import pkg_resources
import uuid

from oslo import messaging
from stevedore import extension

from cerberus.common import errors
from cerberus.db.sqlalchemy import api
from cerberus.common import loopingcall
from cerberus.common import threadgroup
from cerberus.db.sqlalchemy import api as db_api
from cerberus import manager
from cerberus.openstack.common import loopingcall
from cerberus.openstack.common import threadgroup
from cerberus.plugins import base as base_plugin
from cerberus.tests import base
from cerberus.tests.db import utils as db_utils


PLUGIN_UUID = 'UUID'
@@ -87,6 +90,9 @@ class TestCerberusManager(base.TestBase):
        self.db_plugin_info = DbPluginInfo(1, PLUGIN_UUID)
        self.manager = manager.CerberusManager()
        self.manager.cerberus_manager = self.extension_mgr
        self.fake_db_task = db_utils.get_recurrent_task_model(
            plugin_id=PLUGIN_UUID
        )

    def test_register_plugin(self):
        with mock.patch('cerberus.db.sqlalchemy.api.plugin_info_create') \
@@ -101,7 +107,7 @@ class TestCerberusManager(base.TestBase):
        with mock.patch('cerberus.db.sqlalchemy.api.plugin_info_get') \
                as MockClass:
            MockClass.return_value = DbPluginInfo(1, PLUGIN_UUID)
            api.plugin_version_update = mock.MagicMock()
            db_api.plugin_version_update = mock.MagicMock()
            self.manager._register_plugin(
                self.manager.cerberus_manager['plugin'])
            self.assertEqual(self.db_plugin_info.uuid,
@@ -110,6 +116,7 @@ class TestCerberusManager(base.TestBase):
    @mock.patch.object(messaging.MessageHandlingServer, 'start')
    def test_start(self, rpc_start):
        manager.CerberusManager._register_plugin = mock.MagicMock()
        manager.CerberusManager.add_stored_tasks = mock.MagicMock()
        mgr = manager.CerberusManager()
        mgr.start()
        rpc_start.assert_called_with()
@@ -133,7 +140,7 @@ class TestCerberusManager(base.TestBase):
            self.manager.cerberus_manager['plugin'].obj.fake_function,
            name="fake")

    @mock.patch.object(loopingcall.FixedIntervalLoopingCall, "start")
    @mock.patch.object(loopingcall.CerberusFixedIntervalLoopingCall, "start")
    def test_add_recurrent_task_without_delay(self, mock):
        self.manager._add_recurrent_task(
            self.manager.cerberus_manager['plugin'].obj.fake_function,
@@ -141,7 +148,7 @@ class TestCerberusManager(base.TestBase):
        assert(len(self.manager.tg.timers) == 1)
        mock.assert_called_with(initial_delay=None, interval=15)

    @mock.patch.object(loopingcall.FixedIntervalLoopingCall, "start")
    @mock.patch.object(loopingcall.CerberusFixedIntervalLoopingCall, "start")
    def test_add_recurrent_task_with_delay(self, mock):
        self.manager._add_recurrent_task(
            self.manager.cerberus_manager['plugin'].obj.fake_function,
@@ -150,43 +157,65 @@ class TestCerberusManager(base.TestBase):
        assert(len(self.manager.tg.timers) == 1)
        mock.assert_called_with(initial_delay=200, interval=15)

    @mock.patch.object(greenpool.GreenPool, "spawn")
    def test_add_task(self, mock):
        ctx = {"some": "context"}
        self.manager.add_task(ctx, PLUGIN_UUID, 'fake_function')
        assert(len(self.manager.tg.threads) == 1)
        mock.assert_called_with(self.manager.cerberus_manager['plugin'].obj.
                                fake_function,
                                plugin_id=PLUGIN_UUID,
                                task_id=1)
    @mock.patch.object(db_api, "create_task")
    def test_store_task(self, db_mock):
        task = db_utils.get_recurrent_task_object(
            persistent='True', task_name='task_name', task_type='recurrent',
            task_period=5, plugin_id='490cc562-9e60-46a7-9b5f-c7619aca2e07',
            task_id='500cc562-5c50-89t4-5fc8-c7619aca3n29')
        self.manager._store_task(task, 'method_')
        db_mock.assert_called_with(
            {'name': 'task_name',
             'method': 'method_',
             'type': 'recurrent',
             'period': 5,
             'plugin_id': '490cc562-9e60-46a7-9b5f-c7619aca2e07',
             'running': True,
             'uuid': '500cc562-5c50-89t4-5fc8-c7619aca3n29'})

    @mock.patch.object(greenpool.GreenPool, "spawn")
    def test_add_task_incorrect_task_type(self, mock):
    @mock.patch.object(uuid, "uuid4", return_value=1)
    def test_create_task(self, uuid_mock, th_mock):
        ctx = {"some": "context"}
        self.manager.add_task(ctx, PLUGIN_UUID, 'fake_function',
                              task_type='INCORRECT')
        db_api.create_task = mock.MagicMock(return_value=self.fake_db_task)
        self.manager.create_task(ctx, PLUGIN_UUID, 'fake_function')
        assert(len(self.manager.tg.threads) == 1)
        mock.assert_called_with(self.manager.cerberus_manager[
                                'plugin'].obj.fake_function,
                                plugin_id=PLUGIN_UUID,
                                task_type='INCORRECT',
                                task_id=1)
        th_mock.assert_called_with(
            self.manager.cerberus_manager['plugin'].obj.fake_function,
            plugin_id=PLUGIN_UUID,
            task_id='1')

    @mock.patch.object(loopingcall.FixedIntervalLoopingCall, "start")
    def test_add_recurrent_task_with_interval(self, mock):
    @mock.patch.object(greenpool.GreenPool, "spawn")
    @mock.patch.object(uuid, "uuid4", return_value=1)
    def test_create_task_incorrect_task_type(self, uuid_mock, th_mock):
        ctx = {"some": "context"}
        self.manager.add_task(ctx, PLUGIN_UUID, 'fake_function',
                              task_type='recurrent', task_period=5)
        db_api.create_task = mock.MagicMock(return_value=self.fake_db_task)
        self.manager.create_task(ctx, PLUGIN_UUID, 'fake_function',
                                 task_type='INCORRECT')
        assert(len(self.manager.tg.threads) == 1)
        th_mock.assert_called_with(
            self.manager.cerberus_manager['plugin'].obj.fake_function,
            plugin_id=PLUGIN_UUID,
            task_type='INCORRECT',
            task_id='1')

    @mock.patch.object(loopingcall.CerberusFixedIntervalLoopingCall, "start")
    def test_create_recurrent_task_with_interval(self, mock):
        ctx = {"some": "context"}
        db_api.create_task = mock.MagicMock(return_value=self.fake_db_task)
        self.manager.create_task(ctx, PLUGIN_UUID, 'fake_function',
                                 task_type='recurrent', task_period=5)
        assert(len(self.manager.tg.timers) == 1)
        mock.assert_called_with(initial_delay=None, interval=5)

    def test_get_recurrent_task(self):
        task_id = self.manager._add_recurrent_task(
        self.manager._add_recurrent_task(
            self.manager.cerberus_manager['plugin'].obj.fake_function,
            15)
        recurrent_task = self.manager._get_recurrent_task(task_id)
            15,
            task_id=1)
        recurrent_task = self.manager._get_recurrent_task(1)
        assert(isinstance(recurrent_task,
                          loopingcall.FixedIntervalLoopingCall))
                          loopingcall.CerberusFixedIntervalLoopingCall))

    def test_get_recurrent_task_wrong_id(self):
        task_id = 1
@@ -251,9 +280,10 @@ class TestCerberusManager(base.TestBase):
            task_id=unique_task_id)
        tasks = self.manager._get_tasks()
        self.assertTrue(len(tasks) == 2)
        self.assertTrue(isinstance(tasks[0],
                        loopingcall.FixedIntervalLoopingCall))
        self.assertTrue(isinstance(tasks[1], threadgroup.Thread))
        self.assertTrue(
            isinstance(tasks[0],
                       loopingcall.CerberusFixedIntervalLoopingCall))
        self.assertTrue(isinstance(tasks[1], threadgroup.CerberusThread))

    def test_get_tasks_(self):
        recurrent_task_id = 1
@@ -277,7 +307,8 @@ class TestCerberusManager(base.TestBase):
            task_period,
            task_id=task_id)
        task = self.manager._get_task(task_id)
        self.assertTrue(isinstance(task, loopingcall.FixedIntervalLoopingCall))
        self.assertTrue(
            isinstance(task, loopingcall.CerberusFixedIntervalLoopingCall))

    def test_get_task_unique(self):
        task_id = 1
@@ -285,7 +316,7 @@ class TestCerberusManager(base.TestBase):
            self.manager.cerberus_manager['plugin'].obj.fake_function,
            task_id=task_id)
        task = self.manager._get_task(task_id)
        self.assertTrue(isinstance(task, threadgroup.Thread))
        self.assertTrue(isinstance(task, threadgroup.CerberusThread))

    def test_get_task(self):
        recurrent_task_id = 1
@@ -305,7 +336,7 @@ class TestCerberusManager(base.TestBase):
            task_name=unique_task_name)
        task = self.manager.get_task({'some': 'context'}, 1)
        self.assertTrue(json.loads(task).get('name') == recurrent_task_name)
        self.assertTrue(json.loads(task).get('id') == recurrent_task_id)
        self.assertTrue(int(json.loads(task).get('id')) == recurrent_task_id)
        task_2 = self.manager.get_task({'some': 'context'}, 2)
        self.assertTrue(json.loads(task_2).get('name') == unique_task_name)
        self.assertTrue(json.loads(task_2).get('id') == unique_task_id)
@@ -320,6 +351,7 @@ class TestCerberusManager(base.TestBase):
        assert(len(self.manager.tg.threads) == 0)

    def test_stop_recurrent_task(self):
        db_api.update_state_task = mock.MagicMock()
        task_id = 1
        self.manager._add_recurrent_task(
            self.manager.cerberus_manager['plugin'].obj.fake_function,
@@ -330,6 +362,7 @@ class TestCerberusManager(base.TestBase):
        assert(self.manager.tg.timers[0]._running is False)

    def test_stop_task_recurrent(self):
        db_api.update_state_task = mock.MagicMock()
        recurrent_task_id = 1
        unique_task_id = 2
        task_period = 5
@@ -359,6 +392,7 @@ class TestCerberusManager(base.TestBase):

    def test_delete_recurrent_task(self):
        ctx = {"some": "context"}
        db_api.delete_task = mock.MagicMock()
        task_id = 1
        self.manager._add_recurrent_task(
            self.manager.cerberus_manager['plugin'].obj.fake_function,
@@ -387,6 +421,7 @@ class TestCerberusManager(base.TestBase):

    def test_restart_recurrent_task(self):
        ctxt = {'some': 'context'}
        db_api.update_state_task = mock.MagicMock()
        task_id = 1
        task_period = 5
        self.manager._add_recurrent_task(
@@ -419,16 +454,16 @@ class FaultyTestCerberusManager(base.TestBaseFaulty):
        self.manager = manager.CerberusManager()
        self.manager.cerberus_manager = self.extension_mgr

    def test_add_task_wrong_plugin_id(self):
    def test_create_task_wrong_plugin_id(self):
        ctx = {"some": "context"}
        self.assertRaises(errors.PluginNotFound, self.manager.add_task,
        self.assertRaises(errors.PluginNotFound, self.manager.create_task,
                          ctx, 'WRONG_UUID', 'fake_function')
        assert(len(self.manager.tg.threads) == 0)

    def test_add_task_incorrect_period(self):
    def test_create_task_incorrect_period(self):
        ctx = {"some": "context"}
        self.assertRaises(errors.TaskPeriodNotInteger,
                          self.manager.add_task,
                          self.manager.create_task,
                          ctx,
                          PLUGIN_UUID,
                          'fake_function',
@@ -436,43 +471,43 @@ class FaultyTestCerberusManager(base.TestBaseFaulty):
                          task_period='NOT_INTEGER')
        assert(len(self.manager.tg.threads) == 0)

    def test_add_task_wrong_plugin_method(self):
    def test_create_task_wrong_plugin_method(self):
        ctx = {"some": "context"}
        self.assertRaises(errors.MethodNotCallable,
                          self.manager.add_task, ctx, PLUGIN_UUID, 'fake')
                          self.manager.create_task, ctx, PLUGIN_UUID, 'fake')
        assert(len(self.manager.tg.threads) == 0)

    def test_add_task_method_not_as_string(self):
    def test_create_task_method_not_as_string(self):
        ctx = {"some": "context"}
        self.assertRaises(errors.MethodNotString,
                          self.manager.add_task,
                          self.manager.create_task,
                          ctx,
                          PLUGIN_UUID,
                          self.manager.cerberus_manager[
                              'plugin'].obj.fake_function)
        assert(len(self.manager.tg.threads) == 0)

    def test_add_recurrent_task_without_period(self):
    def test_create_recurrent_task_without_period(self):
        ctx = {"some": "context"}
        self.assertRaises(errors.TaskPeriodNotInteger,
                          self.manager.add_task,
                          self.manager.create_task,
                          ctx,
                          PLUGIN_UUID,
                          'fake_function',
                          task_type='recurrent')
        assert(len(self.manager.tg.timers) == 0)

    def test_add_recurrent_task_wrong_plugin_method(self):
    def test_create_recurrent_task_wrong_plugin_method(self):
        ctx = {"some": "context"}
        self.assertRaises(errors.MethodNotCallable,
                          self.manager.add_task, ctx, PLUGIN_UUID, 'fake',
                          self.manager.create_task, ctx, PLUGIN_UUID, 'fake',
                          task_type='recurrent', task_period=5)
        assert(len(self.manager.tg.timers) == 0)

    def test_add_recurrent_task_method_not_as_string(self):
    def test_create_recurrent_task_method_not_as_string(self):
        ctx = {"some": "context"}
        self.assertRaises(errors.MethodNotString,
                          self.manager.add_task,
                          self.manager.create_task,
                          ctx,
                          PLUGIN_UUID,
                          self.manager.cerberus_manager[
@@ -552,6 +587,7 @@ class FaultyTestCerberusManager(base.TestBaseFaulty):

    def test_restart_recurrent_task_wrong_id(self):
        ctxt = {"some": "ctx"}
        db_api.update_state_task = mock.MagicMock()
        task_id = 1
        self.manager._add_recurrent_task(
            self.manager.cerberus_manager['plugin'].obj.fake_function,
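These tests fix the scheduling contract introduced by the commit: a recurrent task wraps its callable in a CerberusFixedIntervalLoopingCall and starts it with an interval and optional initial delay, which is exactly what the mocked start() calls assert. A minimal sketch of that pattern (using only the constructor and start() signature the mocks above exercise):

from cerberus.common import loopingcall


def heartbeat():
    pass  # a real task would invoke a plugin method here


# Fire heartbeat() every 15 seconds, starting immediately.
timer = loopingcall.CerberusFixedIntervalLoopingCall(heartbeat)
timer.start(interval=15, initial_delay=None)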
@@ -23,29 +23,12 @@ import mock
from oslo.config import fixture as fixture_config

from cerberus.db.sqlalchemy import api
from cerberus.db.sqlalchemy import models
from cerberus.openstack.common.db.sqlalchemy import models as db_models
from cerberus.tests import base


class DbApiTestCase(base.TestBase):

    def test_alert_create(self):
        self.CONF = self.useFixture(fixture_config.Config()).conf
        self.CONF([], project='cerberus')
        al = api.alert_create({'title': 'TitleAlert'})
        self.assertTrue(al.id >= 0)

    def test_alert_get_all(self):
        self.CONF = self.useFixture(fixture_config.Config()).conf
        self.CONF([], project='cerberus')
        self.test_alert_create()
        al = api.alert_get_all()
        for a in al:
            dec = models.AlertJsonSerializer().serialize(a)
            self.assertTrue(dec['id'], 1)
            self.assertTrue(dec['title'], 'TitleAlert')

    def test_security_report_create(self):
        self.CONF = self.useFixture(fixture_config.Config()).conf
        self.CONF([], project='cerberus')

228
contrib/devstack/lib/cerberus
Normal file
@@ -0,0 +1,228 @@
# lib/cerberus
# Install and start **Cerberus** service

# To enable a minimal set of Cerberus services:
# - add the following to localrc:
#
#   enable_service cerberus-api cerberus-agent
#
# Dependencies:
# - functions
# - OS_AUTH_URL for auth in api
# - DEST, HORIZON_DIR, DATA_DIR set to the destination directory
# - SERVICE_PASSWORD, SERVICE_TENANT_NAME for auth in api
# - IDENTITY_API_VERSION for the version of Keystone
# - STACK_USER service user

# stack.sh
# ---------
# install_cerberus
# install_cerberusclient
# configure_cerberus
# init_cerberus
# start_cerberus
# stop_cerberus
# cleanup_cerberus

# Save trace setting
XTRACE=$(set +o | grep xtrace)
set +o xtrace


# Defaults
# --------

# Set up default directories
CERBERUS_DIR=$DEST/cerberus
CERBERUS_CONF_DIR=/etc/cerberus
CERBERUS_CONF=$CERBERUS_CONF_DIR/cerberus.conf
CERBERUS_POLICY=$CERBERUS_CONF_DIR/policy.json
CERBERUS_API_LOG_DIR=/var/log/cerberus
CERBERUS_AUTH_CACHE_DIR=${CERBERUS_AUTH_CACHE_DIR:-/var/cache/cerberus}
CERBERUS_REPORTS_DIR=${DATA_DIR}/cerberus/reports
CERBERUS_CLIENT_DIR=$DEST/python-cerberusclient
CERBERUS_DASHBOARD_DIR=$DEST/cerberus-dashboard

# Support potential entry-points console scripts
if [[ -d $CERBERUS_DIR/bin ]]; then
    CERBERUS_BIN_DIR=$CERBERUS_DIR/bin
else
    CERBERUS_BIN_DIR=$(get_python_exec_prefix)
fi

# Set up database backend
CERBERUS_BACKEND=${CERBERUS_BACKEND:-sqlite}

# Set cerberus repository
CERBERUS_REPO=${CERBERUS_REPO:-git@svc-integration:/home/git/repositories/cerberus.git}
CERBERUS_BRANCH=${CERBERUS_BRANCH:-master}
CERBERUS_CLIENT_REPO=${CERBERUS_CLIENT_REPO:-git@svc-integration:/home/git/repositories/python-cerberusclient.git}
CERBERUS_CLIENT_BRANCH=${CERBERUS_CLIENT_BRANCH:-master}
CERBERUS_DASHBOARD_REPO=${CERBERUS_DASHBOARD_REPO:-git@svc-integration:/home/git/repositories/cerberus-dashboard.git}
CERBERUS_DASHBOARD_BRANCH=${CERBERUS_DASHBOARD_BRANCH:-master}

# Set Cerberus connection info
CERBERUS_SERVICE_HOST=${CERBERUS_SERVICE_HOST:-$SERVICE_HOST}
CERBERUS_SERVICE_PORT=${CERBERUS_SERVICE_PORT:-8300}
CERBERUS_SERVICE_HOSTPORT="$CERBERUS_SERVICE_HOST:$CERBERUS_SERVICE_PORT"
CERBERUS_SERVICE_PROTOCOL=${CERBERUS_SERVICE_PROTOCOL:-$SERVICE_PROTOCOL}

# Set Cerberus auth info
CERBERUS_ADMIN_USER=${CERBERUS_ADMIN_USER:-"admin"}
CERBERUS_ADMIN_PASSWORD=${CERBERUS_ADMIN_PASSWORD:-$ADMIN_PASSWORD}
CERBERUS_ADMIN_TENANT=${CERBERUS_ADMIN_TENANT:-"admin"}

# Tell Tempest this project is present
TEMPEST_SERVICES+=,cerberus


# Functions
# ---------

# create_cerberus_accounts() - Set up common required cerberus accounts

# Tenant      User       Roles
# ------------------------------------------------------------------
# service     cerberus   admin        # if enabled
function create_cerberus_accounts {

    SERVICE_TENANT=$(openstack project list | awk "/ $SERVICE_TENANT_NAME / { print \$2 }")
    ADMIN_ROLE=$(openstack role list | awk "/ admin / { print \$2 }")

    # Cerberus
    if [[ "$ENABLED_SERVICES" =~ "cerberus-api" ]]; then
        CERBERUS_USER=$(openstack user create \
            cerberus \
            --password "$SERVICE_PASSWORD" \
            --project $SERVICE_TENANT \
            --email cerberus@example.com \
            | grep " id " | get_field 2)
        openstack role add \
            $ADMIN_ROLE \
            --project $SERVICE_TENANT \
            --user $CERBERUS_USER
        if [[ "$KEYSTONE_CATALOG_BACKEND" = 'sql' ]]; then
            CERBERUS_SERVICE=$(openstack service create \
                cerberus \
                --type=security \
                --description="Security service" \
                | grep " id " | get_field 2)
            openstack endpoint create \
                $CERBERUS_SERVICE \
                --region RegionOne \
                --publicurl "$CERBERUS_SERVICE_PROTOCOL://$CERBERUS_SERVICE_HOSTPORT" \
                --adminurl "$CERBERUS_SERVICE_PROTOCOL://$CERBERUS_SERVICE_HOSTPORT" \
                --internalurl "$CERBERUS_SERVICE_PROTOCOL://$CERBERUS_SERVICE_HOSTPORT"
        fi
    fi
}


# Test if any Cerberus services are enabled
# is_cerberus_enabled
function is_cerberus_enabled {
    [[ ,${ENABLED_SERVICES} =~ ,"cerberus-" ]] && return 0
    return 1
}

# cleanup_cerberus() - Remove residual data files, anything left over from previous
# runs that a clean run would need to clean up
function cleanup_cerberus {
    # Clean up dirs
    rm -rf $CERBERUS_AUTH_CACHE_DIR/*
    rm -rf $CERBERUS_CONF_DIR/*
}

# configure_cerberus() - Set config files, create data dirs, etc
function configure_cerberus {
    setup_develop $CERBERUS_DIR

    sudo mkdir -m 755 -p $CERBERUS_CONF_DIR
    sudo chown $STACK_USER $CERBERUS_CONF_DIR

    sudo mkdir -m 755 -p $CERBERUS_API_LOG_DIR
    sudo chown $STACK_USER $CERBERUS_API_LOG_DIR

    cp $CERBERUS_DIR$CERBERUS_CONF.sample $CERBERUS_CONF
    cp $CERBERUS_DIR$CERBERUS_POLICY $CERBERUS_POLICY

    # Default
    iniset $CERBERUS_CONF DEFAULT verbose True
    iniset $CERBERUS_CONF DEFAULT debug "$ENABLE_DEBUG_LOG_LEVEL"
    iniset $CERBERUS_CONF DEFAULT sql_connection `database_connection_url cerberus`

    # auth
    iniset $CERBERUS_CONF keystone_authtoken auth_uri "$KEYSTONE_SERVICE_PROTOCOL://$KEYSTONE_SERVICE_HOST:5000/v2.0/"
    iniset $CERBERUS_CONF keystone_authtoken admin_user cerberus
    iniset $CERBERUS_CONF keystone_authtoken admin_password $SERVICE_PASSWORD
    iniset $CERBERUS_CONF keystone_authtoken admin_tenant_name $SERVICE_TENANT_NAME
    iniset $CERBERUS_CONF keystone_authtoken region $REGION_NAME
    iniset $CERBERUS_CONF keystone_authtoken auth_host $KEYSTONE_AUTH_HOST
    iniset $CERBERUS_CONF keystone_authtoken auth_protocol $KEYSTONE_AUTH_PROTOCOL
    iniset $CERBERUS_CONF keystone_authtoken auth_port $KEYSTONE_AUTH_PORT
    iniset $CERBERUS_CONF keystone_authtoken signing_dir $CERBERUS_AUTH_CACHE_DIR
}

# configure_cerberusdashboard()
function configure_cerberusdashboard {
    ln -s $CERBERUS_DASHBOARD_DIR/_cerberus.py.example $HORIZON_DIR/openstack_dashboard/local/enabled/_50_cerberus.py
}

# init_cerberus() - Initialize Cerberus database
function init_cerberus {
    # Delete existing cache
    sudo rm -rf $CERBERUS_AUTH_CACHE_DIR
    sudo mkdir -p $CERBERUS_AUTH_CACHE_DIR
    sudo chown $STACK_USER $CERBERUS_AUTH_CACHE_DIR

    # (Re)create and migrate the cerberus database
    if is_service_enabled mysql postgresql; then
        recreate_database cerberus utf8
        $CERBERUS_BIN_DIR/dbcreate --config-file $CERBERUS_CONF
    fi
}

# install_cerberus() - Collect source and prepare
function install_cerberus {
    git_clone $CERBERUS_REPO $CERBERUS_DIR $CERBERUS_BRANCH
    setup_develop $CERBERUS_DIR
}

# install_cerberusclient() - Collect source and prepare
function install_cerberusclient {
    git_clone $CERBERUS_CLIENT_REPO $CERBERUS_CLIENT_DIR $CERBERUS_CLIENT_BRANCH
    setup_develop $CERBERUS_CLIENT_DIR
}

# install_cerberusdashboard() - Collect source and prepare
function install_cerberusdashboard {
    git_clone $CERBERUS_DASHBOARD_REPO $CERBERUS_DASHBOARD_DIR $CERBERUS_DASHBOARD_BRANCH
    setup_develop $CERBERUS_DASHBOARD_DIR
}


# start_cerberus() - Start running processes, including screen
function start_cerberus {
    screen_it cerberus-agent "cd $CERBERUS_DIR; $CERBERUS_BIN_DIR/cerberus-agent --config-file=$CERBERUS_CONF"
    screen_it cerberus-api "cd $CERBERUS_DIR; $CERBERUS_BIN_DIR/cerberus-api --config-file=$CERBERUS_CONF"
    echo "Waiting for cerberus-api ($CERBERUS_SERVICE_HOST:$CERBERUS_SERVICE_PORT) to start..."
    if ! timeout $SERVICE_TIMEOUT sh -c "while ! curl --noproxy '*' -s http://$CERBERUS_SERVICE_HOST:$CERBERUS_SERVICE_PORT/v1/ >/dev/null; do sleep 1; done"; then
        die $LINENO "cerberus-api did not start"
    fi
}

# stop_cerberus() - Stop running processes
function stop_cerberus {
    # Kill the cerberus screen windows
    for serv in cerberus-api cerberus-agent; do
        screen_stop $serv
    done
}


# Restore xtrace
$XTRACE

# Local variables:
# mode: shell-script
# End:
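For reference, a minimal localrc sketch that drives the library above (values are illustrative; the defaults defined in the file apply when a variable is left unset):

# localrc / local.conf excerpt (illustrative)
enable_service cerberus-api cerberus-agent
CERBERUS_BACKEND=mysql          # defaults to sqlite above
CERBERUS_SERVICE_PORT=8300      # the default port polled by start_cerberus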
1
doc/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
build
177
doc/Makefile
Normal file
@@ -0,0 +1,177 @@
# Makefile for Sphinx documentation
#

# You can set these variables from the command line.
SPHINXOPTS    =
SPHINXBUILD   = sphinx-build
PAPER         =
BUILDDIR      = build

# User-friendly check for sphinx-build
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
endif

# Internal variables.
PAPEROPT_a4     = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS  = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source

.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext

help:
	@echo "Please use \`make <target>' where <target> is one of"
	@echo "  html       to make standalone HTML files"
	@echo "  dirhtml    to make HTML files named index.html in directories"
	@echo "  singlehtml to make a single large HTML file"
	@echo "  pickle     to make pickle files"
	@echo "  json       to make JSON files"
	@echo "  htmlhelp   to make HTML files and a HTML help project"
	@echo "  qthelp     to make HTML files and a qthelp project"
	@echo "  devhelp    to make HTML files and a Devhelp project"
	@echo "  epub       to make an epub"
	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
	@echo "  latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
	@echo "  text       to make text files"
	@echo "  man        to make manual pages"
	@echo "  texinfo    to make Texinfo files"
	@echo "  info       to make Texinfo files and run them through makeinfo"
	@echo "  gettext    to make PO message catalogs"
	@echo "  changes    to make an overview of all changed/added/deprecated items"
	@echo "  xml        to make Docutils-native XML files"
	@echo "  pseudoxml  to make pseudoxml-XML files for display purposes"
	@echo "  linkcheck  to check all external links for integrity"
	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"

clean:
	rm -rf $(BUILDDIR)/*

html:
	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."

dirhtml:
	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."

singlehtml:
	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
	@echo
	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."

pickle:
	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
	@echo
	@echo "Build finished; now you can process the pickle files."

json:
	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
	@echo
	@echo "Build finished; now you can process the JSON files."

htmlhelp:
	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
	@echo
	@echo "Build finished; now you can run HTML Help Workshop with the" \
	      ".hhp project file in $(BUILDDIR)/htmlhelp."

qthelp:
	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
	@echo
	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/cerberus.qhcp"
	@echo "To view the help file:"
	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/cerberus.qhc"

devhelp:
	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
	@echo
	@echo "Build finished."
	@echo "To view the help file:"
	@echo "# mkdir -p $$HOME/.local/share/devhelp/cerberus"
	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/cerberus"
	@echo "# devhelp"

epub:
	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
	@echo
	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."

latex:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo
	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
	@echo "Run \`make' in that directory to run these through (pdf)latex" \
	      "(use \`make latexpdf' here to do that automatically)."

latexpdf:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through pdflatex..."
	$(MAKE) -C $(BUILDDIR)/latex all-pdf
	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."

latexpdfja:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through platex and dvipdfmx..."
	$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."

text:
	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
	@echo
	@echo "Build finished. The text files are in $(BUILDDIR)/text."

man:
	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
	@echo
	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."

texinfo:
	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
	@echo
	@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
	@echo "Run \`make' in that directory to run these through makeinfo" \
	      "(use \`make info' here to do that automatically)."

info:
	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
	@echo "Running Texinfo files through makeinfo..."
	make -C $(BUILDDIR)/texinfo info
	@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."

gettext:
	$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
	@echo
	@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."

changes:
	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
	@echo
	@echo "The overview file is in $(BUILDDIR)/changes."

linkcheck:
	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
	@echo
	@echo "Link check complete; look for any errors in the above output " \
	      "or in $(BUILDDIR)/linkcheck/output.txt."

doctest:
	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
	@echo "Testing of doctests in the sources finished, look at the " \
	      "results in $(BUILDDIR)/doctest/output.txt."

xml:
	$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
	@echo
	@echo "Build finished. The XML files are in $(BUILDDIR)/xml."

pseudoxml:
	$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
	@echo
	@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
17
doc/source/arch.rst
Normal file
@@ -0,0 +1,17 @@
=======================
Cerberus's Architecture
=======================

Cerberus can be split into two main parts:

* API
* Manager


.. graphviz:: graph/arch.dot


Module loading and extensions
=============================

The Cerberus manager uses stevedore to load its extensions dynamically.
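A rough illustration of that loading pattern (the entry-point namespace below is an assumption for illustration, not necessarily the one Cerberus registers)::

    from stevedore import extension

    # Discover every plugin registered under the (assumed) namespace and
    # instantiate it, mirroring what the manager does at start-up.
    mgr = extension.ExtensionManager(
        namespace='cerberus.plugins',
        invoke_on_load=True,
    )
    loaded = [ext.name for ext in mgr]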
@@ -3,7 +3,7 @@
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@@ -15,30 +15,72 @@
|
||||
import os
|
||||
import sys
|
||||
|
||||
|
||||
sys.path.insert(0, os.path.abspath('../..'))
|
||||
# -- General configuration ----------------------------------------------------
|
||||
|
||||
# -- General configuration ------------------------------------------------
|
||||
|
||||
# If your documentation needs a minimal Sphinx version, state it here.
|
||||
#needs_sphinx = '1.0'
|
||||
|
||||
# Add any Sphinx extension module names here, as strings. They can be
|
||||
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
|
||||
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
|
||||
# ones.
|
||||
extensions = [
|
||||
'sphinx.ext.autodoc',
|
||||
#'sphinx.ext.intersphinx',
|
||||
'oslosphinx'
|
||||
'sphinx.ext.graphviz',
|
||||
'sphinx.ext.intersphinx',
|
||||
'sphinx.ext.viewcode',
|
||||
'wsmeext.sphinxext',
|
||||
'sphinxcontrib.pecanwsme.rest',
|
||||
'sphinxcontrib.httpdomain',
|
||||
'oslosphinx',
|
||||
]
|
||||
|
||||
# autodoc generation is a bit aggressive and a nuisance when doing heavy
|
||||
# text edit cycles.
|
||||
# execute "export SPHINX_DEBUG=1" in your terminal to disable
|
||||
wsme_protocols = ['restjson', 'restxml']
|
||||
|
||||
# Add any paths that contain templates here, relative to this directory.
|
||||
templates_path = ['_templates']
|
||||
|
||||
# The suffix of source filenames.
|
||||
source_suffix = '.rst'
|
||||
|
||||
# The encoding of source files.
|
||||
#source_encoding = 'utf-8-sig'
|
||||
|
||||
# The master toctree document.
|
||||
master_doc = 'index'
|
||||
|
||||
# General information about the project.
|
||||
project = u'cerberus'
|
||||
copyright = u'2013, OpenStack Foundation'
|
||||
copyright = u'2015, Eurogiciel'
|
||||
|
||||
# The version info for the project you're documenting, acts as replacement for
|
||||
# |version| and |release|, also used in various other places throughout the
|
||||
# built documents.
|
||||
#
|
||||
# The short X.Y version.
|
||||
version = '0.1'
|
||||
# The full version, including alpha/beta/rc tags.
|
||||
release = '0.1'
|
||||
|
||||
# The language for content autogenerated by Sphinx. Refer to documentation
|
||||
# for a list of supported languages.
|
||||
#language = None
|
||||
|
||||
# There are two options for replacing |today|: either, you set today to some
|
||||
# non-false value, then it is used:
|
||||
#today = ''
|
||||
# Else, today_fmt is used as the format for a strftime call.
|
||||
#today_fmt = '%B %d, %Y'
|
||||
|
||||
# List of patterns, relative to source directory, that match files and
|
||||
# directories to ignore when looking for source files.
|
||||
exclude_patterns = []
|
||||
|
||||
# The reST default role (used for this markup: `text`) to use for all
|
||||
# documents.
|
||||
#default_role = None
|
||||
|
||||
# If true, '()' will be appended to :func: etc. cross-reference text.
|
||||
add_function_parentheses = True
|
||||
@@ -47,29 +89,179 @@ add_function_parentheses = True
# unit titles (such as .. function::).
add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# -- Options for HTML output --------------------------------------------------
# A list of ignored prefixes for module index sorting.
modindex_common_prefix = ['cerberus.']

# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
# html_theme_path = ["."]
# html_theme = '_theme'
# html_static_path = ['static']
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False


# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []

# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None

# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None

# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}

# If false, no module index is generated.
#html_domain_indices = True

# If false, no index is generated.
#html_use_index = True

# If true, the index is split into individual pages for each letter.
#html_split_index = False

# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None

# Output file base name for HTML help builder.
htmlhelp_basename = '%sdoc' % project
htmlhelp_basename = 'cerberusdoc'


# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    #'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    #'preamble': '',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
    ('index',
     '%s.tex' % project,
     u'%s Documentation' % project,
     u'OpenStack Foundation', 'manual'),
    ('index', 'cerberus.tex', u'cerberus Documentation',
     u'Eurogiciel', 'manual'),
]

# Example configuration for intersphinx: refer to the Python standard library.
#intersphinx_mapping = {'http://docs.python.org/': None}
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False

# If true, show page references after internal links.
#latex_show_pagerefs = False

# If true, show URL addresses after external links.
#latex_show_urls = False

# Documents to append as an appendix to all manuals.
#latex_appendices = []

# If false, no module index is generated.
#latex_domain_indices = True


# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'cerberus', u'cerberus Documentation',
     [u'Eurogiciel'], 1)
]

# If true, show URL addresses after external links.
#man_show_urls = False


# -- Options for Texinfo output -------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
    ('index', 'cerberus', u'cerberus Documentation',
     u'Eurogiciel', 'cerberus', 'One line description of project.',
     'Miscellaneous'),
]

# Documents to append as an appendix to all manuals.
#texinfo_appendices = []

# If false, no module index is generated.
#texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'

# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
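With this conf.py in place, the docs should build with the standard Sphinx
tooling; a sketch, assuming the usual doc/source layout:

    sphinx-build -b html doc/source doc/build/html

Since the project uses pbr, `python setup.py build_sphinx` should work as
well.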
59
doc/source/graph/arch.dot
Normal file
@@ -0,0 +1,59 @@
digraph "CloudKitty's Architecture" {

    // Graph parameters
    label="CloudKitty's Internal Architecture";
    node [shape=box];
    compound=true;

    // API
    api [label="API"];

    // Orchestrator
    subgraph cluster_3 {
        label="Orchestrator";
        node[shape=none, width=1.3, height=0, label=""];
        {rank=same; o1 -> o2 -> o3 [style=invis];}
    }

    // Collector
    ceilometer [label="Ceilometer"];
    vendor [label="Vendor specific", style=dotted];
    subgraph cluster_0 {
        label="Collector";
        style=dashed;
        ceilometer -> vendor [style=invis];
    }

    // Rating
    hashmap [label="HashMap module"];
    r_others [label="Other modules...", style=dotted];
    subgraph cluster_1 {
        label="Rating engines";
        style=dashed;
        hashmap -> r_others [style=invis];
    }

    // Write Orchestrator
    w_orchestrator [label="Write Orchestrator"];
    tdb [label="Transient DB"];

    //Writers
    osrf [label="OpenStack\nReference Format\n(json)"];
    w_others [label="Other modules...", style=dotted];
    subgraph cluster_2 {
        label="Writers";
        style=dashed;
        osrf -> w_others [style=invis];
    }

    // Relations
    api -> hashmap;
    api -> r_others;
    o1 -> ceilometer [dir=both, ltail=cluster_3, lhead=cluster_0];
    o2 -> hashmap [dir=both, ltail=cluster_3, lhead=cluster_1];
    o3 -> w_orchestrator [ltail=cluster_3];
    w_orchestrator -> osrf [constraint=false];
    w_orchestrator -> w_others [style=dotted, constraint=false];
    w_orchestrator -> tdb;
}
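Because conf.py enables sphinx.ext.graphviz, this .dot file can be embedded
in the docs through the graphviz directives; to preview it standalone (a
sketch, assuming Graphviz is installed):

    dot -Tpng doc/source/graph/arch.dot -o arch.png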
@@ -1,24 +1,48 @@
.. cerberus documentation master file, created by
   sphinx-quickstart on Tue Jul 9 22:26:36 2013.
   sphinx-quickstart on Wed May 14 23:05:42 2014.
   You can adapt this file completely to your liking, but it should at least
   contain the root `toctree` directive.

Welcome to cerberus's documentation!
========================================================
==============================================
Welcome to Cerberus's developer documentation!
==============================================

Contents:
Introduction
============

Cerberus is a Security as a Service project aimed at integrating security
tools inside OpenStack.

Installation
============

.. toctree::
   :maxdepth: 2
   :maxdepth: 1

   readme
   installation
   usage
   contributing


Architecture
============

.. toctree::
   :maxdepth: 1

   arch


API References
==============

.. toctree::
   :maxdepth: 1

   webapi/root
   webapi/v1


Indices and tables
==================

* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`

@@ -1,12 +1,124 @@
============
Installation
============
#######################################
Cerberus installation and configuration
#######################################

At the command line::

    $ pip install cerberus
Install from source
===================

Or, if you have virtualenvwrapper installed::
There is no release of Cerberus as of now, so the installation must be done
from the git repository.

    $ mkvirtualenv cerberus
    $ pip install cerberus
Retrieve and install Cerberus:

::

    git clone git://git.openstack.org/stackforge/cerberus
    cd cerberus
    python setup.py install

This procedure installs the ``cerberus`` python library and a few
executables:

* ``cerberus-api``: API service
* ``cerberus-agent``: Task management service

Install a sample configuration file:

::

    mkdir /etc/cerberus
    cp etc/cerberus/cerberus.conf.sample /etc/cerberus/cerberus.conf

Configure Cerberus
==================

Edit :file:`/etc/cerberus/cerberus.conf` to configure Cerberus.

The following shows the basic configuration items:

.. code-block:: ini

    [DEFAULT]
    verbose = True
    log_dir = /var/log/cerberus

    rabbit_host = RABBIT_HOST
    rabbit_userid = openstack
    rabbit_password = RABBIT_PASSWORD

    [auth]
    username = cerberus
    password = CERBERUS_PASSWORD
    tenant = service
    region = RegionOne
    url = http://localhost:5000/v2.0

    [keystone_authtoken]
    username = cerberus
    password = CERBERUS_PASSWORD
    project_name = service
    region = RegionOne
    auth_url = http://localhost:5000/v2.0
    auth_plugin = password

    [database]
    connection = mysql://cerberus:CERBERUS_DBPASS@localhost/cerberus

Setup the database and storage backend
======================================

MySQL/MariaDB is the recommended database engine. To set up the database,
use the ``mysql`` client:

::

    mysql -uroot -p << EOF
    CREATE DATABASE cerberus;
    GRANT ALL PRIVILEGES ON cerberus.* TO 'cerberus'@'localhost' IDENTIFIED BY 'CERBERUS_DBPASS';
    EOF

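To check that the account and grants work, a quick probe can help (a sketch;
adjust the credentials to your setup):

::

    mysql -ucerberus -pCERBERUS_DBPASS cerberus -e "SELECT 1;"
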
Run the database synchronisation scripts:

::

    cerberus-dbsync upgrade

Initialize the storage backend:

::

    cerberus-storage-init

Setup Keystone
==============

Cerberus uses Keystone for authentication.

To integrate Cerberus with Keystone, run the following commands (as an
OpenStack administrator):

::

    keystone user-create --name cerberus --pass CERBERUS_PASS
    keystone user-role-add --user cerberus --role admin --tenant service

Create the ``Security`` service and its endpoints:

::

    keystone service-create --name Cerberus --type security
    keystone endpoint-create --service-id SECURITY_SERVICE_ID \
        --publicurl http://localhost:8300 \
        --adminurl http://localhost:8300 \
        --internalurl http://localhost:8300

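``SECURITY_SERVICE_ID`` above is the ID printed by ``keystone
service-create``; if you need to look it up again, something like the
following should work:

::

    keystone service-list | awk '/ security / {print $2}'
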
Start Cerberus
==============

Start the API and processing services:

::

    cerberus-api --config-file /etc/cerberus/cerberus.conf
    cerberus-agent --config-file /etc/cerberus/cerberus.conf

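As a quick smoke test, the root endpoint should answer on the default port
used above (it is expected to return the API version document):

::

    curl http://localhost:8300/
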
16
doc/source/webapi/root.rst
Normal file
@@ -0,0 +1,16 @@
========================
Cerberus REST API (root)
========================

.. rest-controller:: cerberus.api.root:RootController
   :webprefix: / /
.. Dirty hack till the bug is fixed so we can specify root path

.. autotype:: cerberus.api.root.APILink
   :members:

.. autotype:: cerberus.api.root.APIMediaType
   :members:

.. autotype:: cerberus.api.root.APIVersion
   :members:
46
doc/source/webapi/v1.rst
Normal file
@@ -0,0 +1,46 @@
======================
Cerberus REST API (v1)
======================


Plugins
=======

.. rest-controller:: cerberus.api.v1.controllers.plugins:PluginsController
   :webprefix: /v1/plugins


Security alarms
===============

.. rest-controller:: cerberus.api.v1.controllers.security_alarms:SecurityAlarmsController
   :webprefix: /v1/security_alarms

.. rest-controller:: cerberus.api.v1.controllers.security_alarms:SecurityAlarmController
   :webprefix: /v1/security_alarms/{id}

.. autotype:: cerberus.api.v1.datamodels.security_alarm.SecurityAlarmResource
   :members:

.. autotype:: cerberus.api.v1.datamodels.security_alarm.SecurityAlarmResourceCollection
   :members:


Security reports
================

.. rest-controller:: cerberus.api.v1.controllers.security_reports:SecurityReportsController
   :webprefix: /v1/security_reports

.. rest-controller:: cerberus.api.v1.controllers.security_reports:SecurityReportController
   :webprefix: /v1/security_reports/{id}


Tasks
=====

.. rest-controller:: cerberus.api.v1.controllers.tasks:TasksController
   :webprefix: /v1/tasks

.. rest-controller:: cerberus.api.v1.controllers.tasks:ActionController
   :webprefix: /v1/tasks/{id}/actions
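As an illustration, the plugins collection can be queried with curl; a
sketch, assuming the keystone CLI from the installation guide and the
default port:

::

    TOKEN=$(keystone token-get | awk '/ id / {print $4}')
    curl -H "X-Auth-Token: $TOKEN" http://localhost:8300/v1/plugins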
@@ -1,32 +1,759 @@
[DEFAULT]
policy_file = /etc/cerberus/policy.json
debug = True
verbose = True
notification_topics = svc_notifications
rabbit_password = guest
rabbit_hosts = localhost
# rpc_backend = cerberus.openstack.common.rpc.impl_kombu

[service_credentials]
os_tenant_name = service
os_password = svc
os_username = cerberus
#
# Options defined in oslo.messaging
#

# Use durable queues in amqp. (boolean value)
# Deprecated group/name - [DEFAULT]/rabbit_durable_queues
#amqp_durable_queues=false

# Auto-delete queues in amqp. (boolean value)
#amqp_auto_delete=false

# Size of RPC connection pool. (integer value)
#rpc_conn_pool_size=30

# Qpid broker hostname. (string value)
#qpid_hostname=cerberus

# Qpid broker port. (integer value)
#qpid_port=5672

# Qpid HA cluster host:port pairs. (list value)
#qpid_hosts=$qpid_hostname:$qpid_port

# Username for Qpid connection. (string value)
#qpid_username=

# Password for Qpid connection. (string value)
#qpid_password=

# Space separated list of SASL mechanisms to use for auth.
# (string value)
#qpid_sasl_mechanisms=

# Seconds between connection keepalive heartbeats. (integer
# value)
#qpid_heartbeat=60

# Transport to use, either 'tcp' or 'ssl'. (string value)
#qpid_protocol=tcp

# Whether to disable the Nagle algorithm. (boolean value)
#qpid_tcp_nodelay=true

# The number of prefetched messages held by receiver. (integer
# value)
#qpid_receiver_capacity=1

# The qpid topology version to use. Version 1 is what was
# originally used by impl_qpid. Version 2 includes some
# backwards-incompatible changes that allow broker federation
# to work. Users should update to version 2 when they are
# able to take everything down, as it requires a clean break.
# (integer value)
#qpid_topology_version=1

# SSL version to use (valid only if SSL enabled). valid values
# are TLSv1, SSLv23 and SSLv3. SSLv2 may be available on some
# distributions. (string value)
#kombu_ssl_version=

# SSL key file (valid only if SSL enabled). (string value)
#kombu_ssl_keyfile=

# SSL cert file (valid only if SSL enabled). (string value)
#kombu_ssl_certfile=

# SSL certification authority file (valid only if SSL
# enabled). (string value)
#kombu_ssl_ca_certs=

# How long to wait before reconnecting in response to an AMQP
# consumer cancel notification. (floating point value)
#kombu_reconnect_delay=1.0

# The RabbitMQ broker address where a single node is used.
# (string value)
#rabbit_host=cerberus

# The RabbitMQ broker port where a single node is used.
# (integer value)
#rabbit_port=5672

# RabbitMQ HA cluster host:port pairs. (list value)
#rabbit_hosts=$rabbit_host:$rabbit_port

# Connect over SSL for RabbitMQ. (boolean value)
#rabbit_use_ssl=false

# The RabbitMQ userid. (string value)
#rabbit_userid=guest

# The RabbitMQ password. (string value)
#rabbit_password=guest

# the RabbitMQ login method (string value)
#rabbit_login_method=AMQPLAIN

# The RabbitMQ virtual host. (string value)
#rabbit_virtual_host=/

# How frequently to retry connecting with RabbitMQ. (integer
# value)
#rabbit_retry_interval=1

# How long to backoff for between retries when connecting to
# RabbitMQ. (integer value)
#rabbit_retry_backoff=2

# Maximum number of RabbitMQ connection retries. Default is 0
# (infinite retry count). (integer value)
#rabbit_max_retries=0

# Use HA queues in RabbitMQ (x-ha-policy: all). If you change
# this option, you must wipe the RabbitMQ database. (boolean
# value)
#rabbit_ha_queues=false

# If passed, use a fake RabbitMQ provider. (boolean value)
#fake_rabbit=false

# ZeroMQ bind address. Should be a wildcard (*), an ethernet
# interface, or IP. The "host" option should point or resolve
# to this address. (string value)
#rpc_zmq_bind_address=*

# MatchMaker driver. (string value)
#rpc_zmq_matchmaker=oslo.messaging._drivers.matchmaker.MatchMakerLocalhost

# ZeroMQ receiver listening port. (integer value)
#rpc_zmq_port=9501

# Number of ZeroMQ contexts, defaults to 1. (integer value)
#rpc_zmq_contexts=1

# Maximum number of ingress messages to locally buffer per
# topic. Default is unlimited. (integer value)
#rpc_zmq_topic_backlog=<None>

# Directory for holding IPC sockets. (string value)
#rpc_zmq_ipc_dir=/var/run/openstack

# Name of this node. Must be a valid hostname, FQDN, or IP
# address. Must match "host" option, if running Nova. (string
# value)
#rpc_zmq_host=cerberus

# Seconds to wait before a cast expires (TTL). Only supported
# by impl_zmq. (integer value)
#rpc_cast_timeout=30

# Heartbeat frequency. (integer value)
#matchmaker_heartbeat_freq=300

# Heartbeat time-to-live. (integer value)
#matchmaker_heartbeat_ttl=600

# Size of RPC greenthread pool. (integer value)
#rpc_thread_pool_size=64

# Driver or drivers to handle sending notifications. (multi
# valued)
#notification_driver=

# AMQP topic used for OpenStack notifications. (list value)
# Deprecated group/name - [rpc_notifier2]/topics
#notification_topics=notifications

# Seconds to wait for a response from a call. (integer value)
#rpc_response_timeout=60

# A URL representing the messaging driver to use and its full
# configuration. If not set, we fall back to the rpc_backend
# option and driver specific configuration. (string value)
#transport_url=<None>

# The messaging driver to use, defaults to rabbit. Other
# drivers include qpid and zmq. (string value)
#rpc_backend=rabbit

# The default exchange under which topics are scoped. May be
# overridden by an exchange name specified in the
# transport_url option. (string value)
#control_exchange=openstack


#
# Options defined in cerberus.manager
#

# AMQP topic used for OpenStack notifications (list value)
#notification_topics=notifications

# Messaging URLs to listen for notifications. Example:
# transport://user:pass@host1:port[,hostN:portN]/virtual_host
# (DEFAULT/transport_url is used if empty) (multi valued)
#messaging_urls=


#
# Options defined in cerberus.service
#

# Name of this node, which must be valid in an AMQP key. Can
# be an opaque identifier. For ZeroMQ only, must be a valid
# host name, FQDN, or IP address. (string value)
#host=cerberus

# Dispatcher to process data. (multi valued)
#dispatcher=database

# Number of workers for collector service. A single collector
# is enabled by default. (integer value)
#collector_workers=1

# Number of workers for notification service. A single
# notification agent is enabled by default. (integer value)
#notification_workers=1


#
# Options defined in cerberus.api
#

# The strategy to use for authentication. (string value)
#auth_strategy=keystone


#
# Options defined in cerberus.api.app
#

# Configuration file for WSGI definition of API. (string
# value)
#api_paste_config=api_paste.ini


#
# Options defined in cerberus.client.nova_client
#

# Allow novaclient's debug log output. (boolean value)
#nova_http_log_debug=false


#
# Options defined in cerberus.common.exception
#

# Make exception message format errors fatal (boolean value)
#fatal_exception_format_errors=false


#
# Options defined in cerberus.openstack.common.eventlet_backdoor
#

# Enable eventlet backdoor. Acceptable values are 0, <port>,
# and <start>:<end>, where 0 results in listening on a random
# tcp port number; <port> results in listening on the
# specified port number (and not enabling backdoor if that
# port is in use); and <start>:<end> results in listening on
# the smallest unused port number within the specified range
# of port numbers. The chosen port is displayed in the
# service's log file. (string value)
#backdoor_port=<None>


#
# Options defined in cerberus.openstack.common.lockutils
#

# Whether to disable inter-process locks (boolean value)
#disable_process_locking=false

# Directory to use for lock files. (string value)
#lock_path=<None>


#
# Options defined in cerberus.openstack.common.log
#

# Print debugging output (set logging level to DEBUG instead
# of default WARNING level). (boolean value)
#debug=false

# Print more verbose output (set logging level to INFO instead
# of default WARNING level). (boolean value)
#verbose=false

# Log output to standard error (boolean value)
#use_stderr=true

# Format string to use for log messages with context (string
# value)
#logging_context_format_string=%(asctime)s.%(msecs)03d %(process)d %(levelname)s %(name)s [%(request_id)s %(user_identity)s] %(instance)s%(message)s

# Format string to use for log messages without context
# (string value)
#logging_default_format_string=%(asctime)s.%(msecs)03d %(process)d %(levelname)s %(name)s [-] %(instance)s%(message)s

# Data to append to log format when level is DEBUG (string
# value)
#logging_debug_format_suffix=%(funcName)s %(pathname)s:%(lineno)d

# Prefix each line of exception output with this format
# (string value)
#logging_exception_prefix=%(asctime)s.%(msecs)03d %(process)d TRACE %(name)s %(instance)s

# List of logger=LEVEL pairs (list value)
#default_log_levels=amqp=WARN,amqplib=WARN,boto=WARN,qpid=WARN,sqlalchemy=WARN,suds=INFO,oslo.messaging=INFO,iso8601=WARN,requests.packages.urllib3.connectionpool=WARN

# Publish error events (boolean value)
#publish_errors=false

# Make deprecations fatal (boolean value)
#fatal_deprecations=false

# If an instance is passed with the log message, format it
# like this (string value)
#instance_format="[instance: %(uuid)s] "

# If an instance UUID is passed with the log message, format
# it like this (string value)
#instance_uuid_format="[instance: %(uuid)s] "

# The name of logging configuration file. It does not disable
# existing loggers, but just appends specified logging
# configuration to any other existing logging options. Please
# see the Python logging module documentation for details on
# logging configuration files. (string value)
# Deprecated group/name - [DEFAULT]/log_config
#log_config_append=<None>

# DEPRECATED. A logging.Formatter log message format string
# which may use any of the available logging.LogRecord
# attributes. This option is deprecated. Please use
# logging_context_format_string and
# logging_default_format_string instead. (string value)
#log_format=<None>

# Format string for %%(asctime)s in log records. Default:
# %(default)s (string value)
#log_date_format=%Y-%m-%d %H:%M:%S

# (Optional) Name of log file to output to. If no default is
# set, logging will go to stdout. (string value)
# Deprecated group/name - [DEFAULT]/logfile
#log_file=<None>

# (Optional) The base directory used for relative --log-file
# paths (string value)
# Deprecated group/name - [DEFAULT]/logdir
#log_dir=<None>

# Use syslog for logging. Existing syslog format is DEPRECATED
# during I, and then will be changed in J to honor RFC5424
# (boolean value)
#use_syslog=false

# (Optional) Use syslog rfc5424 format for logging. If
# enabled, will add APP-NAME (RFC5424) before the MSG part of
# the syslog message. The old format without APP-NAME is
# deprecated in I, and will be removed in J. (boolean value)
#use_syslog_rfc_format=false

# Syslog facility to receive log lines (string value)
#syslog_log_facility=LOG_USER


#
# Options defined in cerberus.openstack.common.periodic_task
#

# Some periodic tasks can be run in a separate process. Should
# we run them here? (boolean value)
#run_external_periodic_tasks=true


#
# Options defined in cerberus.openstack.common.policy
#

# JSON file containing policy (string value)
#policy_file=policy.json

# Rule enforced when requested rule is not found (string
# value)
#policy_default_rule=default


[api]

#
# Options defined in cerberus.api.app
#

# Host serving the API. (string value)
#host_ip=0.0.0.0

# Host port serving the API. (integer value)
#port=8300

[keystone_authtoken]
signing_dir = /var/cache/cerberus
admin_tenant_name = service
admin_password = svc
admin_user = cerberus
auth_protocol = http
auth_port = 5000
auth_host = localhost

[database]
connection = mysql://root:svc@localhost/cerberus?charset=utf8

[ikare]
ikare_admin=NONE
ikare_password=NONE
ikare_url=HOST
ikare_role_name=ikare
#
# Options defined in cerberus.openstack.common.db.options
#

# The file name to use with SQLite (string value)
#sqlite_db=cerberus.sqlite

# If True, SQLite uses synchronous mode (boolean value)
#sqlite_synchronous=true

# The backend to use for db (string value)
# Deprecated group/name - [DEFAULT]/db_backend
#backend=sqlalchemy

# The SQLAlchemy connection string used to connect to the
# database (string value)
# Deprecated group/name - [DEFAULT]/sql_connection
# Deprecated group/name - [DATABASE]/sql_connection
# Deprecated group/name - [sql]/connection
#connection=<None>

# The SQL mode to be used for MySQL sessions. This option,
# including the default, overrides any server-set SQL mode. To
# use whatever SQL mode is set by the server configuration,
# set this to no value. Example: mysql_sql_mode= (string
# value)
#mysql_sql_mode=TRADITIONAL

# Timeout before idle sql connections are reaped (integer
# value)
# Deprecated group/name - [DEFAULT]/sql_idle_timeout
# Deprecated group/name - [DATABASE]/sql_idle_timeout
# Deprecated group/name - [sql]/idle_timeout
#idle_timeout=3600

# Minimum number of SQL connections to keep open in a pool
# (integer value)
# Deprecated group/name - [DEFAULT]/sql_min_pool_size
# Deprecated group/name - [DATABASE]/sql_min_pool_size
#min_pool_size=1

# Maximum number of SQL connections to keep open in a pool
# (integer value)
# Deprecated group/name - [DEFAULT]/sql_max_pool_size
# Deprecated group/name - [DATABASE]/sql_max_pool_size
#max_pool_size=<None>

# Maximum db connection retries during startup. (setting -1
# implies an infinite retry count) (integer value)
# Deprecated group/name - [DEFAULT]/sql_max_retries
# Deprecated group/name - [DATABASE]/sql_max_retries
#max_retries=10

# Interval between retries of opening a sql connection
# (integer value)
# Deprecated group/name - [DEFAULT]/sql_retry_interval
# Deprecated group/name - [DATABASE]/reconnect_interval
#retry_interval=10

# If set, use this value for max_overflow with sqlalchemy
# (integer value)
# Deprecated group/name - [DEFAULT]/sql_max_overflow
# Deprecated group/name - [DATABASE]/sqlalchemy_max_overflow
#max_overflow=<None>

# Verbosity of SQL debugging information. 0=None,
# 100=Everything (integer value)
# Deprecated group/name - [DEFAULT]/sql_connection_debug
#connection_debug=0

# Add python stack traces to SQL as comment strings (boolean
# value)
# Deprecated group/name - [DEFAULT]/sql_connection_trace
#connection_trace=false

# If set, use this value for pool_timeout with sqlalchemy
# (integer value)
# Deprecated group/name - [DATABASE]/sqlalchemy_pool_timeout
#pool_timeout=<None>

# Enable the experimental use of database reconnect on
# connection lost (boolean value)
#use_db_reconnect=false

# seconds between db connection retries (integer value)
#db_retry_interval=1

# Whether to increase interval between db connection retries,
# up to db_max_retry_interval (boolean value)
#db_inc_retry_interval=true

# max seconds between db connection retries, if
# db_inc_retry_interval is enabled (integer value)
#db_max_retry_interval=10

# maximum db connection retries before error is raised.
# (setting -1 implies an infinite retry count) (integer value)
#db_max_retries=20


[keystone_authtoken]

#
# Options defined in keystoneclient.middleware.auth_token
#

# Prefix to prepend at the beginning of the path. Deprecated,
# use identity_uri. (string value)
#auth_admin_prefix=

# Host providing the admin Identity API endpoint. Deprecated,
# use identity_uri. (string value)
#auth_host=127.0.0.1

# Port of the admin Identity API endpoint. Deprecated, use
# identity_uri. (integer value)
#auth_port=35357

# Protocol of the admin Identity API endpoint (http or https).
# Deprecated, use identity_uri. (string value)
#auth_protocol=https

# Complete public Identity API endpoint (string value)
#auth_uri=<None>

# Complete admin Identity API endpoint. This should specify
# the unversioned root endpoint e.g. https://localhost:35357/
# (string value)
#identity_uri=<None>

# API version of the admin Identity API endpoint (string
# value)
#auth_version=<None>

# Do not handle authorization requests within the middleware,
# but delegate the authorization decision to downstream WSGI
# components (boolean value)
#delay_auth_decision=false

# Request timeout value for communicating with Identity API
# server. (boolean value)
#http_connect_timeout=<None>

# How many times are we trying to reconnect when communicating
# with Identity API Server. (integer value)
#http_request_max_retries=3

# This option is deprecated and may be removed in a future
# release. Single shared secret with the Keystone
# configuration used for bootstrapping a Keystone
# installation, or otherwise bypassing the normal
# authentication process. This option should not be used, use
# `admin_user` and `admin_password` instead. (string value)
#admin_token=<None>

# Keystone account username (string value)
#admin_user=<None>

# Keystone account password (string value)
#admin_password=<None>

# Keystone service account tenant name to validate user tokens
# (string value)
#admin_tenant_name=admin

# Env key for the swift cache (string value)
#cache=<None>

# Required if Keystone server requires client certificate
# (string value)
#certfile=<None>

# Required if Keystone server requires client certificate
# (string value)
#keyfile=<None>

# A PEM encoded Certificate Authority to use when verifying
# HTTPs connections. Defaults to system CAs. (string value)
#cafile=<None>

# Verify HTTPS connections. (boolean value)
#insecure=false

# Directory used to cache files related to PKI tokens (string
# value)
#signing_dir=<None>

# Optionally specify a list of memcached server(s) to use for
# caching. If left undefined, tokens will instead be cached
# in-process. (list value)
# Deprecated group/name - [DEFAULT]/memcache_servers
#memcached_servers=<None>

# In order to prevent excessive effort spent validating
# tokens, the middleware caches previously-seen tokens for a
# configurable duration (in seconds). Set to -1 to disable
# caching completely. (integer value)
#token_cache_time=300

# Determines the frequency at which the list of revoked tokens
# is retrieved from the Identity service (in seconds). A high
# number of revocation events combined with a low cache
# duration may significantly reduce performance. (integer
# value)
#revocation_cache_time=10

# (optional) if defined, indicate whether token data should be
# authenticated or authenticated and encrypted. Acceptable
# values are MAC or ENCRYPT. If MAC, token data is
# authenticated (with HMAC) in the cache. If ENCRYPT, token
# data is encrypted and authenticated in the cache. If the
# value is not one of these options or empty, auth_token will
# raise an exception on initialization. (string value)
#memcache_security_strategy=<None>

# (optional, mandatory if memcache_security_strategy is
# defined) this string is used for key derivation. (string
# value)
#memcache_secret_key=<None>

# (optional) indicate whether to set the X-Service-Catalog
# header. If False, middleware will not ask for service
# catalog on token validation and will not set the X-Service-
# Catalog header. (boolean value)
#include_service_catalog=true

# Used to control the use and type of token binding. Can be
# set to: "disabled" to not check token binding. "permissive"
# (default) to validate binding information if the bind type
# is of a form known to the server and ignore it if not.
# "strict" like "permissive" but if the bind type is unknown
# the token will be rejected. "required" any form of token
# binding is needed to be allowed. Finally the name of a
# binding method that must be present in tokens. (string
# value)
#enforce_token_bind=permissive

# If true, the revocation list will be checked for cached
# tokens. This requires that PKI tokens are configured on the
# Keystone server. (boolean value)
#check_revocations_for_cached=false

# Hash algorithms to use for hashing PKI tokens. This may be a
# single algorithm or multiple. The algorithms are those
# supported by Python standard hashlib.new(). The hashes will
# be tried in the order given, so put the preferred one first
# for performance. The result of the first hash will be stored
# in the cache. This will typically be set to multiple values
# only while migrating from a less secure algorithm to a more
# secure one. Once all the old tokens are expired this option
# should be set to a single value for better performance.
# (list value)
#hash_algorithms=md5


[matchmaker_redis]

#
# Options defined in oslo.messaging
#

# Host to locate redis. (string value)
#host=127.0.0.1

# Use this port to connect to redis host. (integer value)
#port=6379

# Password for Redis server (optional). (string value)
#password=<None>


[matchmaker_ring]

#
# Options defined in oslo.messaging
#

# Matchmaker ring file (JSON). (string value)
# Deprecated group/name - [DEFAULT]/matchmaker_ringfile
#ringfile=/etc/oslo/matchmaker_ring.json


[service_credentials]

#
# Options defined in cerberus.service
#

# User name to use for OpenStack service access. (string
# value)
#os_username=cerberus

# Password to use for OpenStack service access. (string value)
#os_password=admin

# Tenant ID to use for OpenStack service access. (string
# value)
#os_tenant_id=

# Tenant name to use for OpenStack service access. (string
# value)
#os_tenant_name=admin

# Certificate chain for SSL validation. (string value)
#os_cacert=<None>

# Auth URL to use for OpenStack service access. (string value)
#os_auth_url=http://localhost:5000/v2.0

# Region name to use for OpenStack service endpoints. (string
# value)
#os_region_name=<None>

# Type of endpoint in Identity service catalog to use for
# communication with OpenStack services. (string value)
#os_endpoint_type=publicURL

# Disables X.509 certificate validation when an SSL connection
# to Identity Service is established. (boolean value)
#insecure=false


[service_types]

#
# Options defined in cerberus.client.nova_client
#

# Nova service type. (string value)
#nova=compute


[ssl]

#
# Options defined in cerberus.openstack.common.sslutils
#

# CA certificate file to use to verify connecting clients.
# (string value)
#ca_file=<None>

# Certificate file to use when starting the server securely.
# (string value)
#cert_file=<None>

# Private key file to use when starting the server securely.
# (string value)
#key_file=<None>
@@ -55,5 +55,7 @@ console_scripts =

cerberus.plugins =
    testplugin = cerberus.plugins.test_plugin:TestPlugin
    openvasplugin = cerberus.plugins.openvas:OpenVasPlugin
    taskplugin = cerberus.plugins.task_plugin:TaskPlugin

oslo.messaging.drivers =
    cerberusdriver = cerberus.common.cerberus_impl_rabbit:CerberusRabbitDriver
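These entry points are presumably how plugins get discovered at runtime;
after installation, their registration can be checked with setuptools (a
sketch):

    python -c "import pkg_resources; print([ep.name for ep in pkg_resources.iter_entry_points('cerberus.plugins')])"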
@@ -14,6 +14,8 @@ python-subunit
nose
nose-exclude
nosexcover
sphinx>=1.1.2
oslosphinx

# Doc requirements
sphinx>=1.1.2,<1.1.999
sphinxcontrib-pecanwsme>=0.6,<=0.8.0
oslosphinx<=2.5.0
2
tools/config/oslo.config.generator.rc
Normal file
@@ -0,0 +1,2 @@
export CERBERUS_CONFIG_GENERATOR_EXTRA_LIBRARIES='oslo.messaging'
export CERBERUS_CONFIG_GENERATOR_EXTRA_MODULES=keystoneclient.middleware.auth_token
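This rc file parameterizes the oslo sample-config generator; assuming the
usual oslo-incubator tools/config scripts are present in the tree, the
sample configuration would be regenerated with something like:

    tools/config/generate_sample.sh -b . -p cerberus -o etc/cerberus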