Add PUT/POST methods to FM restful API
This commit implements the PUT and POST methods in order to insert and
update alarms.

Story: 2004008
Task: 30270
Change-Id: I773e651165b3654684b95463167fc565aef1ffa4
Co-authored-by: Sun Austin <austin.sun@intel.com>
Signed-off-by: Mario Alfredo Carrillo Arevalo <mario.alfredo.c.arevalo@intel.com>
Signed-off-by: Sun Austin <austin.sun@intel.com>
parent 0de5931685
commit 1e2d8dc04c
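For orientation, a minimal client-side sketch of what this change enables once the FM API is running. The endpoint URL, port, token and field values are assumptions about a typical deployment, not values defined by this commit.

# Hypothetical client-side sketch (not part of this commit).
import json

import requests

FM_URL = "http://127.0.0.1:18002/v1/alarms"      # assumed FM API endpoint
HEADERS = {"Content-Type": "application/json",
           "X-Auth-Token": "<token>"}            # placeholder credentials

new_alarm = {
    "alarm_id": "100.999",                       # illustrative values only
    "alarm_state": "set",
    "alarm_type": "other",
    "entity_type_id": "system",
    "entity_instance_id": "system=example",
    "severity": "minor",
    "reason_text": "example alarm",
    "probable_cause": "unknown",
}

# POST creates the alarm and writes a matching 'set' entry to the event log.
resp = requests.post(FM_URL, headers=HEADERS, data=json.dumps(new_alarm))
print(resp.status_code, resp.text)               # body carries the new uuid

# PUT updates an existing alarm identified by its uuid.
resp = requests.put(FM_URL + "/<uuid>", headers=HEADERS,
                    data=json.dumps({"severity": "major"}))
print(resp.status_code, resp.text)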
@@ -321,6 +321,7 @@ FM_LOG_ID_SW_UPGRADE_AUTO_APPLY_ABORTED = ALARM_GROUP_SW_MGMT + ".221"
 FM_ALARM_STATE_SET = 'set'
 FM_ALARM_STATE_CLEAR = 'clear'
 FM_ALARM_STATE_MSG = 'msg'
+FM_ALARM_STATE_LOG = 'log'

 FM_ALARM_TYPE_0 = 'other'
 FM_ALARM_TYPE_1 = 'communication'
@@ -425,7 +426,8 @@ ALARM_PROBABLE_CAUSE_75 = 'configuration-out-of-date'
 ALARM_PROBABLE_CAUSE_76 = 'configuration-provisioning-required'
 ALARM_PROBABLE_CAUSE_UNKNOWN = 'unknown'

-ALARM_STATE = [FM_ALARM_STATE_SET, FM_ALARM_STATE_CLEAR, FM_ALARM_STATE_MSG]
+ALARM_STATE = [FM_ALARM_STATE_SET, FM_ALARM_STATE_CLEAR,
+               FM_ALARM_STATE_MSG, FM_ALARM_STATE_LOG]

 ALARM_TYPE = [FM_ALARM_TYPE_0, FM_ALARM_TYPE_1, FM_ALARM_TYPE_2,
               FM_ALARM_TYPE_3, FM_ALARM_TYPE_4, FM_ALARM_TYPE_5,
@@ -30,6 +30,12 @@ sysinv_opts = [

 version_info = pbr.version.VersionInfo('fm')

+fm_opts = [
+    cfg.StrOpt('event_log_max_size',
+               default='4000',
+               help="the max size of event_log"),
+]
+
 # Pecan Application Configurations
 app = {
     'root': 'fm.api.controllers.root.RootController',
@@ -51,7 +57,7 @@ def init(args, **kwargs):
     ks_loading.register_session_conf_options(cfg.CONF,
                                              sysinv_group.name)
     logging.register_options(cfg.CONF)
-
+    cfg.CONF.register_opts(fm_opts)
     cfg.CONF(args=args, project='fm',
              version='%%(prog)s %s' % version_info.release_string(),
             **kwargs)
@@ -65,3 +71,7 @@ def setup_logging():
                  {'prog': sys.argv[0],
                   'version': version_info.release_string()})
     LOG.debug("command line: %s", " ".join(sys.argv))
+
+
+def get_max_event_log():
+    return cfg.CONF.event_log_max_size
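Note that event_log_max_size is registered as a StrOpt, so the value arrives as a string and is cast with int() where it is consumed (see event_log_create() further down). A minimal sketch of that flow, assuming only oslo.config is installed; it mirrors the fm_opts registration above outside the FM service:

# Minimal sketch, not the FM service itself.
from oslo_config import cfg

CONF = cfg.CONF
CONF.register_opts([cfg.StrOpt('event_log_max_size', default='4000')])
CONF.set_override('event_log_max_size', '5000')   # e.g. value from fm.conf
print(int(CONF.event_log_max_size))               # consumers cast the string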
@@ -6,12 +6,14 @@

+import datetime
+import json
 import pecan
 from pecan import rest

 import wsme
 from wsme import types as wtypes
 import wsmeext.pecan as wsme_pecan
 from oslo_utils._i18n import _
 from oslo_log import log

 from fm_api import fm_api
@@ -132,7 +134,8 @@ class Alarm(base.APIBase):
             not fm_api.FaultAPIs.alarm_allowed(alm.severity, mgmt_affecting))

         alm.degrade_affecting = str(
-            not fm_api.FaultAPIs.alarm_allowed(alm.severity, degrade_affecting))
+            not fm_api.FaultAPIs.alarm_allowed(alm.severity,
+                                               degrade_affecting))

         return alm

@@ -275,6 +278,28 @@ class AlarmController(rest.RestController):
                                       sort_key=sort_key,
                                       sort_dir=sort_dir)

+    def _get_event_log_data(self, alarm_dict):
+        """ Retrieve a dictionary to create an event_log object
+
+        :param alarm_dict: Dictionary obtained from an alarm object.
+        """
+        event_log_dict = {}
+        for key in alarm_dict.keys():
+            if key == 'alarm_id':
+                event_log_dict['event_log_id'] = alarm_dict[key]
+            elif key == 'alarm_state':
+                event_log_dict['state'] = alarm_dict[key]
+            elif key == 'alarm_type':
+                event_log_dict['event_log_type'] = alarm_dict[key]
+            elif (
+                key == 'inhibit_alarms' or
+                key == 'updated_at' or key == 'masked'
+            ):
+                continue
+            else:
+                event_log_dict[key] = alarm_dict[key]
+        return event_log_dict
+
     @wsme_pecan.wsexpose(AlarmCollection, [Query],
                          types.uuid, int, wtypes.text, wtypes.text, bool, bool)
     def get_all(self, q=[], marker=None, limit=None, sort_key='id',
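In plain terms, _get_event_log_data() renames the alarm-specific keys to their event_log equivalents and drops the fields the event_log table does not carry. A standalone rendering of that mapping (the sample values are illustrative, not taken from the commit):

# Simplified, standalone equivalent of the key mapping; not the method itself.
def map_alarm_to_event_log(alarm_dict):
    renames = {'alarm_id': 'event_log_id',
               'alarm_state': 'state',
               'alarm_type': 'event_log_type'}
    skipped = ('inhibit_alarms', 'updated_at', 'masked')
    return {renames.get(key, key): value
            for key, value in alarm_dict.items() if key not in skipped}

sample = {'alarm_id': '100.101', 'alarm_state': 'set', 'alarm_type': 'other',
          'severity': 'minor', 'masked': False}
print(map_alarm_to_event_log(sample))
# -> {'event_log_id': '100.101', 'state': 'set',
#     'event_log_type': 'other', 'severity': 'minor'}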
@@ -332,7 +357,13 @@ class AlarmController(rest.RestController):

         :param id: uuid of an alarm.
         """
+        data = pecan.request.dbapi.alarm_get(id)
+        if data is None:
+            raise wsme.exc.ClientSideError(_("can not find record to clear!"))
         pecan.request.dbapi.alarm_destroy(id)
+        alarm_state = fm_constants.FM_ALARM_STATE_CLEAR
+        tmp_dict = data.as_dict()
+        self._alarm_save2event_log(tmp_dict, alarm_state, empty_uuid=True)

     @wsme_pecan.wsexpose(AlarmSummary, bool)
     def summary(self, include_suppress=False):
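With this change the DELETE handler records a 'clear' entry in the event log before destroying the alarm. A hedged request sketch; the URL, uuid and token are placeholders:

# Hypothetical sketch only; endpoint URL and token are assumptions.
import requests

requests.delete("http://127.0.0.1:18002/v1/alarms/<alarm-uuid>",
                headers={"X-Auth-Token": "<token>"})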
@@ -341,3 +372,77 @@ class AlarmController(rest.RestController):
         :param include_suppress: filter on suppressed alarms. Default: False
         """
         return self._get_alarm_summary(include_suppress)
+
+    def _alarm_save2event_log(self, data_dict, fm_state, empty_uuid=False):
+        event_log_data = self._get_event_log_data(data_dict)
+        event_log_data['state'] = fm_state
+        event_log_data['id'] = None
+        if empty_uuid is True:
+            event_log_data['uuid'] = None
+        if (event_log_data['timestamp'] is None or
+                fm_state == fm_constants.FM_ALARM_STATE_CLEAR):
+            event_log_data['timestamp'] = datetime.datetime.utcnow()
+        event_data = pecan.request.dbapi.event_log_create(event_log_data)
+        return event_data
+
+    @wsme_pecan.wsexpose(wtypes.text, body=Alarm)
+    def post(self, alarm_data):
+        """Create an alarm/event log.
+        :param alarm_data: All information required to create an
+            alarm or event log.
+        """
+
+        alarm_data_dict = alarm_data.as_dict()
+        alarm_state = alarm_data_dict['alarm_state']
+        try:
+            if alarm_state == fm_constants.FM_ALARM_STATE_SET:
+                data = pecan.request.dbapi.alarm_create(alarm_data_dict)
+                tmp_dict = data.as_dict()
+                self._alarm_save2event_log(tmp_dict, alarm_state)
+            elif (
+                alarm_state == fm_constants.FM_ALARM_STATE_LOG or
+                alarm_state == fm_constants.FM_ALARM_STATE_MSG
+            ):
+                data = self._alarm_save2event_log(alarm_data_dict, 'log')
+            # If the parameter is a uuid, this is the same action as the
+            # DELETE method; it is kept here so the API stays RESTful and
+            # an alarm can also be cleared/deleted by alarm_id and
+            # entity_instance_id.
+            elif alarm_state == fm_constants.FM_ALARM_STATE_CLEAR:
+                clear_uuid = alarm_data_dict['uuid']
+                alarm_id = alarm_data_dict['alarm_id']
+                entity_instance_id = alarm_data_dict['entity_instance_id']
+                if clear_uuid is not None:
+                    data = pecan.request.dbapi.alarm_get(clear_uuid)
+                    pecan.request.dbapi.alarm_destroy(clear_uuid)
+                    tmp_dict = data.as_dict()
+                    self._alarm_save2event_log(tmp_dict, alarm_state, empty_uuid=True)
+                elif alarm_id is not None and entity_instance_id is not None:
+                    data = pecan.request.dbapi.alarm_get_by_ids(alarm_id, entity_instance_id)
+                    if data is None:
+                        raise wsme.exc.ClientSideError(_("can not find record to clear!"))
+                    pecan.request.dbapi.alarm_destroy_by_ids(alarm_id, entity_instance_id)
+                    tmp_dict = data.as_dict()
+                    self._alarm_save2event_log(tmp_dict, alarm_state, empty_uuid=True)
+            else:
+                msg = _("The alarm_state %s is not supported!")
+                raise wsme.exc.ClientSideError(msg % alarm_state)
+        except Exception as err:
+            return err
+        alarm_dict = data.as_dict()
+        return json.dumps({"uuid": alarm_dict['uuid']})
+
+    @wsme_pecan.wsexpose(wtypes.text, wtypes.text, body=Alarm)
+    def put(self, id, alarm_data):
+        """Update an alarm.
+
+        :param id: uuid of an alarm.
+        :param alarm_data: Information to be updated
+        """
+
+        alarm_data_dict = alarm_data.as_dict()
+        try:
+            alm = pecan.request.dbapi.alarm_update(id, alarm_data_dict)
+        except Exception as err:
+            return err
+        alarm_dict = alm.as_dict()
+        return json.dumps({"uuid": alarm_dict['uuid']})
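As the comment in post() notes, clearing by uuid duplicates DELETE; the branch exists so the API also accepts a clear request keyed by alarm_id plus entity_instance_id. A hedged sketch of that call, with placeholder URL, token and field values:

# Hypothetical sketch of the POST 'clear' branch; all values are placeholders.
import json

import requests

body = {"alarm_state": "clear",
        "uuid": None,
        "alarm_id": "100.999",
        "entity_instance_id": "system=example"}
requests.post("http://127.0.0.1:18002/v1/alarms",
              headers={"Content-Type": "application/json",
                       "X-Auth-Token": "<token>"},
              data=json.dumps(body))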
@@ -7,12 +7,10 @@

 import sys
 from oslo_config import cfg

-cfg.CONF(sys.argv[1:], project='fm')
 from fm.db import migration

 CONF = cfg.CONF


 def main():
+    cfg.CONF(sys.argv[1:], project='fm')
     migration.db_sync()
@@ -112,3 +112,7 @@ class Conflict(ApiError):

 class AlarmAlreadyExists(Conflict):
     message = _("An Alarm with UUID %(uuid)s already exists.")
+
+
+class EventLogAlreadyExists(Conflict):
+    message = _("An Eventlog with ID %(id)s already exists.")
@@ -112,6 +112,15 @@ class Connection(object):

         """

+    @abc.abstractmethod
+    def event_log_create(self, values):
+        """Create a new event_log.
+
+        :param values: A dict containing several items used to identify
+        and track the event_log.
+        :returns: An event_log.
+        """
+
     @abc.abstractmethod
     def event_log_get(self, uuid):
         """Return an event_log.
@@ -20,6 +20,7 @@ from oslo_db.sqlalchemy import session as db_session
 from sqlalchemy import asc, desc, or_
 from sqlalchemy.orm.exc import NoResultFound

+from fm.api import config
 from fm.common import constants
 from fm.common import exceptions
 from fm.common import utils
@@ -290,12 +291,10 @@ class Connection(api.Connection):
         with _session_for_write() as session:
             query = model_query(models.Alarm, session=session)
             query = query.filter_by(uuid=id)
-
             try:
                 query.one()
             except NoResultFound:
                 raise exceptions.AlarmNotFound(alarm=id)
-
             query.delete()

     def alarm_destroy_by_ids(self, alarm_id, entity_instance_id):
@@ -304,14 +303,52 @@ class Connection(api.Connection):
             if alarm_id and entity_instance_id:
                 query = query.filter_by(alarm_id=alarm_id)
                 query = query.filter_by(entity_instance_id=entity_instance_id)
-
                 try:
                     query.one()
                 except NoResultFound:
                     raise exceptions.AlarmNotFound(alarm=alarm_id)
-
                 query.delete()

+    def event_log_create(self, values):
+        if not values.get('uuid'):
+            values['uuid'] = utils.generate_uuid()
+        event_log = models.EventLog()
+        event_log.update(values)
+        count = self.event_log_get_count()
+        max_log = config.get_max_event_log()
+        if count >= int(max_log):
+            self.delete_oldest_event_log()
+        with _session_for_write() as session:
+            try:
+                session.add(event_log)
+                session.flush()
+            except db_exc.DBDuplicateEntry:
+                raise exceptions.EventLogAlreadyExists(id=values['id'])
+        return event_log
+
+    def event_log_get_count(self):
+        query = model_query(models.EventLog)
+        return query.count()
+
+    def delete_oldest_event_log(self):
+        result = self.event_log_get_oldest()
+        self.event_log_delete(result['id'])
+
+    def event_log_delete(self, id):
+        with _session_for_write() as session:
+            query = model_query(models.EventLog, session=session)
+            query = query.filter_by(id=id)
+            try:
+                query.one()
+            except NoResultFound:
+                raise exceptions.EventLogNotFound(eventLog=id)
+            query.delete()
+
+    def event_log_get_oldest(self):
+        query = model_query(models.EventLog)
+        result = query.order_by(asc(models.EventLog.created_at)).limit(1).one()
+        return result
+
     @objects.objectify(objects.event_log)
     def event_log_get(self, uuid):
         query = model_query(models.EventLog)
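event_log_create() keeps the table bounded: once the row count reaches event_log_max_size, the oldest entry by created_at is deleted before the new one is inserted. The same policy on a plain Python list, as a rough standalone illustration rather than the actual DB code:

# Rough illustration of the trimming policy; the real code works on the
# EventLog table and reads the cap from config.get_max_event_log().
MAX_EVENT_LOGS = int('4000')          # default from fm_opts above

def add_event(entries, entry):
    if len(entries) >= MAX_EVENT_LOGS:
        entries.pop(0)                # analogous to delete_oldest_event_log()
    entries.append(entry)
    return entry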
@@ -7,8 +7,10 @@

 from sqlalchemy import Column, MetaData, String, Table
 from sqlalchemy import Boolean, Integer, DateTime
+from sqlalchemy.dialects.mysql import DATETIME
 from sqlalchemy.schema import ForeignKeyConstraint

+from oslo_log import log
+LOG = log.getLogger(__name__)
 ENGINE = 'InnoDB'
 CHARSET = 'utf8'

@@ -37,21 +39,25 @@ def upgrade(migrate_engine):
         mysql_charset=CHARSET,
     )
     event_suppression.create()

+    if migrate_engine.url.get_dialect().name == 'mysql':
+        LOG.info("alarm dialect is mysql")
+        timestamp_column = Column('timestamp', DATETIME(fsp=6))
+    else:
+        LOG.info("alarm dialect is others")
+        timestamp_column = Column('timestamp', DateTime(timezone=False))
     alarm = Table(
         'alarm',
         meta,
         Column('created_at', DateTime),
         Column('updated_at', DateTime),
         Column('deleted_at', DateTime),

         Column('id', Integer, primary_key=True, nullable=False),
         Column('uuid', String(255), unique=True, index=True),
         Column('alarm_id', String(255), index=True),
         Column('alarm_state', String(255)),
         Column('entity_type_id', String(255), index=True),
         Column('entity_instance_id', String(255), index=True),
-        Column('timestamp', DateTime(timezone=False)),
+        timestamp_column,
         Column('severity', String(255), index=True),
         Column('reason_text', String(255)),
         Column('alarm_type', String(255), index=True),
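The reason for DATETIME(fsp=6) is that a plain MySQL DATETIME column stores whole seconds only; fsp=6 keeps microsecond precision, while other backends already retain it with the generic DateTime type. A small illustration of what would otherwise be lost:

# Illustrative only: the microsecond part that a DATETIME column without
# fractional seconds would drop.
import datetime

ts = datetime.datetime.utcnow()
print(ts)                             # e.g. 2019-04-01 10:15:30.123456
print(ts.replace(microsecond=0))      # what would be stored without fsp=6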
@@ -72,6 +78,12 @@ def upgrade(migrate_engine):
         mysql_charset=CHARSET,
     )
     alarm.create()
+    if migrate_engine.url.get_dialect().name == 'mysql':
+        LOG.info("event_log dialect is mysql")
+        timestamp_column = Column('timestamp', DATETIME(fsp=6))
+    else:
+        LOG.info("event_log dialect is others")
+        timestamp_column = Column('timestamp', DateTime(timezone=False))

     event_log = Table(
         'event_log',
@@ -86,7 +98,7 @@ def upgrade(migrate_engine):
         Column('state', String(255)),
         Column('entity_type_id', String(255), index=True),
         Column('entity_instance_id', String(255), index=True),
-        Column('timestamp', DateTime(timezone=False)),
+        timestamp_column,
         Column('severity', String(255), index=True),
         Column('reason_text', String(255)),
         Column('event_log_type', String(255), index=True),
@@ -23,7 +23,6 @@

 import json
 from six.moves.urllib.parse import urlparse
-
 from oslo_config import cfg

 from sqlalchemy import Column, ForeignKey, Integer, Boolean
@@ -32,6 +31,10 @@ from sqlalchemy import DateTime
 from sqlalchemy.ext.declarative import declarative_base
 from sqlalchemy.types import TypeDecorator, VARCHAR
 from oslo_db.sqlalchemy import models
+from sqlalchemy.dialects.mysql import DATETIME
+from oslo_log import log
+CONF = cfg.CONF
+LOG = log.getLogger(__name__)


 def table_args():
@@ -42,6 +45,18 @@ def table_args():
     return None


+def get_dialect_name():
+    db_driver_name = 'mysql'
+    if CONF.database.connection is None:
+        LOG.error("database connection is None")
+        return db_driver_name
+    engine_name = urlparse(CONF.database.connection).scheme
+    if engine_name is not None:
+        db_driver_name = engine_name.split("+", 1)[0]
+    LOG.info("db_driver_name is %s" % db_driver_name)
+    return db_driver_name
+
+
 class JSONEncodedDict(TypeDecorator):
     """Represents an immutable structure as a json-encoded string."""

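get_dialect_name() derives the driver name from the scheme of the configured SQLAlchemy URL, stripping any "+driver" suffix. A quick illustration with made-up connection strings:

# Illustration of the scheme parsing used by get_dialect_name();
# the connection strings below are examples only.
from six.moves.urllib.parse import urlparse

for url in ("mysql+pymysql://fm:secret@127.0.0.1/fm",
            "postgresql://fm:secret@127.0.0.1/fm",
            "sqlite:///fm.db"):
    print(urlparse(url).scheme.split("+", 1)[0])
# -> mysql, postgresql, sqlite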
@@ -83,7 +98,10 @@ class Alarm(Base):
     alarm_state = Column(String(255))
     entity_type_id = Column(String(255), index=True)
     entity_instance_id = Column(String(255), index=True)
-    timestamp = Column(DateTime(timezone=False))
+    if get_dialect_name() == 'mysql':
+        timestamp = Column(DATETIME(fsp=6))
+    else:
+        timestamp = Column(DateTime(timezone=False))
     severity = Column(String(255), index=True)
     reason_text = Column(String(255))
     alarm_type = Column(String(255), index=True)
@@ -106,7 +124,10 @@ class EventLog(Base):
     state = Column(String(255))
     entity_type_id = Column(String(255), index=True)
     entity_instance_id = Column(String(255), index=True)
-    timestamp = Column(DateTime(timezone=False))
+    if get_dialect_name() == 'mysql':
+        timestamp = Column(DATETIME(fsp=6))
+    else:
+        timestamp = Column(DateTime(timezone=False))
     severity = Column(String(255), index=True)
     reason_text = Column(String(255))
     event_log_type = Column(String(255), index=True)